Make canonical_va_list_type more strict
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-vrp.h"
36 #include "tree-ssanames.h"
37 #include "expmed.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "recog.h"
41 #include "diagnostic-core.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "fold-const-call.h"
45 #include "stor-layout.h"
46 #include "calls.h"
47 #include "varasm.h"
48 #include "tree-object-size.h"
49 #include "realmpfr.h"
50 #include "cfgrtl.h"
51 #include "except.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "libfuncs.h"
57 #include "output.h"
58 #include "typeclass.h"
59 #include "langhooks.h"
60 #include "value-prof.h"
61 #include "builtins.h"
62 #include "asan.h"
63 #include "cilk.h"
64 #include "tree-chkp.h"
65 #include "rtl-chkp.h"
66 #include "internal-fn.h"
67 #include "case-cfn-macros.h"
68 #include "gimple-fold.h"
69
70
71 struct target_builtins default_target_builtins;
72 #if SWITCHABLE_TARGET
73 struct target_builtins *this_target_builtins = &default_target_builtins;
74 #endif
75
76 /* Define the names of the builtin function types and codes. */
77 const char *const built_in_class_names[BUILT_IN_LAST]
78 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
79
80 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
81 const char * built_in_names[(int) END_BUILTINS] =
82 {
83 #include "builtins.def"
84 };
85
86 /* Setup an array of builtin_info_type, make sure each element decl is
87 initialized to NULL_TREE. */
88 builtin_info_type builtin_info[(int)END_BUILTINS];
89
90 /* Non-zero if __builtin_constant_p should be folded right away. */
91 bool force_folding_builtin_constant_p;
92
93 static rtx c_readstr (const char *, machine_mode);
94 static int target_char_cast (tree, char *);
95 static rtx get_memory_rtx (tree, tree);
96 static int apply_args_size (void);
97 static int apply_result_size (void);
98 static rtx result_vector (int, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_next_arg (void);
114 static rtx expand_builtin_va_start (tree);
115 static rtx expand_builtin_va_end (tree);
116 static rtx expand_builtin_va_copy (tree);
117 static rtx expand_builtin_strcmp (tree, rtx);
118 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
120 static rtx expand_builtin_memcpy (tree, rtx);
121 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
122 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
123 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
124 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 machine_mode, int, tree);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, machine_mode);
133 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_strchr (location_t, tree, tree, tree);
151 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
153 static tree fold_builtin_strcmp (location_t, tree, tree);
154 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
155 static tree fold_builtin_isascii (location_t, tree);
156 static tree fold_builtin_toascii (location_t, tree);
157 static tree fold_builtin_isdigit (location_t, tree);
158 static tree fold_builtin_fabs (location_t, tree, tree);
159 static tree fold_builtin_abs (location_t, tree, tree);
160 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
161 enum tree_code);
162 static tree fold_builtin_0 (location_t, tree);
163 static tree fold_builtin_1 (location_t, tree, tree);
164 static tree fold_builtin_2 (location_t, tree, tree, tree);
165 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_varargs (location_t, tree, tree*, int);
167
168 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
169 static tree fold_builtin_strstr (location_t, tree, tree, tree);
170 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
171 static tree fold_builtin_strspn (location_t, tree, tree);
172 static tree fold_builtin_strcspn (location_t, tree, tree);
173
174 static rtx expand_builtin_object_size (tree);
175 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
176 enum built_in_function);
177 static void maybe_emit_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_free_warning (tree);
180 static tree fold_builtin_object_size (tree, tree);
181
182 unsigned HOST_WIDE_INT target_newline;
183 unsigned HOST_WIDE_INT target_percent;
184 static unsigned HOST_WIDE_INT target_c;
185 static unsigned HOST_WIDE_INT target_s;
186 char target_percent_c[3];
187 char target_percent_s[3];
188 char target_percent_s_newline[4];
189 static tree do_mpfr_remquo (tree, tree, tree);
190 static tree do_mpfr_lgamma_r (tree, tree, tree);
191 static void expand_builtin_sync_synchronize (void);
192
193 /* Return true if NAME starts with __builtin_ or __sync_. */
194
195 static bool
196 is_builtin_name (const char *name)
197 {
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
204 if (flag_cilkplus
205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
208 return false;
209 }
210
211
212 /* Return true if DECL is a function symbol representing a built-in. */
213
214 bool
215 is_builtin_fn (tree decl)
216 {
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218 }
219
220 /* Return true if NODE should be considered for inline expansion regardless
221 of the optimization level. This means whenever a function is invoked with
222 its "internal" name, which normally contains the prefix "__builtin". */
223
224 bool
225 called_as_built_in (tree node)
226 {
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
232 }
233
234 /* Compute values M and N such that M divides (address of EXP - N) and such
235 that N < M. If these numbers can be determined, store M in alignp and N in
236 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
237 *alignp and any bit-offset to *bitposp.
238
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
244 whereas foo() itself starts on an even address.
245
246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
248
249 static bool
250 get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
252 {
253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
255 machine_mode mode;
256 int unsignedp, reversep, volatilep;
257 unsigned int align = BITS_PER_UNIT;
258 bool known_alignment = false;
259
260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
263 &unsignedp, &reversep, &volatilep);
264
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
267 if (TREE_CODE (exp) == FUNCTION_DECL)
268 {
269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
275 }
276 else if (TREE_CODE (exp) == LABEL_DECL)
277 ;
278 else if (TREE_CODE (exp) == CONST_DECL)
279 {
280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
282 align = TYPE_ALIGN (TREE_TYPE (exp));
283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
285
286 known_alignment = true;
287 }
288 else if (DECL_P (exp))
289 {
290 align = DECL_ALIGN (exp);
291 known_alignment = true;
292 }
293 else if (TREE_CODE (exp) == INDIRECT_REF
294 || TREE_CODE (exp) == MEM_REF
295 || TREE_CODE (exp) == TARGET_MEM_REF)
296 {
297 tree addr = TREE_OPERAND (exp, 0);
298 unsigned ptr_align;
299 unsigned HOST_WIDE_INT ptr_bitpos;
300 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
301
302 /* If the address is explicitely aligned, handle that. */
303 if (TREE_CODE (addr) == BIT_AND_EXPR
304 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
305 {
306 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
307 ptr_bitmask *= BITS_PER_UNIT;
308 align = ptr_bitmask & -ptr_bitmask;
309 addr = TREE_OPERAND (addr, 0);
310 }
311
312 known_alignment
313 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
314 align = MAX (ptr_align, align);
315
316 /* Re-apply explicit alignment to the bitpos. */
317 ptr_bitpos &= ptr_bitmask;
318
319 /* The alignment of the pointer operand in a TARGET_MEM_REF
320 has to take the variable offset parts into account. */
321 if (TREE_CODE (exp) == TARGET_MEM_REF)
322 {
323 if (TMR_INDEX (exp))
324 {
325 unsigned HOST_WIDE_INT step = 1;
326 if (TMR_STEP (exp))
327 step = TREE_INT_CST_LOW (TMR_STEP (exp));
328 align = MIN (align, (step & -step) * BITS_PER_UNIT);
329 }
330 if (TMR_INDEX2 (exp))
331 align = BITS_PER_UNIT;
332 known_alignment = false;
333 }
334
335 /* When EXP is an actual memory reference then we can use
336 TYPE_ALIGN of a pointer indirection to derive alignment.
337 Do so only if get_pointer_alignment_1 did not reveal absolute
338 alignment knowledge and if using that alignment would
339 improve the situation. */
340 if (!addr_p && !known_alignment
341 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
342 align = TYPE_ALIGN (TREE_TYPE (exp));
343 else
344 {
345 /* Else adjust bitpos accordingly. */
346 bitpos += ptr_bitpos;
347 if (TREE_CODE (exp) == MEM_REF
348 || TREE_CODE (exp) == TARGET_MEM_REF)
349 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
350 }
351 }
352 else if (TREE_CODE (exp) == STRING_CST)
353 {
354 /* STRING_CST are the only constant objects we allow to be not
355 wrapped inside a CONST_DECL. */
356 align = TYPE_ALIGN (TREE_TYPE (exp));
357 if (CONSTANT_CLASS_P (exp))
358 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
359
360 known_alignment = true;
361 }
362
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 if (offset)
366 {
367 unsigned int trailing_zeros = tree_ctz (offset);
368 if (trailing_zeros < HOST_BITS_PER_INT)
369 {
370 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
371 if (inner)
372 align = MIN (align, inner);
373 }
374 }
375
376 *alignp = align;
377 *bitposp = bitpos & (*alignp - 1);
378 return known_alignment;
379 }
380
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in alignp and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
385
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
389 {
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
391 }
392
393 /* Return the alignment in bits of EXP, an object. */
394
395 unsigned int
396 get_object_alignment (tree exp)
397 {
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
400
401 get_object_alignment_1 (exp, &align, &bitpos);
402
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
405
406 if (bitpos != 0)
407 align = (bitpos & -bitpos);
408 return align;
409 }
410
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in alignp and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
415
416 If EXP is not a pointer, false is returned too. */
417
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
421 {
422 STRIP_NOPS (exp);
423
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 {
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
436 {
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
439 {
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
443 }
444 }
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
448 }
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 {
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 {
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = (bitpos & -bitpos);
506
507 return align;
508 }
509
510 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
511 way, because it could contain a zero byte in the middle.
512 TREE_STRING_LENGTH is the size of the character array, not the string.
513
514 ONLY_VALUE should be nonzero if the result is not going to be emitted
515 into the instruction stream and zero if it is going to be expanded.
516 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
517 is returned, otherwise NULL, since
518 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
519 evaluate the side-effects.
520
521 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
522 accesses. Note that this implies the result is not going to be emitted
523 into the instruction stream.
524
525 The value returned is of type `ssizetype'.
526
527 Unfortunately, string_constant can't access the values of const char
528 arrays with initializers, so neither can we do so here. */
529
530 tree
531 c_strlen (tree src, int only_value)
532 {
533 tree offset_node;
534 HOST_WIDE_INT offset;
535 int max;
536 const char *ptr;
537 location_t loc;
538
539 STRIP_NOPS (src);
540 if (TREE_CODE (src) == COND_EXPR
541 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
542 {
543 tree len1, len2;
544
545 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
546 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
547 if (tree_int_cst_equal (len1, len2))
548 return len1;
549 }
550
551 if (TREE_CODE (src) == COMPOUND_EXPR
552 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
553 return c_strlen (TREE_OPERAND (src, 1), only_value);
554
555 loc = EXPR_LOC_OR_LOC (src, input_location);
556
557 src = string_constant (src, &offset_node);
558 if (src == 0)
559 return NULL_TREE;
560
561 max = TREE_STRING_LENGTH (src) - 1;
562 ptr = TREE_STRING_POINTER (src);
563
564 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
565 {
566 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
567 compute the offset to the following null if we don't know where to
568 start searching for it. */
569 int i;
570
571 for (i = 0; i < max; i++)
572 if (ptr[i] == 0)
573 return NULL_TREE;
574
575 /* We don't know the starting offset, but we do know that the string
576 has no internal zero bytes. We can assume that the offset falls
577 within the bounds of the string; otherwise, the programmer deserves
578 what he gets. Subtract the offset from the length of the string,
579 and return that. This would perhaps not be valid if we were dealing
580 with named arrays in addition to literal string constants. */
581
582 return size_diffop_loc (loc, size_int (max), offset_node);
583 }
584
585 /* We have a known offset into the string. Start searching there for
586 a null character if we can represent it as a single HOST_WIDE_INT. */
587 if (offset_node == 0)
588 offset = 0;
589 else if (! tree_fits_shwi_p (offset_node))
590 offset = -1;
591 else
592 offset = tree_to_shwi (offset_node);
593
594 /* If the offset is known to be out of bounds, warn, and call strlen at
595 runtime. */
596 if (offset < 0 || offset > max)
597 {
598 /* Suppress multiple warnings for propagated constant strings. */
599 if (only_value != 2
600 && !TREE_NO_WARNING (src))
601 {
602 warning_at (loc, 0, "offset outside bounds of constant string");
603 TREE_NO_WARNING (src) = 1;
604 }
605 return NULL_TREE;
606 }
607
608 /* Use strlen to search for the first zero byte. Since any strings
609 constructed with build_string will have nulls appended, we win even
610 if we get handed something like (char[4])"abcd".
611
612 Since OFFSET is our starting index into the string, no further
613 calculation is needed. */
614 return ssize_int (strlen (ptr + offset));
615 }
616
617 /* Return a constant integer corresponding to target reading
618 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
619
620 static rtx
621 c_readstr (const char *str, machine_mode mode)
622 {
623 HOST_WIDE_INT ch;
624 unsigned int i, j;
625 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
626
627 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
628 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
629 / HOST_BITS_PER_WIDE_INT;
630
631 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
632 for (i = 0; i < len; i++)
633 tmp[i] = 0;
634
635 ch = 1;
636 for (i = 0; i < GET_MODE_SIZE (mode); i++)
637 {
638 j = i;
639 if (WORDS_BIG_ENDIAN)
640 j = GET_MODE_SIZE (mode) - i - 1;
641 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
642 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
643 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
644 j *= BITS_PER_UNIT;
645
646 if (ch)
647 ch = (unsigned char) str[i];
648 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
649 }
650
651 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
652 return immed_wide_int_const (c, mode);
653 }
654
655 /* Cast a target constant CST to target CHAR and if that value fits into
656 host char type, return zero and put that value into variable pointed to by
657 P. */
658
659 static int
660 target_char_cast (tree cst, char *p)
661 {
662 unsigned HOST_WIDE_INT val, hostval;
663
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
666 return 1;
667
668 /* Do not care if it fits or not right here. */
669 val = TREE_INT_CST_LOW (cst);
670
671 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
672 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
673
674 hostval = val;
675 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
676 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
677
678 if (val != hostval)
679 return 1;
680
681 *p = hostval;
682 return 0;
683 }
684
685 /* Similar to save_expr, but assumes that arbitrary code is not executed
686 in between the multiple evaluations. In particular, we assume that a
687 non-addressable local variable will not be modified. */
688
689 static tree
690 builtin_save_expr (tree exp)
691 {
692 if (TREE_CODE (exp) == SSA_NAME
693 || (TREE_ADDRESSABLE (exp) == 0
694 && (TREE_CODE (exp) == PARM_DECL
695 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
696 return exp;
697
698 return save_expr (exp);
699 }
700
701 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
702 times to get the address of either a higher stack frame, or a return
703 address located within it (depending on FNDECL_CODE). */
704
705 static rtx
706 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
707 {
708 int i;
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
710 if (tem == NULL_RTX)
711 {
712 /* For a zero count with __builtin_return_address, we don't care what
713 frame address we return, because target-specific definitions will
714 override us. Therefore frame pointer elimination is OK, and using
715 the soft frame pointer is OK.
716
717 For a nonzero count, or a zero count with __builtin_frame_address,
718 we require a stable offset from the current frame pointer to the
719 previous one, so we must use the hard frame pointer, and
720 we must disable frame pointer elimination. */
721 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
722 tem = frame_pointer_rtx;
723 else
724 {
725 tem = hard_frame_pointer_rtx;
726
727 /* Tell reload not to eliminate the frame pointer. */
728 crtl->accesses_prior_frames = 1;
729 }
730 }
731
732 if (count > 0)
733 SETUP_FRAME_ADDRESSES ();
734
735 /* On the SPARC, the return address is not in the frame, it is in a
736 register. There is no way to access it off of the current frame
737 pointer, but it can be accessed off the previous frame pointer by
738 reading the value from the register window save area. */
739 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
740 count--;
741
742 /* Scan back COUNT frames to the specified frame. */
743 for (i = 0; i < count; i++)
744 {
745 /* Assume the dynamic chain pointer is in the word that the
746 frame address points to, unless otherwise specified. */
747 tem = DYNAMIC_CHAIN_ADDRESS (tem);
748 tem = memory_address (Pmode, tem);
749 tem = gen_frame_mem (Pmode, tem);
750 tem = copy_to_reg (tem);
751 }
752
753 /* For __builtin_frame_address, return what we've got. But, on
754 the SPARC for example, we may have to add a bias. */
755 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
756 return FRAME_ADDR_RTX (tem);
757
758 /* For __builtin_return_address, get the return address from that frame. */
759 #ifdef RETURN_ADDR_RTX
760 tem = RETURN_ADDR_RTX (count, tem);
761 #else
762 tem = memory_address (Pmode,
763 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
764 tem = gen_frame_mem (Pmode, tem);
765 #endif
766 return tem;
767 }
768
769 /* Alias set used for setjmp buffer. */
770 static alias_set_type setjmp_alias_set = -1;
771
772 /* Construct the leading half of a __builtin_setjmp call. Control will
773 return to RECEIVER_LABEL. This is also called directly by the SJLJ
774 exception handling code. */
775
776 void
777 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
778 {
779 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
780 rtx stack_save;
781 rtx mem;
782
783 if (setjmp_alias_set == -1)
784 setjmp_alias_set = new_alias_set ();
785
786 buf_addr = convert_memory_address (Pmode, buf_addr);
787
788 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
789
790 /* We store the frame pointer and the address of receiver_label in
791 the buffer and use the rest of it for the stack save area, which
792 is machine-dependent. */
793
794 mem = gen_rtx_MEM (Pmode, buf_addr);
795 set_mem_alias_set (mem, setjmp_alias_set);
796 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
797
798 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
799 GET_MODE_SIZE (Pmode))),
800 set_mem_alias_set (mem, setjmp_alias_set);
801
802 emit_move_insn (validize_mem (mem),
803 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
804
805 stack_save = gen_rtx_MEM (sa_mode,
806 plus_constant (Pmode, buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (stack_save, setjmp_alias_set);
809 emit_stack_save (SAVE_NONLOCAL, &stack_save);
810
811 /* If there is further processing to do, do it. */
812 if (targetm.have_builtin_setjmp_setup ())
813 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
814
815 /* We have a nonlocal label. */
816 cfun->has_nonlocal_label = 1;
817 }
818
819 /* Construct the trailing part of a __builtin_setjmp call. This is
820 also called directly by the SJLJ exception handling code.
821 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
822
823 void
824 expand_builtin_setjmp_receiver (rtx receiver_label)
825 {
826 rtx chain;
827
828 /* Mark the FP as used when we get here, so we have to make sure it's
829 marked as used by this function. */
830 emit_use (hard_frame_pointer_rtx);
831
832 /* Mark the static chain as clobbered here so life information
833 doesn't get messed up for it. */
834 chain = targetm.calls.static_chain (current_function_decl, true);
835 if (chain && REG_P (chain))
836 emit_clobber (chain);
837
838 /* Now put in the code to restore the frame pointer, and argument
839 pointer, if needed. */
840 if (! targetm.have_nonlocal_goto ())
841 {
842 /* First adjust our frame pointer to its actual value. It was
843 previously set to the start of the virtual area corresponding to
844 the stacked variables when we branched here and now needs to be
845 adjusted to the actual hardware fp value.
846
847 Assignments to virtual registers are converted by
848 instantiate_virtual_regs into the corresponding assignment
849 to the underlying register (fp in this case) that makes
850 the original assignment true.
851 So the following insn will actually be decrementing fp by
852 STARTING_FRAME_OFFSET. */
853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
854
855 /* Restoring the frame pointer also modifies the hard frame pointer.
856 Mark it used (so that the previous assignment remains live once
857 the frame pointer is eliminated) and clobbered (to represent the
858 implicit update from the assignment). */
859 emit_use (hard_frame_pointer_rtx);
860 emit_clobber (hard_frame_pointer_rtx);
861 }
862
863 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
864 {
865 /* If the argument pointer can be eliminated in favor of the
866 frame pointer, we don't need to restore it. We assume here
867 that if such an elimination is present, it can always be used.
868 This is the case on all known machines; if we don't make this
869 assumption, we do unnecessary saving on many machines. */
870 size_t i;
871 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
872
873 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
874 if (elim_regs[i].from == ARG_POINTER_REGNUM
875 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
876 break;
877
878 if (i == ARRAY_SIZE (elim_regs))
879 {
880 /* Now restore our arg pointer from the address at which it
881 was saved in our stack frame. */
882 emit_move_insn (crtl->args.internal_arg_pointer,
883 copy_to_reg (get_arg_pointer_save_area ()));
884 }
885 }
886
887 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
888 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
889 else if (targetm.have_nonlocal_goto_receiver ())
890 emit_insn (targetm.gen_nonlocal_goto_receiver ());
891 else
892 { /* Nothing */ }
893
894 /* We must not allow the code we just generated to be reordered by
895 scheduling. Specifically, the update of the frame pointer must
896 happen immediately, not later. */
897 emit_insn (gen_blockage ());
898 }
899
900 /* __builtin_longjmp is passed a pointer to an array of five words (not
901 all will be used on all machines). It operates similarly to the C
902 library function of the same name, but is more efficient. Much of
903 the code below is copied from the handling of non-local gotos. */
904
905 static void
906 expand_builtin_longjmp (rtx buf_addr, rtx value)
907 {
908 rtx fp, lab, stack;
909 rtx_insn *insn, *last;
910 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
911
912 /* DRAP is needed for stack realign if longjmp is expanded to current
913 function */
914 if (SUPPORTS_STACK_ALIGNMENT)
915 crtl->need_drap = true;
916
917 if (setjmp_alias_set == -1)
918 setjmp_alias_set = new_alias_set ();
919
920 buf_addr = convert_memory_address (Pmode, buf_addr);
921
922 buf_addr = force_reg (Pmode, buf_addr);
923
924 /* We require that the user must pass a second argument of 1, because
925 that is what builtin_setjmp will return. */
926 gcc_assert (value == const1_rtx);
927
928 last = get_last_insn ();
929 if (targetm.have_builtin_longjmp ())
930 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
931 else
932 {
933 fp = gen_rtx_MEM (Pmode, buf_addr);
934 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
935 GET_MODE_SIZE (Pmode)));
936
937 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
938 2 * GET_MODE_SIZE (Pmode)));
939 set_mem_alias_set (fp, setjmp_alias_set);
940 set_mem_alias_set (lab, setjmp_alias_set);
941 set_mem_alias_set (stack, setjmp_alias_set);
942
943 /* Pick up FP, label, and SP from the block and jump. This code is
944 from expand_goto in stmt.c; see there for detailed comments. */
945 if (targetm.have_nonlocal_goto ())
946 /* We have to pass a value to the nonlocal_goto pattern that will
947 get copied into the static_chain pointer, but it does not matter
948 what that value is, because builtin_setjmp does not use it. */
949 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
950 else
951 {
952 lab = copy_to_reg (lab);
953
954 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
955 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
956
957 emit_move_insn (hard_frame_pointer_rtx, fp);
958 emit_stack_restore (SAVE_NONLOCAL, stack);
959
960 emit_use (hard_frame_pointer_rtx);
961 emit_use (stack_pointer_rtx);
962 emit_indirect_jump (lab);
963 }
964 }
965
966 /* Search backwards and mark the jump insn as a non-local goto.
967 Note that this precludes the use of __builtin_longjmp to a
968 __builtin_setjmp target in the same function. However, we've
969 already cautioned the user that these functions are for
970 internal exception handling use only. */
971 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
972 {
973 gcc_assert (insn != last);
974
975 if (JUMP_P (insn))
976 {
977 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
978 break;
979 }
980 else if (CALL_P (insn))
981 break;
982 }
983 }
984
985 static inline bool
986 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
987 {
988 return (iter->i < iter->n);
989 }
990
991 /* This function validates the types of a function call argument list
992 against a specified list of tree_codes. If the last specifier is a 0,
993 that represents an ellipses, otherwise the last specifier must be a
994 VOID_TYPE. */
995
996 static bool
997 validate_arglist (const_tree callexpr, ...)
998 {
999 enum tree_code code;
1000 bool res = 0;
1001 va_list ap;
1002 const_call_expr_arg_iterator iter;
1003 const_tree arg;
1004
1005 va_start (ap, callexpr);
1006 init_const_call_expr_arg_iterator (callexpr, &iter);
1007
1008 do
1009 {
1010 code = (enum tree_code) va_arg (ap, int);
1011 switch (code)
1012 {
1013 case 0:
1014 /* This signifies an ellipses, any further arguments are all ok. */
1015 res = true;
1016 goto end;
1017 case VOID_TYPE:
1018 /* This signifies an endlink, if no arguments remain, return
1019 true, otherwise return false. */
1020 res = !more_const_call_expr_args_p (&iter);
1021 goto end;
1022 default:
1023 /* If no parameters remain or the parameter's code does not
1024 match the specified code, return false. Otherwise continue
1025 checking any remaining arguments. */
1026 arg = next_const_call_expr_arg (&iter);
1027 if (!validate_arg (arg, code))
1028 goto end;
1029 break;
1030 }
1031 }
1032 while (1);
1033
1034 /* We need gotos here since we can only have one VA_CLOSE in a
1035 function. */
1036 end: ;
1037 va_end (ap);
1038
1039 return res;
1040 }
1041
1042 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1043 and the address of the save area. */
1044
1045 static rtx
1046 expand_builtin_nonlocal_goto (tree exp)
1047 {
1048 tree t_label, t_save_area;
1049 rtx r_label, r_save_area, r_fp, r_sp;
1050 rtx_insn *insn;
1051
1052 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1053 return NULL_RTX;
1054
1055 t_label = CALL_EXPR_ARG (exp, 0);
1056 t_save_area = CALL_EXPR_ARG (exp, 1);
1057
1058 r_label = expand_normal (t_label);
1059 r_label = convert_memory_address (Pmode, r_label);
1060 r_save_area = expand_normal (t_save_area);
1061 r_save_area = convert_memory_address (Pmode, r_save_area);
1062 /* Copy the address of the save location to a register just in case it was
1063 based on the frame pointer. */
1064 r_save_area = copy_to_reg (r_save_area);
1065 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1066 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1067 plus_constant (Pmode, r_save_area,
1068 GET_MODE_SIZE (Pmode)));
1069
1070 crtl->has_nonlocal_goto = 1;
1071
1072 /* ??? We no longer need to pass the static chain value, afaik. */
1073 if (targetm.have_nonlocal_goto ())
1074 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1075 else
1076 {
1077 r_label = copy_to_reg (r_label);
1078
1079 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1080 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1081
1082 /* Restore frame pointer for containing function. */
1083 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1084 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1085
1086 /* USE of hard_frame_pointer_rtx added for consistency;
1087 not clear if really needed. */
1088 emit_use (hard_frame_pointer_rtx);
1089 emit_use (stack_pointer_rtx);
1090
1091 /* If the architecture is using a GP register, we must
1092 conservatively assume that the target function makes use of it.
1093 The prologue of functions with nonlocal gotos must therefore
1094 initialize the GP register to the appropriate value, and we
1095 must then make sure that this value is live at the point
1096 of the jump. (Note that this doesn't necessarily apply
1097 to targets with a nonlocal_goto pattern; they are free
1098 to implement it in their own way. Note also that this is
1099 a no-op if the GP register is a global invariant.) */
1100 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1101 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1102 emit_use (pic_offset_table_rtx);
1103
1104 emit_indirect_jump (r_label);
1105 }
1106
1107 /* Search backwards to the jump insn and mark it as a
1108 non-local goto. */
1109 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1110 {
1111 if (JUMP_P (insn))
1112 {
1113 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1114 break;
1115 }
1116 else if (CALL_P (insn))
1117 break;
1118 }
1119
1120 return const0_rtx;
1121 }
1122
1123 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1124 (not all will be used on all machines) that was passed to __builtin_setjmp.
1125 It updates the stack pointer in that block to the current value. This is
1126 also called directly by the SJLJ exception handling code. */
1127
1128 void
1129 expand_builtin_update_setjmp_buf (rtx buf_addr)
1130 {
1131 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1132 rtx stack_save
1133 = gen_rtx_MEM (sa_mode,
1134 memory_address
1135 (sa_mode,
1136 plus_constant (Pmode, buf_addr,
1137 2 * GET_MODE_SIZE (Pmode))));
1138
1139 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1140 }
1141
1142 /* Expand a call to __builtin_prefetch. For a target that does not support
1143 data prefetch, evaluate the memory address argument in case it has side
1144 effects. */
1145
1146 static void
1147 expand_builtin_prefetch (tree exp)
1148 {
1149 tree arg0, arg1, arg2;
1150 int nargs;
1151 rtx op0, op1, op2;
1152
1153 if (!validate_arglist (exp, POINTER_TYPE, 0))
1154 return;
1155
1156 arg0 = CALL_EXPR_ARG (exp, 0);
1157
1158 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1159 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1160 locality). */
1161 nargs = call_expr_nargs (exp);
1162 if (nargs > 1)
1163 arg1 = CALL_EXPR_ARG (exp, 1);
1164 else
1165 arg1 = integer_zero_node;
1166 if (nargs > 2)
1167 arg2 = CALL_EXPR_ARG (exp, 2);
1168 else
1169 arg2 = integer_three_node;
1170
1171 /* Argument 0 is an address. */
1172 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1173
1174 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1175 if (TREE_CODE (arg1) != INTEGER_CST)
1176 {
1177 error ("second argument to %<__builtin_prefetch%> must be a constant");
1178 arg1 = integer_zero_node;
1179 }
1180 op1 = expand_normal (arg1);
1181 /* Argument 1 must be either zero or one. */
1182 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1183 {
1184 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1185 " using zero");
1186 op1 = const0_rtx;
1187 }
1188
1189 /* Argument 2 (locality) must be a compile-time constant int. */
1190 if (TREE_CODE (arg2) != INTEGER_CST)
1191 {
1192 error ("third argument to %<__builtin_prefetch%> must be a constant");
1193 arg2 = integer_zero_node;
1194 }
1195 op2 = expand_normal (arg2);
1196 /* Argument 2 must be 0, 1, 2, or 3. */
1197 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1198 {
1199 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1200 op2 = const0_rtx;
1201 }
1202
1203 if (targetm.have_prefetch ())
1204 {
1205 struct expand_operand ops[3];
1206
1207 create_address_operand (&ops[0], op0);
1208 create_integer_operand (&ops[1], INTVAL (op1));
1209 create_integer_operand (&ops[2], INTVAL (op2));
1210 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1211 return;
1212 }
1213
1214 /* Don't do anything with direct references to volatile memory, but
1215 generate code to handle other side effects. */
1216 if (!MEM_P (op0) && side_effects_p (op0))
1217 emit_insn (op0);
1218 }
1219
1220 /* Get a MEM rtx for expression EXP which is the address of an operand
1221 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1222 the maximum length of the block of memory that might be accessed or
1223 NULL if unknown. */
1224
1225 static rtx
1226 get_memory_rtx (tree exp, tree len)
1227 {
1228 tree orig_exp = exp;
1229 rtx addr, mem;
1230
1231 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1232 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1233 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1234 exp = TREE_OPERAND (exp, 0);
1235
1236 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1237 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1238
1239 /* Get an expression we can use to find the attributes to assign to MEM.
1240 First remove any nops. */
1241 while (CONVERT_EXPR_P (exp)
1242 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1243 exp = TREE_OPERAND (exp, 0);
1244
1245 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1246 (as builtin stringops may alias with anything). */
1247 exp = fold_build2 (MEM_REF,
1248 build_array_type (char_type_node,
1249 build_range_type (sizetype,
1250 size_one_node, len)),
1251 exp, build_int_cst (ptr_type_node, 0));
1252
1253 /* If the MEM_REF has no acceptable address, try to get the base object
1254 from the original address we got, and build an all-aliasing
1255 unknown-sized access to that one. */
1256 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1257 set_mem_attributes (mem, exp, 0);
1258 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1259 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1260 0))))
1261 {
1262 exp = build_fold_addr_expr (exp);
1263 exp = fold_build2 (MEM_REF,
1264 build_array_type (char_type_node,
1265 build_range_type (sizetype,
1266 size_zero_node,
1267 NULL)),
1268 exp, build_int_cst (ptr_type_node, 0));
1269 set_mem_attributes (mem, exp, 0);
1270 }
1271 set_mem_alias_set (mem, 0);
1272 return mem;
1273 }
1274 \f
1275 /* Built-in functions to perform an untyped call and return. */
1276
1277 #define apply_args_mode \
1278 (this_target_builtins->x_apply_args_mode)
1279 #define apply_result_mode \
1280 (this_target_builtins->x_apply_result_mode)
1281
1282 /* Return the size required for the block returned by __builtin_apply_args,
1283 and initialize apply_args_mode. */
1284
1285 static int
1286 apply_args_size (void)
1287 {
1288 static int size = -1;
1289 int align;
1290 unsigned int regno;
1291 machine_mode mode;
1292
1293 /* The values computed by this function never change. */
1294 if (size < 0)
1295 {
1296 /* The first value is the incoming arg-pointer. */
1297 size = GET_MODE_SIZE (Pmode);
1298
1299 /* The second value is the structure value address unless this is
1300 passed as an "invisible" first argument. */
1301 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1302 size += GET_MODE_SIZE (Pmode);
1303
1304 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1305 if (FUNCTION_ARG_REGNO_P (regno))
1306 {
1307 mode = targetm.calls.get_raw_arg_mode (regno);
1308
1309 gcc_assert (mode != VOIDmode);
1310
1311 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1312 if (size % align != 0)
1313 size = CEIL (size, align) * align;
1314 size += GET_MODE_SIZE (mode);
1315 apply_args_mode[regno] = mode;
1316 }
1317 else
1318 {
1319 apply_args_mode[regno] = VOIDmode;
1320 }
1321 }
1322 return size;
1323 }
1324
1325 /* Return the size required for the block returned by __builtin_apply,
1326 and initialize apply_result_mode. */
1327
1328 static int
1329 apply_result_size (void)
1330 {
1331 static int size = -1;
1332 int align, regno;
1333 machine_mode mode;
1334
1335 /* The values computed by this function never change. */
1336 if (size < 0)
1337 {
1338 size = 0;
1339
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if (targetm.calls.function_value_regno_p (regno))
1342 {
1343 mode = targetm.calls.get_raw_result_mode (regno);
1344
1345 gcc_assert (mode != VOIDmode);
1346
1347 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1348 if (size % align != 0)
1349 size = CEIL (size, align) * align;
1350 size += GET_MODE_SIZE (mode);
1351 apply_result_mode[regno] = mode;
1352 }
1353 else
1354 apply_result_mode[regno] = VOIDmode;
1355
1356 /* Allow targets that use untyped_call and untyped_return to override
1357 the size so that machine-specific information can be stored here. */
1358 #ifdef APPLY_RESULT_SIZE
1359 size = APPLY_RESULT_SIZE;
1360 #endif
1361 }
1362 return size;
1363 }
1364
1365 /* Create a vector describing the result block RESULT. If SAVEP is true,
1366 the result block is used to save the values; otherwise it is used to
1367 restore the values. */
1368
1369 static rtx
1370 result_vector (int savep, rtx result)
1371 {
1372 int regno, size, align, nelts;
1373 machine_mode mode;
1374 rtx reg, mem;
1375 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1376
1377 size = nelts = 0;
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1380 {
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1385 mem = adjust_address (result, mode, size);
1386 savevec[nelts++] = (savep
1387 ? gen_rtx_SET (mem, reg)
1388 : gen_rtx_SET (reg, mem));
1389 size += GET_MODE_SIZE (mode);
1390 }
1391 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1392 }
1393
1394 /* Save the state required to perform an untyped call with the same
1395 arguments as were passed to the current function. */
1396
1397 static rtx
1398 expand_builtin_apply_args_1 (void)
1399 {
1400 rtx registers, tem;
1401 int size, align, regno;
1402 machine_mode mode;
1403 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1404
1405 /* Create a block where the arg-pointer, structure value address,
1406 and argument registers can be saved. */
1407 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1408
1409 /* Walk past the arg-pointer and structure value address. */
1410 size = GET_MODE_SIZE (Pmode);
1411 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1412 size += GET_MODE_SIZE (Pmode);
1413
1414 /* Save each register used in calling a function to the block. */
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1416 if ((mode = apply_args_mode[regno]) != VOIDmode)
1417 {
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
1421
1422 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1423
1424 emit_move_insn (adjust_address (registers, mode, size), tem);
1425 size += GET_MODE_SIZE (mode);
1426 }
1427
1428 /* Save the arg pointer to the block. */
1429 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1430 /* We need the pointer as the caller actually passed them to us, not
1431 as we might have pretended they were passed. Make sure it's a valid
1432 operand, as emit_move_insn isn't expected to handle a PLUS. */
1433 if (STACK_GROWS_DOWNWARD)
1434 tem
1435 = force_operand (plus_constant (Pmode, tem,
1436 crtl->args.pretend_args_size),
1437 NULL_RTX);
1438 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1439
1440 size = GET_MODE_SIZE (Pmode);
1441
1442 /* Save the structure value address unless this is passed as an
1443 "invisible" first argument. */
1444 if (struct_incoming_value)
1445 {
1446 emit_move_insn (adjust_address (registers, Pmode, size),
1447 copy_to_reg (struct_incoming_value));
1448 size += GET_MODE_SIZE (Pmode);
1449 }
1450
1451 /* Return the address of the block. */
1452 return copy_addr_to_reg (XEXP (registers, 0));
1453 }
1454
1455 /* __builtin_apply_args returns block of memory allocated on
1456 the stack into which is stored the arg pointer, structure
1457 value address, static chain, and all the registers that might
1458 possibly be used in performing a function call. The code is
1459 moved to the start of the function so the incoming values are
1460 saved. */
1461
1462 static rtx
1463 expand_builtin_apply_args (void)
1464 {
1465 /* Don't do __builtin_apply_args more than once in a function.
1466 Save the result of the first call and reuse it. */
1467 if (apply_args_value != 0)
1468 return apply_args_value;
1469 {
1470 /* When this function is called, it means that registers must be
1471 saved on entry to this function. So we migrate the
1472 call to the first insn of this function. */
1473 rtx temp;
1474
1475 start_sequence ();
1476 temp = expand_builtin_apply_args_1 ();
1477 rtx_insn *seq = get_insns ();
1478 end_sequence ();
1479
1480 apply_args_value = temp;
1481
1482 /* Put the insns after the NOTE that starts the function.
1483 If this is inside a start_sequence, make the outer-level insn
1484 chain current, so the code is placed at the start of the
1485 function. If internal_arg_pointer is a non-virtual pseudo,
1486 it needs to be placed after the function that initializes
1487 that pseudo. */
1488 push_topmost_sequence ();
1489 if (REG_P (crtl->args.internal_arg_pointer)
1490 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1491 emit_insn_before (seq, parm_birth_insn);
1492 else
1493 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1494 pop_topmost_sequence ();
1495 return temp;
1496 }
1497 }
1498
1499 /* Perform an untyped call and save the state required to perform an
1500 untyped return of whatever value was returned by the given function. */
1501
1502 static rtx
1503 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1504 {
1505 int size, align, regno;
1506 machine_mode mode;
1507 rtx incoming_args, result, reg, dest, src;
1508 rtx_call_insn *call_insn;
1509 rtx old_stack_level = 0;
1510 rtx call_fusage = 0;
1511 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1512
1513 arguments = convert_memory_address (Pmode, arguments);
1514
1515 /* Create a block where the return registers can be saved. */
1516 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1517
1518 /* Fetch the arg pointer from the ARGUMENTS block. */
1519 incoming_args = gen_reg_rtx (Pmode);
1520 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1521 if (!STACK_GROWS_DOWNWARD)
1522 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1523 incoming_args, 0, OPTAB_LIB_WIDEN);
1524
1525 /* Push a new argument block and copy the arguments. Do not allow
1526 the (potential) memcpy call below to interfere with our stack
1527 manipulations. */
1528 do_pending_stack_adjust ();
1529 NO_DEFER_POP;
1530
1531 /* Save the stack with nonlocal if available. */
1532 if (targetm.have_save_stack_nonlocal ())
1533 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1534 else
1535 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1536
1537 /* Allocate a block of memory onto the stack and copy the memory
1538 arguments to the outgoing arguments address. We can pass TRUE
1539 as the 4th argument because we just saved the stack pointer
1540 and will restore it right after the call. */
1541 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1542
1543 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1544 may have already set current_function_calls_alloca to true.
1545 current_function_calls_alloca won't be set if argsize is zero,
1546 so we have to guarantee need_drap is true here. */
1547 if (SUPPORTS_STACK_ALIGNMENT)
1548 crtl->need_drap = true;
1549
1550 dest = virtual_outgoing_args_rtx;
1551 if (!STACK_GROWS_DOWNWARD)
1552 {
1553 if (CONST_INT_P (argsize))
1554 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1555 else
1556 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1557 }
1558 dest = gen_rtx_MEM (BLKmode, dest);
1559 set_mem_align (dest, PARM_BOUNDARY);
1560 src = gen_rtx_MEM (BLKmode, incoming_args);
1561 set_mem_align (src, PARM_BOUNDARY);
1562 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1563
1564 /* Refer to the argument block. */
1565 apply_args_size ();
1566 arguments = gen_rtx_MEM (BLKmode, arguments);
1567 set_mem_align (arguments, PARM_BOUNDARY);
1568
1569 /* Walk past the arg-pointer and structure value address. */
1570 size = GET_MODE_SIZE (Pmode);
1571 if (struct_value)
1572 size += GET_MODE_SIZE (Pmode);
1573
1574 /* Restore each of the registers previously saved. Make USE insns
1575 for each of these registers for use in making the call. */
1576 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1577 if ((mode = apply_args_mode[regno]) != VOIDmode)
1578 {
1579 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1580 if (size % align != 0)
1581 size = CEIL (size, align) * align;
1582 reg = gen_rtx_REG (mode, regno);
1583 emit_move_insn (reg, adjust_address (arguments, mode, size));
1584 use_reg (&call_fusage, reg);
1585 size += GET_MODE_SIZE (mode);
1586 }
1587
1588 /* Restore the structure value address unless this is passed as an
1589 "invisible" first argument. */
1590 size = GET_MODE_SIZE (Pmode);
1591 if (struct_value)
1592 {
1593 rtx value = gen_reg_rtx (Pmode);
1594 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1595 emit_move_insn (struct_value, value);
1596 if (REG_P (struct_value))
1597 use_reg (&call_fusage, struct_value);
1598 size += GET_MODE_SIZE (Pmode);
1599 }
1600
1601 /* All arguments and registers used for the call are set up by now! */
1602 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1603
1604 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1605 and we don't want to load it into a register as an optimization,
1606 because prepare_call_address already did it if it should be done. */
1607 if (GET_CODE (function) != SYMBOL_REF)
1608 function = memory_address (FUNCTION_MODE, function);
1609
1610 /* Generate the actual call instruction and save the return value. */
1611 if (targetm.have_untyped_call ())
1612 {
1613 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1614 emit_call_insn (targetm.gen_untyped_call (mem, result,
1615 result_vector (1, result)));
1616 }
1617 else if (targetm.have_call_value ())
1618 {
1619 rtx valreg = 0;
1620
1621 /* Locate the unique return register. It is not possible to
1622 express a call that sets more than one return register using
1623 call_value; use untyped_call for that. In fact, untyped_call
1624 only needs to save the return registers in the given block. */
1625 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1626 if ((mode = apply_result_mode[regno]) != VOIDmode)
1627 {
1628 gcc_assert (!valreg); /* have_untyped_call required. */
1629
1630 valreg = gen_rtx_REG (mode, regno);
1631 }
1632
1633 emit_insn (targetm.gen_call_value (valreg,
1634 gen_rtx_MEM (FUNCTION_MODE, function),
1635 const0_rtx, NULL_RTX, const0_rtx));
1636
1637 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1638 }
1639 else
1640 gcc_unreachable ();
1641
1642 /* Find the CALL insn we just emitted, and attach the register usage
1643 information. */
1644 call_insn = last_call_insn ();
1645 add_function_usage_to (call_insn, call_fusage);
1646
1647 /* Restore the stack. */
1648 if (targetm.have_save_stack_nonlocal ())
1649 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1650 else
1651 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1652 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1653
1654 OK_DEFER_POP;
1655
1656 /* Return the address of the result block. */
1657 result = copy_addr_to_reg (XEXP (result, 0));
1658 return convert_memory_address (ptr_mode, result);
1659 }
1660
1661 /* Perform an untyped return. */
1662
1663 static void
1664 expand_builtin_return (rtx result)
1665 {
1666 int size, align, regno;
1667 machine_mode mode;
1668 rtx reg;
1669 rtx_insn *call_fusage = 0;
1670
1671 result = convert_memory_address (Pmode, result);
1672
1673 apply_result_size ();
1674 result = gen_rtx_MEM (BLKmode, result);
1675
1676 if (targetm.have_untyped_return ())
1677 {
1678 rtx vector = result_vector (0, result);
1679 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1680 emit_barrier ();
1681 return;
1682 }
1683
1684 /* Restore the return value and note that each value is used. */
1685 size = 0;
1686 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1687 if ((mode = apply_result_mode[regno]) != VOIDmode)
1688 {
1689 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1690 if (size % align != 0)
1691 size = CEIL (size, align) * align;
1692 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1693 emit_move_insn (reg, adjust_address (result, mode, size));
1694
1695 push_to_sequence (call_fusage);
1696 emit_use (reg);
1697 call_fusage = get_insns ();
1698 end_sequence ();
1699 size += GET_MODE_SIZE (mode);
1700 }
1701
1702 /* Put the USE insns before the return. */
1703 emit_insn (call_fusage);
1704
1705 /* Return whatever values was restored by jumping directly to the end
1706 of the function. */
1707 expand_naked_return ();
1708 }
1709
1710 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1711
1712 static enum type_class
1713 type_to_class (tree type)
1714 {
1715 switch (TREE_CODE (type))
1716 {
1717 case VOID_TYPE: return void_type_class;
1718 case INTEGER_TYPE: return integer_type_class;
1719 case ENUMERAL_TYPE: return enumeral_type_class;
1720 case BOOLEAN_TYPE: return boolean_type_class;
1721 case POINTER_TYPE: return pointer_type_class;
1722 case REFERENCE_TYPE: return reference_type_class;
1723 case OFFSET_TYPE: return offset_type_class;
1724 case REAL_TYPE: return real_type_class;
1725 case COMPLEX_TYPE: return complex_type_class;
1726 case FUNCTION_TYPE: return function_type_class;
1727 case METHOD_TYPE: return method_type_class;
1728 case RECORD_TYPE: return record_type_class;
1729 case UNION_TYPE:
1730 case QUAL_UNION_TYPE: return union_type_class;
1731 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1732 ? string_type_class : array_type_class);
1733 case LANG_TYPE: return lang_type_class;
1734 default: return no_type_class;
1735 }
1736 }
1737
1738 /* Expand a call EXP to __builtin_classify_type. */
1739
1740 static rtx
1741 expand_builtin_classify_type (tree exp)
1742 {
1743 if (call_expr_nargs (exp))
1744 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1745 return GEN_INT (no_type_class);
1746 }
1747
1748 /* This helper macro, meant to be used in mathfn_built_in below,
1749 determines which among a set of three builtin math functions is
1750 appropriate for a given type mode. The `F' and `L' cases are
1751 automatically generated from the `double' case. */
1752 #define CASE_MATHFN(MATHFN) \
1753 CASE_CFN_##MATHFN: \
1754 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1755 fcodel = BUILT_IN_##MATHFN##L ; break;
1756 /* Similar to above, but appends _R after any F/L suffix. */
1757 #define CASE_MATHFN_REENT(MATHFN) \
1758 case CFN_BUILT_IN_##MATHFN##_R: \
1759 case CFN_BUILT_IN_##MATHFN##F_R: \
1760 case CFN_BUILT_IN_##MATHFN##L_R: \
1761 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1762 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1763
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on function codes; it does not guarantee
1767 that the target actually has an implementation of the function. */
1768
1769 static built_in_function
1770 mathfn_built_in_2 (tree type, combined_fn fn)
1771 {
1772 built_in_function fcode, fcodef, fcodel;
1773
1774 switch (fn)
1775 {
1776 CASE_MATHFN (ACOS)
1777 CASE_MATHFN (ACOSH)
1778 CASE_MATHFN (ASIN)
1779 CASE_MATHFN (ASINH)
1780 CASE_MATHFN (ATAN)
1781 CASE_MATHFN (ATAN2)
1782 CASE_MATHFN (ATANH)
1783 CASE_MATHFN (CBRT)
1784 CASE_MATHFN (CEIL)
1785 CASE_MATHFN (CEXPI)
1786 CASE_MATHFN (COPYSIGN)
1787 CASE_MATHFN (COS)
1788 CASE_MATHFN (COSH)
1789 CASE_MATHFN (DREM)
1790 CASE_MATHFN (ERF)
1791 CASE_MATHFN (ERFC)
1792 CASE_MATHFN (EXP)
1793 CASE_MATHFN (EXP10)
1794 CASE_MATHFN (EXP2)
1795 CASE_MATHFN (EXPM1)
1796 CASE_MATHFN (FABS)
1797 CASE_MATHFN (FDIM)
1798 CASE_MATHFN (FLOOR)
1799 CASE_MATHFN (FMA)
1800 CASE_MATHFN (FMAX)
1801 CASE_MATHFN (FMIN)
1802 CASE_MATHFN (FMOD)
1803 CASE_MATHFN (FREXP)
1804 CASE_MATHFN (GAMMA)
1805 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1806 CASE_MATHFN (HUGE_VAL)
1807 CASE_MATHFN (HYPOT)
1808 CASE_MATHFN (ILOGB)
1809 CASE_MATHFN (ICEIL)
1810 CASE_MATHFN (IFLOOR)
1811 CASE_MATHFN (INF)
1812 CASE_MATHFN (IRINT)
1813 CASE_MATHFN (IROUND)
1814 CASE_MATHFN (ISINF)
1815 CASE_MATHFN (J0)
1816 CASE_MATHFN (J1)
1817 CASE_MATHFN (JN)
1818 CASE_MATHFN (LCEIL)
1819 CASE_MATHFN (LDEXP)
1820 CASE_MATHFN (LFLOOR)
1821 CASE_MATHFN (LGAMMA)
1822 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1823 CASE_MATHFN (LLCEIL)
1824 CASE_MATHFN (LLFLOOR)
1825 CASE_MATHFN (LLRINT)
1826 CASE_MATHFN (LLROUND)
1827 CASE_MATHFN (LOG)
1828 CASE_MATHFN (LOG10)
1829 CASE_MATHFN (LOG1P)
1830 CASE_MATHFN (LOG2)
1831 CASE_MATHFN (LOGB)
1832 CASE_MATHFN (LRINT)
1833 CASE_MATHFN (LROUND)
1834 CASE_MATHFN (MODF)
1835 CASE_MATHFN (NAN)
1836 CASE_MATHFN (NANS)
1837 CASE_MATHFN (NEARBYINT)
1838 CASE_MATHFN (NEXTAFTER)
1839 CASE_MATHFN (NEXTTOWARD)
1840 CASE_MATHFN (POW)
1841 CASE_MATHFN (POWI)
1842 CASE_MATHFN (POW10)
1843 CASE_MATHFN (REMAINDER)
1844 CASE_MATHFN (REMQUO)
1845 CASE_MATHFN (RINT)
1846 CASE_MATHFN (ROUND)
1847 CASE_MATHFN (SCALB)
1848 CASE_MATHFN (SCALBLN)
1849 CASE_MATHFN (SCALBN)
1850 CASE_MATHFN (SIGNBIT)
1851 CASE_MATHFN (SIGNIFICAND)
1852 CASE_MATHFN (SIN)
1853 CASE_MATHFN (SINCOS)
1854 CASE_MATHFN (SINH)
1855 CASE_MATHFN (SQRT)
1856 CASE_MATHFN (TAN)
1857 CASE_MATHFN (TANH)
1858 CASE_MATHFN (TGAMMA)
1859 CASE_MATHFN (TRUNC)
1860 CASE_MATHFN (Y0)
1861 CASE_MATHFN (Y1)
1862 CASE_MATHFN (YN)
1863
1864 default:
1865 return END_BUILTINS;
1866 }
1867
1868 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1869 return fcode;
1870 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1871 return fcodef;
1872 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1873 return fcodel;
1874 else
1875 return END_BUILTINS;
1876 }
1877
1878 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1879 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1880 otherwise use the explicit declaration. If we can't do the conversion,
1881 return null. */
1882
1883 static tree
1884 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1885 {
1886 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1887 if (fcode2 == END_BUILTINS)
1888 return NULL_TREE;
1889
1890 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1891 return NULL_TREE;
1892
1893 return builtin_decl_explicit (fcode2);
1894 }
1895
1896 /* Like mathfn_built_in_1, but always use the implicit array. */
1897
1898 tree
1899 mathfn_built_in (tree type, combined_fn fn)
1900 {
1901 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1902 }
1903
1904 /* Like mathfn_built_in_1, but take a built_in_function and
1905 always use the implicit array. */
1906
1907 tree
1908 mathfn_built_in (tree type, enum built_in_function fn)
1909 {
1910 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1911 }
1912
1913 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1914 return its code, otherwise return IFN_LAST. Note that this function
1915 only tests whether the function is defined in internals.def, not whether
1916 it is actually available on the target. */
1917
1918 internal_fn
1919 associated_internal_fn (tree fndecl)
1920 {
1921 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1922 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1923 switch (DECL_FUNCTION_CODE (fndecl))
1924 {
1925 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1926 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1927 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1928 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1929 #include "internal-fn.def"
1930
1931 CASE_FLT_FN (BUILT_IN_POW10):
1932 return IFN_EXP10;
1933
1934 CASE_FLT_FN (BUILT_IN_DREM):
1935 return IFN_REMAINDER;
1936
1937 CASE_FLT_FN (BUILT_IN_SCALBN):
1938 CASE_FLT_FN (BUILT_IN_SCALBLN):
1939 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1940 return IFN_LDEXP;
1941 return IFN_LAST;
1942
1943 default:
1944 return IFN_LAST;
1945 }
1946 }
1947
1948 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1949 on the current target by a call to an internal function, return the
1950 code of that internal function, otherwise return IFN_LAST. The caller
1951 is responsible for ensuring that any side-effects of the built-in
1952 call are dealt with correctly. E.g. if CALL sets errno, the caller
1953 must decide that the errno result isn't needed or make it available
1954 in some other way. */
1955
1956 internal_fn
1957 replacement_internal_fn (gcall *call)
1958 {
1959 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1960 {
1961 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1962 if (ifn != IFN_LAST)
1963 {
1964 tree_pair types = direct_internal_fn_types (ifn, call);
1965 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1966 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1967 return ifn;
1968 }
1969 }
1970 return IFN_LAST;
1971 }
1972
1973 /* Expand a call to the builtin trinary math functions (fma).
1974 Return NULL_RTX if a normal call should be emitted rather than expanding the
1975 function in-line. EXP is the expression that is a call to the builtin
1976 function; if convenient, the result should be placed in TARGET.
1977 SUBTARGET may be used as the target for computing one of EXP's
1978 operands. */
1979
1980 static rtx
1981 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1982 {
1983 optab builtin_optab;
1984 rtx op0, op1, op2, result;
1985 rtx_insn *insns;
1986 tree fndecl = get_callee_fndecl (exp);
1987 tree arg0, arg1, arg2;
1988 machine_mode mode;
1989
1990 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1991 return NULL_RTX;
1992
1993 arg0 = CALL_EXPR_ARG (exp, 0);
1994 arg1 = CALL_EXPR_ARG (exp, 1);
1995 arg2 = CALL_EXPR_ARG (exp, 2);
1996
1997 switch (DECL_FUNCTION_CODE (fndecl))
1998 {
1999 CASE_FLT_FN (BUILT_IN_FMA):
2000 builtin_optab = fma_optab; break;
2001 default:
2002 gcc_unreachable ();
2003 }
2004
2005 /* Make a suitable register to place result in. */
2006 mode = TYPE_MODE (TREE_TYPE (exp));
2007
2008 /* Before working hard, check whether the instruction is available. */
2009 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2010 return NULL_RTX;
2011
2012 result = gen_reg_rtx (mode);
2013
2014 /* Always stabilize the argument list. */
2015 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2016 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2017 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2018
2019 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2020 op1 = expand_normal (arg1);
2021 op2 = expand_normal (arg2);
2022
2023 start_sequence ();
2024
2025 /* Compute into RESULT.
2026 Set RESULT to wherever the result comes back. */
2027 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2028 result, 0);
2029
2030 /* If we were unable to expand via the builtin, stop the sequence
2031 (without outputting the insns) and call to the library function
2032 with the stabilized argument list. */
2033 if (result == 0)
2034 {
2035 end_sequence ();
2036 return expand_call (exp, target, target == const0_rtx);
2037 }
2038
2039 /* Output the entire sequence. */
2040 insns = get_insns ();
2041 end_sequence ();
2042 emit_insn (insns);
2043
2044 return result;
2045 }
2046
2047 /* Expand a call to the builtin sin and cos math functions.
2048 Return NULL_RTX if a normal call should be emitted rather than expanding the
2049 function in-line. EXP is the expression that is a call to the builtin
2050 function; if convenient, the result should be placed in TARGET.
2051 SUBTARGET may be used as the target for computing one of EXP's
2052 operands. */
2053
2054 static rtx
2055 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2056 {
2057 optab builtin_optab;
2058 rtx op0;
2059 rtx_insn *insns;
2060 tree fndecl = get_callee_fndecl (exp);
2061 machine_mode mode;
2062 tree arg;
2063
2064 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2065 return NULL_RTX;
2066
2067 arg = CALL_EXPR_ARG (exp, 0);
2068
2069 switch (DECL_FUNCTION_CODE (fndecl))
2070 {
2071 CASE_FLT_FN (BUILT_IN_SIN):
2072 CASE_FLT_FN (BUILT_IN_COS):
2073 builtin_optab = sincos_optab; break;
2074 default:
2075 gcc_unreachable ();
2076 }
2077
2078 /* Make a suitable register to place result in. */
2079 mode = TYPE_MODE (TREE_TYPE (exp));
2080
2081 /* Check if sincos insn is available, otherwise fallback
2082 to sin or cos insn. */
2083 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2084 switch (DECL_FUNCTION_CODE (fndecl))
2085 {
2086 CASE_FLT_FN (BUILT_IN_SIN):
2087 builtin_optab = sin_optab; break;
2088 CASE_FLT_FN (BUILT_IN_COS):
2089 builtin_optab = cos_optab; break;
2090 default:
2091 gcc_unreachable ();
2092 }
2093
2094 /* Before working hard, check whether the instruction is available. */
2095 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2096 {
2097 rtx result = gen_reg_rtx (mode);
2098
2099 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2100 need to expand the argument again. This way, we will not perform
2101 side-effects more the once. */
2102 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2103
2104 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2105
2106 start_sequence ();
2107
2108 /* Compute into RESULT.
2109 Set RESULT to wherever the result comes back. */
2110 if (builtin_optab == sincos_optab)
2111 {
2112 int ok;
2113
2114 switch (DECL_FUNCTION_CODE (fndecl))
2115 {
2116 CASE_FLT_FN (BUILT_IN_SIN):
2117 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2118 break;
2119 CASE_FLT_FN (BUILT_IN_COS):
2120 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2121 break;
2122 default:
2123 gcc_unreachable ();
2124 }
2125 gcc_assert (ok);
2126 }
2127 else
2128 result = expand_unop (mode, builtin_optab, op0, result, 0);
2129
2130 if (result != 0)
2131 {
2132 /* Output the entire sequence. */
2133 insns = get_insns ();
2134 end_sequence ();
2135 emit_insn (insns);
2136 return result;
2137 }
2138
2139 /* If we were unable to expand via the builtin, stop the sequence
2140 (without outputting the insns) and call to the library function
2141 with the stabilized argument list. */
2142 end_sequence ();
2143 }
2144
2145 return expand_call (exp, target, target == const0_rtx);
2146 }
2147
2148 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2149 return an RTL instruction code that implements the functionality.
2150 If that isn't possible or available return CODE_FOR_nothing. */
2151
2152 static enum insn_code
2153 interclass_mathfn_icode (tree arg, tree fndecl)
2154 {
2155 bool errno_set = false;
2156 optab builtin_optab = unknown_optab;
2157 machine_mode mode;
2158
2159 switch (DECL_FUNCTION_CODE (fndecl))
2160 {
2161 CASE_FLT_FN (BUILT_IN_ILOGB):
2162 errno_set = true; builtin_optab = ilogb_optab; break;
2163 CASE_FLT_FN (BUILT_IN_ISINF):
2164 builtin_optab = isinf_optab; break;
2165 case BUILT_IN_ISNORMAL:
2166 case BUILT_IN_ISFINITE:
2167 CASE_FLT_FN (BUILT_IN_FINITE):
2168 case BUILT_IN_FINITED32:
2169 case BUILT_IN_FINITED64:
2170 case BUILT_IN_FINITED128:
2171 case BUILT_IN_ISINFD32:
2172 case BUILT_IN_ISINFD64:
2173 case BUILT_IN_ISINFD128:
2174 /* These builtins have no optabs (yet). */
2175 break;
2176 default:
2177 gcc_unreachable ();
2178 }
2179
2180 /* There's no easy way to detect the case we need to set EDOM. */
2181 if (flag_errno_math && errno_set)
2182 return CODE_FOR_nothing;
2183
2184 /* Optab mode depends on the mode of the input argument. */
2185 mode = TYPE_MODE (TREE_TYPE (arg));
2186
2187 if (builtin_optab)
2188 return optab_handler (builtin_optab, mode);
2189 return CODE_FOR_nothing;
2190 }
2191
2192 /* Expand a call to one of the builtin math functions that operate on
2193 floating point argument and output an integer result (ilogb, isinf,
2194 isnan, etc).
2195 Return 0 if a normal call should be emitted rather than expanding the
2196 function in-line. EXP is the expression that is a call to the builtin
2197 function; if convenient, the result should be placed in TARGET. */
2198
2199 static rtx
2200 expand_builtin_interclass_mathfn (tree exp, rtx target)
2201 {
2202 enum insn_code icode = CODE_FOR_nothing;
2203 rtx op0;
2204 tree fndecl = get_callee_fndecl (exp);
2205 machine_mode mode;
2206 tree arg;
2207
2208 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2209 return NULL_RTX;
2210
2211 arg = CALL_EXPR_ARG (exp, 0);
2212 icode = interclass_mathfn_icode (arg, fndecl);
2213 mode = TYPE_MODE (TREE_TYPE (arg));
2214
2215 if (icode != CODE_FOR_nothing)
2216 {
2217 struct expand_operand ops[1];
2218 rtx_insn *last = get_last_insn ();
2219 tree orig_arg = arg;
2220
2221 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2222 need to expand the argument again. This way, we will not perform
2223 side-effects more the once. */
2224 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2225
2226 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2227
2228 if (mode != GET_MODE (op0))
2229 op0 = convert_to_mode (mode, op0, 0);
2230
2231 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2232 if (maybe_legitimize_operands (icode, 0, 1, ops)
2233 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2234 return ops[0].value;
2235
2236 delete_insns_since (last);
2237 CALL_EXPR_ARG (exp, 0) = orig_arg;
2238 }
2239
2240 return NULL_RTX;
2241 }
2242
2243 /* Expand a call to the builtin sincos math function.
2244 Return NULL_RTX if a normal call should be emitted rather than expanding the
2245 function in-line. EXP is the expression that is a call to the builtin
2246 function. */
2247
2248 static rtx
2249 expand_builtin_sincos (tree exp)
2250 {
2251 rtx op0, op1, op2, target1, target2;
2252 machine_mode mode;
2253 tree arg, sinp, cosp;
2254 int result;
2255 location_t loc = EXPR_LOCATION (exp);
2256 tree alias_type, alias_off;
2257
2258 if (!validate_arglist (exp, REAL_TYPE,
2259 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2260 return NULL_RTX;
2261
2262 arg = CALL_EXPR_ARG (exp, 0);
2263 sinp = CALL_EXPR_ARG (exp, 1);
2264 cosp = CALL_EXPR_ARG (exp, 2);
2265
2266 /* Make a suitable register to place result in. */
2267 mode = TYPE_MODE (TREE_TYPE (arg));
2268
2269 /* Check if sincos insn is available, otherwise emit the call. */
2270 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2271 return NULL_RTX;
2272
2273 target1 = gen_reg_rtx (mode);
2274 target2 = gen_reg_rtx (mode);
2275
2276 op0 = expand_normal (arg);
2277 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2278 alias_off = build_int_cst (alias_type, 0);
2279 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2280 sinp, alias_off));
2281 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2282 cosp, alias_off));
2283
2284 /* Compute into target1 and target2.
2285 Set TARGET to wherever the result comes back. */
2286 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2287 gcc_assert (result);
2288
2289 /* Move target1 and target2 to the memory locations indicated
2290 by op1 and op2. */
2291 emit_move_insn (op1, target1);
2292 emit_move_insn (op2, target2);
2293
2294 return const0_rtx;
2295 }
2296
2297 /* Expand a call to the internal cexpi builtin to the sincos math function.
2298 EXP is the expression that is a call to the builtin function; if convenient,
2299 the result should be placed in TARGET. */
2300
2301 static rtx
2302 expand_builtin_cexpi (tree exp, rtx target)
2303 {
2304 tree fndecl = get_callee_fndecl (exp);
2305 tree arg, type;
2306 machine_mode mode;
2307 rtx op0, op1, op2;
2308 location_t loc = EXPR_LOCATION (exp);
2309
2310 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2311 return NULL_RTX;
2312
2313 arg = CALL_EXPR_ARG (exp, 0);
2314 type = TREE_TYPE (arg);
2315 mode = TYPE_MODE (TREE_TYPE (arg));
2316
2317 /* Try expanding via a sincos optab, fall back to emitting a libcall
2318 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2319 is only generated from sincos, cexp or if we have either of them. */
2320 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2321 {
2322 op1 = gen_reg_rtx (mode);
2323 op2 = gen_reg_rtx (mode);
2324
2325 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2326
2327 /* Compute into op1 and op2. */
2328 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2329 }
2330 else if (targetm.libc_has_function (function_sincos))
2331 {
2332 tree call, fn = NULL_TREE;
2333 tree top1, top2;
2334 rtx op1a, op2a;
2335
2336 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2337 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2338 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2339 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2340 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2341 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2342 else
2343 gcc_unreachable ();
2344
2345 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2346 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2347 op1a = copy_addr_to_reg (XEXP (op1, 0));
2348 op2a = copy_addr_to_reg (XEXP (op2, 0));
2349 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2350 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2351
2352 /* Make sure not to fold the sincos call again. */
2353 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2354 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2355 call, 3, arg, top1, top2));
2356 }
2357 else
2358 {
2359 tree call, fn = NULL_TREE, narg;
2360 tree ctype = build_complex_type (type);
2361
2362 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2363 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2364 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2365 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2366 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2367 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2368 else
2369 gcc_unreachable ();
2370
2371 /* If we don't have a decl for cexp create one. This is the
2372 friendliest fallback if the user calls __builtin_cexpi
2373 without full target C99 function support. */
2374 if (fn == NULL_TREE)
2375 {
2376 tree fntype;
2377 const char *name = NULL;
2378
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 name = "cexpf";
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 name = "cexp";
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 name = "cexpl";
2385
2386 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2387 fn = build_fn_decl (name, fntype);
2388 }
2389
2390 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2391 build_real (type, dconst0), arg);
2392
2393 /* Make sure not to fold the cexp call again. */
2394 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2395 return expand_expr (build_call_nary (ctype, call, 1, narg),
2396 target, VOIDmode, EXPAND_NORMAL);
2397 }
2398
2399 /* Now build the proper return type. */
2400 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2401 make_tree (TREE_TYPE (arg), op2),
2402 make_tree (TREE_TYPE (arg), op1)),
2403 target, VOIDmode, EXPAND_NORMAL);
2404 }
2405
2406 /* Conveniently construct a function call expression. FNDECL names the
2407 function to be called, N is the number of arguments, and the "..."
2408 parameters are the argument expressions. Unlike build_call_exr
2409 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2410
2411 static tree
2412 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2413 {
2414 va_list ap;
2415 tree fntype = TREE_TYPE (fndecl);
2416 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2417
2418 va_start (ap, n);
2419 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2420 va_end (ap);
2421 SET_EXPR_LOCATION (fn, loc);
2422 return fn;
2423 }
2424
2425 /* Expand a call to one of the builtin rounding functions gcc defines
2426 as an extension (lfloor and lceil). As these are gcc extensions we
2427 do not need to worry about setting errno to EDOM.
2428 If expanding via optab fails, lower expression to (int)(floor(x)).
2429 EXP is the expression that is a call to the builtin function;
2430 if convenient, the result should be placed in TARGET. */
2431
2432 static rtx
2433 expand_builtin_int_roundingfn (tree exp, rtx target)
2434 {
2435 convert_optab builtin_optab;
2436 rtx op0, tmp;
2437 rtx_insn *insns;
2438 tree fndecl = get_callee_fndecl (exp);
2439 enum built_in_function fallback_fn;
2440 tree fallback_fndecl;
2441 machine_mode mode;
2442 tree arg;
2443
2444 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2445 gcc_unreachable ();
2446
2447 arg = CALL_EXPR_ARG (exp, 0);
2448
2449 switch (DECL_FUNCTION_CODE (fndecl))
2450 {
2451 CASE_FLT_FN (BUILT_IN_ICEIL):
2452 CASE_FLT_FN (BUILT_IN_LCEIL):
2453 CASE_FLT_FN (BUILT_IN_LLCEIL):
2454 builtin_optab = lceil_optab;
2455 fallback_fn = BUILT_IN_CEIL;
2456 break;
2457
2458 CASE_FLT_FN (BUILT_IN_IFLOOR):
2459 CASE_FLT_FN (BUILT_IN_LFLOOR):
2460 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2461 builtin_optab = lfloor_optab;
2462 fallback_fn = BUILT_IN_FLOOR;
2463 break;
2464
2465 default:
2466 gcc_unreachable ();
2467 }
2468
2469 /* Make a suitable register to place result in. */
2470 mode = TYPE_MODE (TREE_TYPE (exp));
2471
2472 target = gen_reg_rtx (mode);
2473
2474 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2475 need to expand the argument again. This way, we will not perform
2476 side-effects more the once. */
2477 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2478
2479 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2480
2481 start_sequence ();
2482
2483 /* Compute into TARGET. */
2484 if (expand_sfix_optab (target, op0, builtin_optab))
2485 {
2486 /* Output the entire sequence. */
2487 insns = get_insns ();
2488 end_sequence ();
2489 emit_insn (insns);
2490 return target;
2491 }
2492
2493 /* If we were unable to expand via the builtin, stop the sequence
2494 (without outputting the insns). */
2495 end_sequence ();
2496
2497 /* Fall back to floating point rounding optab. */
2498 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2499
2500 /* For non-C99 targets we may end up without a fallback fndecl here
2501 if the user called __builtin_lfloor directly. In this case emit
2502 a call to the floor/ceil variants nevertheless. This should result
2503 in the best user experience for not full C99 targets. */
2504 if (fallback_fndecl == NULL_TREE)
2505 {
2506 tree fntype;
2507 const char *name = NULL;
2508
2509 switch (DECL_FUNCTION_CODE (fndecl))
2510 {
2511 case BUILT_IN_ICEIL:
2512 case BUILT_IN_LCEIL:
2513 case BUILT_IN_LLCEIL:
2514 name = "ceil";
2515 break;
2516 case BUILT_IN_ICEILF:
2517 case BUILT_IN_LCEILF:
2518 case BUILT_IN_LLCEILF:
2519 name = "ceilf";
2520 break;
2521 case BUILT_IN_ICEILL:
2522 case BUILT_IN_LCEILL:
2523 case BUILT_IN_LLCEILL:
2524 name = "ceill";
2525 break;
2526 case BUILT_IN_IFLOOR:
2527 case BUILT_IN_LFLOOR:
2528 case BUILT_IN_LLFLOOR:
2529 name = "floor";
2530 break;
2531 case BUILT_IN_IFLOORF:
2532 case BUILT_IN_LFLOORF:
2533 case BUILT_IN_LLFLOORF:
2534 name = "floorf";
2535 break;
2536 case BUILT_IN_IFLOORL:
2537 case BUILT_IN_LFLOORL:
2538 case BUILT_IN_LLFLOORL:
2539 name = "floorl";
2540 break;
2541 default:
2542 gcc_unreachable ();
2543 }
2544
2545 fntype = build_function_type_list (TREE_TYPE (arg),
2546 TREE_TYPE (arg), NULL_TREE);
2547 fallback_fndecl = build_fn_decl (name, fntype);
2548 }
2549
2550 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2551
2552 tmp = expand_normal (exp);
2553 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2554
2555 /* Truncate the result of floating point optab to integer
2556 via expand_fix (). */
2557 target = gen_reg_rtx (mode);
2558 expand_fix (target, tmp, 0);
2559
2560 return target;
2561 }
2562
2563 /* Expand a call to one of the builtin math functions doing integer
2564 conversion (lrint).
2565 Return 0 if a normal call should be emitted rather than expanding the
2566 function in-line. EXP is the expression that is a call to the builtin
2567 function; if convenient, the result should be placed in TARGET. */
2568
2569 static rtx
2570 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2571 {
2572 convert_optab builtin_optab;
2573 rtx op0;
2574 rtx_insn *insns;
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg;
2577 machine_mode mode;
2578 enum built_in_function fallback_fn = BUILT_IN_NONE;
2579
2580 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2581 gcc_unreachable ();
2582
2583 arg = CALL_EXPR_ARG (exp, 0);
2584
2585 switch (DECL_FUNCTION_CODE (fndecl))
2586 {
2587 CASE_FLT_FN (BUILT_IN_IRINT):
2588 fallback_fn = BUILT_IN_LRINT;
2589 /* FALLTHRU */
2590 CASE_FLT_FN (BUILT_IN_LRINT):
2591 CASE_FLT_FN (BUILT_IN_LLRINT):
2592 builtin_optab = lrint_optab;
2593 break;
2594
2595 CASE_FLT_FN (BUILT_IN_IROUND):
2596 fallback_fn = BUILT_IN_LROUND;
2597 /* FALLTHRU */
2598 CASE_FLT_FN (BUILT_IN_LROUND):
2599 CASE_FLT_FN (BUILT_IN_LLROUND):
2600 builtin_optab = lround_optab;
2601 break;
2602
2603 default:
2604 gcc_unreachable ();
2605 }
2606
2607 /* There's no easy way to detect the case we need to set EDOM. */
2608 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2609 return NULL_RTX;
2610
2611 /* Make a suitable register to place result in. */
2612 mode = TYPE_MODE (TREE_TYPE (exp));
2613
2614 /* There's no easy way to detect the case we need to set EDOM. */
2615 if (!flag_errno_math)
2616 {
2617 rtx result = gen_reg_rtx (mode);
2618
2619 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2620 need to expand the argument again. This way, we will not perform
2621 side-effects more the once. */
2622 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2623
2624 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2625
2626 start_sequence ();
2627
2628 if (expand_sfix_optab (result, op0, builtin_optab))
2629 {
2630 /* Output the entire sequence. */
2631 insns = get_insns ();
2632 end_sequence ();
2633 emit_insn (insns);
2634 return result;
2635 }
2636
2637 /* If we were unable to expand via the builtin, stop the sequence
2638 (without outputting the insns) and call to the library function
2639 with the stabilized argument list. */
2640 end_sequence ();
2641 }
2642
2643 if (fallback_fn != BUILT_IN_NONE)
2644 {
2645 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2646 targets, (int) round (x) should never be transformed into
2647 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2648 a call to lround in the hope that the target provides at least some
2649 C99 functions. This should result in the best user experience for
2650 not full C99 targets. */
2651 tree fallback_fndecl = mathfn_built_in_1
2652 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2653
2654 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2655 fallback_fndecl, 1, arg);
2656
2657 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2658 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2659 return convert_to_mode (mode, target, 0);
2660 }
2661
2662 return expand_call (exp, target, target == const0_rtx);
2663 }
2664
2665 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2666 a normal call should be emitted rather than expanding the function
2667 in-line. EXP is the expression that is a call to the builtin
2668 function; if convenient, the result should be placed in TARGET. */
2669
2670 static rtx
2671 expand_builtin_powi (tree exp, rtx target)
2672 {
2673 tree arg0, arg1;
2674 rtx op0, op1;
2675 machine_mode mode;
2676 machine_mode mode2;
2677
2678 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2679 return NULL_RTX;
2680
2681 arg0 = CALL_EXPR_ARG (exp, 0);
2682 arg1 = CALL_EXPR_ARG (exp, 1);
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2684
2685 /* Emit a libcall to libgcc. */
2686
2687 /* Mode of the 2nd argument must match that of an int. */
2688 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2689
2690 if (target == NULL_RTX)
2691 target = gen_reg_rtx (mode);
2692
2693 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2694 if (GET_MODE (op0) != mode)
2695 op0 = convert_to_mode (mode, op0, 0);
2696 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2697 if (GET_MODE (op1) != mode2)
2698 op1 = convert_to_mode (mode2, op1, 0);
2699
2700 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2701 target, LCT_CONST, mode, 2,
2702 op0, mode, op1, mode2);
2703
2704 return target;
2705 }
2706
2707 /* Expand expression EXP which is a call to the strlen builtin. Return
2708 NULL_RTX if we failed the caller should emit a normal call, otherwise
2709 try to get the result in TARGET, if convenient. */
2710
2711 static rtx
2712 expand_builtin_strlen (tree exp, rtx target,
2713 machine_mode target_mode)
2714 {
2715 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2716 return NULL_RTX;
2717 else
2718 {
2719 struct expand_operand ops[4];
2720 rtx pat;
2721 tree len;
2722 tree src = CALL_EXPR_ARG (exp, 0);
2723 rtx src_reg;
2724 rtx_insn *before_strlen;
2725 machine_mode insn_mode = target_mode;
2726 enum insn_code icode = CODE_FOR_nothing;
2727 unsigned int align;
2728
2729 /* If the length can be computed at compile-time, return it. */
2730 len = c_strlen (src, 0);
2731 if (len)
2732 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2733
2734 /* If the length can be computed at compile-time and is constant
2735 integer, but there are side-effects in src, evaluate
2736 src for side-effects, then return len.
2737 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2738 can be optimized into: i++; x = 3; */
2739 len = c_strlen (src, 1);
2740 if (len && TREE_CODE (len) == INTEGER_CST)
2741 {
2742 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2743 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2744 }
2745
2746 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2747
2748 /* If SRC is not a pointer type, don't do this operation inline. */
2749 if (align == 0)
2750 return NULL_RTX;
2751
2752 /* Bail out if we can't compute strlen in the right mode. */
2753 while (insn_mode != VOIDmode)
2754 {
2755 icode = optab_handler (strlen_optab, insn_mode);
2756 if (icode != CODE_FOR_nothing)
2757 break;
2758
2759 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2760 }
2761 if (insn_mode == VOIDmode)
2762 return NULL_RTX;
2763
2764 /* Make a place to hold the source address. We will not expand
2765 the actual source until we are sure that the expansion will
2766 not fail -- there are trees that cannot be expanded twice. */
2767 src_reg = gen_reg_rtx (Pmode);
2768
2769 /* Mark the beginning of the strlen sequence so we can emit the
2770 source operand later. */
2771 before_strlen = get_last_insn ();
2772
2773 create_output_operand (&ops[0], target, insn_mode);
2774 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2775 create_integer_operand (&ops[2], 0);
2776 create_integer_operand (&ops[3], align);
2777 if (!maybe_expand_insn (icode, 4, ops))
2778 return NULL_RTX;
2779
2780 /* Now that we are assured of success, expand the source. */
2781 start_sequence ();
2782 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2783 if (pat != src_reg)
2784 {
2785 #ifdef POINTERS_EXTEND_UNSIGNED
2786 if (GET_MODE (pat) != Pmode)
2787 pat = convert_to_mode (Pmode, pat,
2788 POINTERS_EXTEND_UNSIGNED);
2789 #endif
2790 emit_move_insn (src_reg, pat);
2791 }
2792 pat = get_insns ();
2793 end_sequence ();
2794
2795 if (before_strlen)
2796 emit_insn_after (pat, before_strlen);
2797 else
2798 emit_insn_before (pat, get_insns ());
2799
2800 /* Return the value in the proper mode for this function. */
2801 if (GET_MODE (ops[0].value) == target_mode)
2802 target = ops[0].value;
2803 else if (target != 0)
2804 convert_move (target, ops[0].value, 0);
2805 else
2806 target = convert_to_mode (target_mode, ops[0].value, 0);
2807
2808 return target;
2809 }
2810 }
2811
2812 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2813 bytes from constant string DATA + OFFSET and return it as target
2814 constant. */
2815
2816 static rtx
2817 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2818 machine_mode mode)
2819 {
2820 const char *str = (const char *) data;
2821
2822 gcc_assert (offset >= 0
2823 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2824 <= strlen (str) + 1));
2825
2826 return c_readstr (str + offset, mode);
2827 }
2828
2829 /* LEN specify length of the block of memcpy/memset operation.
2830 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2831 In some cases we can make very likely guess on max size, then we
2832 set it into PROBABLE_MAX_SIZE. */
2833
2834 static void
2835 determine_block_size (tree len, rtx len_rtx,
2836 unsigned HOST_WIDE_INT *min_size,
2837 unsigned HOST_WIDE_INT *max_size,
2838 unsigned HOST_WIDE_INT *probable_max_size)
2839 {
2840 if (CONST_INT_P (len_rtx))
2841 {
2842 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2843 return;
2844 }
2845 else
2846 {
2847 wide_int min, max;
2848 enum value_range_type range_type = VR_UNDEFINED;
2849
2850 /* Determine bounds from the type. */
2851 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2852 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2853 else
2854 *min_size = 0;
2855 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2856 *probable_max_size = *max_size
2857 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2858 else
2859 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2860
2861 if (TREE_CODE (len) == SSA_NAME)
2862 range_type = get_range_info (len, &min, &max);
2863 if (range_type == VR_RANGE)
2864 {
2865 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2866 *min_size = min.to_uhwi ();
2867 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2868 *probable_max_size = *max_size = max.to_uhwi ();
2869 }
2870 else if (range_type == VR_ANTI_RANGE)
2871 {
2872 /* Anti range 0...N lets us to determine minimal size to N+1. */
2873 if (min == 0)
2874 {
2875 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2876 *min_size = max.to_uhwi () + 1;
2877 }
2878 /* Code like
2879
2880 int n;
2881 if (n < 100)
2882 memcpy (a, b, n)
2883
2884 Produce anti range allowing negative values of N. We still
2885 can use the information and make a guess that N is not negative.
2886 */
2887 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2888 *probable_max_size = min.to_uhwi () - 1;
2889 }
2890 }
2891 gcc_checking_assert (*max_size <=
2892 (unsigned HOST_WIDE_INT)
2893 GET_MODE_MASK (GET_MODE (len_rtx)));
2894 }
2895
2896 /* Helper function to do the actual work for expand_builtin_memcpy. */
2897
2898 static rtx
2899 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2900 {
2901 const char *src_str;
2902 unsigned int src_align = get_pointer_alignment (src);
2903 unsigned int dest_align = get_pointer_alignment (dest);
2904 rtx dest_mem, src_mem, dest_addr, len_rtx;
2905 HOST_WIDE_INT expected_size = -1;
2906 unsigned int expected_align = 0;
2907 unsigned HOST_WIDE_INT min_size;
2908 unsigned HOST_WIDE_INT max_size;
2909 unsigned HOST_WIDE_INT probable_max_size;
2910
2911 /* If DEST is not a pointer type, call the normal function. */
2912 if (dest_align == 0)
2913 return NULL_RTX;
2914
2915 /* If either SRC is not a pointer type, don't do this
2916 operation in-line. */
2917 if (src_align == 0)
2918 return NULL_RTX;
2919
2920 if (currently_expanding_gimple_stmt)
2921 stringop_block_profile (currently_expanding_gimple_stmt,
2922 &expected_align, &expected_size);
2923
2924 if (expected_align < dest_align)
2925 expected_align = dest_align;
2926 dest_mem = get_memory_rtx (dest, len);
2927 set_mem_align (dest_mem, dest_align);
2928 len_rtx = expand_normal (len);
2929 determine_block_size (len, len_rtx, &min_size, &max_size,
2930 &probable_max_size);
2931 src_str = c_getstr (src);
2932
2933 /* If SRC is a string constant and block move would be done
2934 by pieces, we can avoid loading the string from memory
2935 and only stored the computed constants. */
2936 if (src_str
2937 && CONST_INT_P (len_rtx)
2938 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2939 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2940 CONST_CAST (char *, src_str),
2941 dest_align, false))
2942 {
2943 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2944 builtin_memcpy_read_str,
2945 CONST_CAST (char *, src_str),
2946 dest_align, false, 0);
2947 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2948 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2949 return dest_mem;
2950 }
2951
2952 src_mem = get_memory_rtx (src, len);
2953 set_mem_align (src_mem, src_align);
2954
2955 /* Copy word part most expediently. */
2956 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2957 CALL_EXPR_TAILCALL (exp)
2958 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2959 expected_align, expected_size,
2960 min_size, max_size, probable_max_size);
2961
2962 if (dest_addr == 0)
2963 {
2964 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2965 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2966 }
2967
2968 return dest_addr;
2969 }
2970
2971 /* Expand a call EXP to the memcpy builtin.
2972 Return NULL_RTX if we failed, the caller should emit a normal call,
2973 otherwise try to get the result in TARGET, if convenient (and in
2974 mode MODE if that's convenient). */
2975
2976 static rtx
2977 expand_builtin_memcpy (tree exp, rtx target)
2978 {
2979 if (!validate_arglist (exp,
2980 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2981 return NULL_RTX;
2982 else
2983 {
2984 tree dest = CALL_EXPR_ARG (exp, 0);
2985 tree src = CALL_EXPR_ARG (exp, 1);
2986 tree len = CALL_EXPR_ARG (exp, 2);
2987 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2988 }
2989 }
2990
2991 /* Expand an instrumented call EXP to the memcpy builtin.
2992 Return NULL_RTX if we failed, the caller should emit a normal call,
2993 otherwise try to get the result in TARGET, if convenient (and in
2994 mode MODE if that's convenient). */
2995
2996 static rtx
2997 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2998 {
2999 if (!validate_arglist (exp,
3000 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3001 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3002 INTEGER_TYPE, VOID_TYPE))
3003 return NULL_RTX;
3004 else
3005 {
3006 tree dest = CALL_EXPR_ARG (exp, 0);
3007 tree src = CALL_EXPR_ARG (exp, 2);
3008 tree len = CALL_EXPR_ARG (exp, 4);
3009 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3010
3011 /* Return src bounds with the result. */
3012 if (res)
3013 {
3014 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3015 expand_normal (CALL_EXPR_ARG (exp, 1)));
3016 res = chkp_join_splitted_slot (res, bnd);
3017 }
3018 return res;
3019 }
3020 }
3021
3022 /* Expand a call EXP to the mempcpy builtin.
3023 Return NULL_RTX if we failed; the caller should emit a normal call,
3024 otherwise try to get the result in TARGET, if convenient (and in
3025 mode MODE if that's convenient). If ENDP is 0 return the
3026 destination pointer, if ENDP is 1 return the end pointer ala
3027 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3028 stpcpy. */
3029
3030 static rtx
3031 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3032 {
3033 if (!validate_arglist (exp,
3034 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3035 return NULL_RTX;
3036 else
3037 {
3038 tree dest = CALL_EXPR_ARG (exp, 0);
3039 tree src = CALL_EXPR_ARG (exp, 1);
3040 tree len = CALL_EXPR_ARG (exp, 2);
3041 return expand_builtin_mempcpy_args (dest, src, len,
3042 target, mode, /*endp=*/ 1,
3043 exp);
3044 }
3045 }
3046
3047 /* Expand an instrumented call EXP to the mempcpy builtin.
3048 Return NULL_RTX if we failed, the caller should emit a normal call,
3049 otherwise try to get the result in TARGET, if convenient (and in
3050 mode MODE if that's convenient). */
3051
3052 static rtx
3053 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3054 {
3055 if (!validate_arglist (exp,
3056 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3057 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3058 INTEGER_TYPE, VOID_TYPE))
3059 return NULL_RTX;
3060 else
3061 {
3062 tree dest = CALL_EXPR_ARG (exp, 0);
3063 tree src = CALL_EXPR_ARG (exp, 2);
3064 tree len = CALL_EXPR_ARG (exp, 4);
3065 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3066 mode, 1, exp);
3067
3068 /* Return src bounds with the result. */
3069 if (res)
3070 {
3071 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3072 expand_normal (CALL_EXPR_ARG (exp, 1)));
3073 res = chkp_join_splitted_slot (res, bnd);
3074 }
3075 return res;
3076 }
3077 }
3078
3079 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3080 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3081 so that this can also be called without constructing an actual CALL_EXPR.
3082 The other arguments and return value are the same as for
3083 expand_builtin_mempcpy. */
3084
3085 static rtx
3086 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3087 rtx target, machine_mode mode, int endp,
3088 tree orig_exp)
3089 {
3090 tree fndecl = get_callee_fndecl (orig_exp);
3091
3092 /* If return value is ignored, transform mempcpy into memcpy. */
3093 if (target == const0_rtx
3094 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3095 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3096 {
3097 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3098 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3099 dest, src, len);
3100 return expand_expr (result, target, mode, EXPAND_NORMAL);
3101 }
3102 else if (target == const0_rtx
3103 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3104 {
3105 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3106 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3107 dest, src, len);
3108 return expand_expr (result, target, mode, EXPAND_NORMAL);
3109 }
3110 else
3111 {
3112 const char *src_str;
3113 unsigned int src_align = get_pointer_alignment (src);
3114 unsigned int dest_align = get_pointer_alignment (dest);
3115 rtx dest_mem, src_mem, len_rtx;
3116
3117 /* If either SRC or DEST is not a pointer type, don't do this
3118 operation in-line. */
3119 if (dest_align == 0 || src_align == 0)
3120 return NULL_RTX;
3121
3122 /* If LEN is not constant, call the normal function. */
3123 if (! tree_fits_uhwi_p (len))
3124 return NULL_RTX;
3125
3126 len_rtx = expand_normal (len);
3127 src_str = c_getstr (src);
3128
3129 /* If SRC is a string constant and block move would be done
3130 by pieces, we can avoid loading the string from memory
3131 and only stored the computed constants. */
3132 if (src_str
3133 && CONST_INT_P (len_rtx)
3134 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3135 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3136 CONST_CAST (char *, src_str),
3137 dest_align, false))
3138 {
3139 dest_mem = get_memory_rtx (dest, len);
3140 set_mem_align (dest_mem, dest_align);
3141 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3142 builtin_memcpy_read_str,
3143 CONST_CAST (char *, src_str),
3144 dest_align, false, endp);
3145 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3146 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3147 return dest_mem;
3148 }
3149
3150 if (CONST_INT_P (len_rtx)
3151 && can_move_by_pieces (INTVAL (len_rtx),
3152 MIN (dest_align, src_align)))
3153 {
3154 dest_mem = get_memory_rtx (dest, len);
3155 set_mem_align (dest_mem, dest_align);
3156 src_mem = get_memory_rtx (src, len);
3157 set_mem_align (src_mem, src_align);
3158 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3159 MIN (dest_align, src_align), endp);
3160 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3161 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3162 return dest_mem;
3163 }
3164
3165 return NULL_RTX;
3166 }
3167 }
3168
3169 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3170 we failed, the caller should emit a normal call, otherwise try to
3171 get the result in TARGET, if convenient. If ENDP is 0 return the
3172 destination pointer, if ENDP is 1 return the end pointer ala
3173 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3174 stpcpy. */
3175
3176 static rtx
3177 expand_movstr (tree dest, tree src, rtx target, int endp)
3178 {
3179 struct expand_operand ops[3];
3180 rtx dest_mem;
3181 rtx src_mem;
3182
3183 if (!targetm.have_movstr ())
3184 return NULL_RTX;
3185
3186 dest_mem = get_memory_rtx (dest, NULL);
3187 src_mem = get_memory_rtx (src, NULL);
3188 if (!endp)
3189 {
3190 target = force_reg (Pmode, XEXP (dest_mem, 0));
3191 dest_mem = replace_equiv_address (dest_mem, target);
3192 }
3193
3194 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3195 create_fixed_operand (&ops[1], dest_mem);
3196 create_fixed_operand (&ops[2], src_mem);
3197 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3198 return NULL_RTX;
3199
3200 if (endp && target != const0_rtx)
3201 {
3202 target = ops[0].value;
3203 /* movstr is supposed to set end to the address of the NUL
3204 terminator. If the caller requested a mempcpy-like return value,
3205 adjust it. */
3206 if (endp == 1)
3207 {
3208 rtx tem = plus_constant (GET_MODE (target),
3209 gen_lowpart (GET_MODE (target), target), 1);
3210 emit_move_insn (target, force_operand (tem, NULL_RTX));
3211 }
3212 }
3213 return target;
3214 }
3215
3216 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3217 NULL_RTX if we failed the caller should emit a normal call, otherwise
3218 try to get the result in TARGET, if convenient (and in mode MODE if that's
3219 convenient). */
3220
3221 static rtx
3222 expand_builtin_strcpy (tree exp, rtx target)
3223 {
3224 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3225 {
3226 tree dest = CALL_EXPR_ARG (exp, 0);
3227 tree src = CALL_EXPR_ARG (exp, 1);
3228 return expand_builtin_strcpy_args (dest, src, target);
3229 }
3230 return NULL_RTX;
3231 }
3232
3233 /* Helper function to do the actual work for expand_builtin_strcpy. The
3234 arguments to the builtin_strcpy call DEST and SRC are broken out
3235 so that this can also be called without constructing an actual CALL_EXPR.
3236 The other arguments and return value are the same as for
3237 expand_builtin_strcpy. */
3238
3239 static rtx
3240 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3241 {
3242 return expand_movstr (dest, src, target, /*endp=*/0);
3243 }
3244
3245 /* Expand a call EXP to the stpcpy builtin.
3246 Return NULL_RTX if we failed the caller should emit a normal call,
3247 otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3249
3250 static rtx
3251 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3252 {
3253 tree dst, src;
3254 location_t loc = EXPR_LOCATION (exp);
3255
3256 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 return NULL_RTX;
3258
3259 dst = CALL_EXPR_ARG (exp, 0);
3260 src = CALL_EXPR_ARG (exp, 1);
3261
3262 /* If return value is ignored, transform stpcpy into strcpy. */
3263 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3264 {
3265 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3266 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3267 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 }
3269 else
3270 {
3271 tree len, lenp1;
3272 rtx ret;
3273
3274 /* Ensure we get an actual string whose length can be evaluated at
3275 compile-time, not an expression containing a string. This is
3276 because the latter will potentially produce pessimized code
3277 when used to produce the return value. */
3278 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3279 return expand_movstr (dst, src, target, /*endp=*/2);
3280
3281 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3282 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3283 target, mode, /*endp=*/2,
3284 exp);
3285
3286 if (ret)
3287 return ret;
3288
3289 if (TREE_CODE (len) == INTEGER_CST)
3290 {
3291 rtx len_rtx = expand_normal (len);
3292
3293 if (CONST_INT_P (len_rtx))
3294 {
3295 ret = expand_builtin_strcpy_args (dst, src, target);
3296
3297 if (ret)
3298 {
3299 if (! target)
3300 {
3301 if (mode != VOIDmode)
3302 target = gen_reg_rtx (mode);
3303 else
3304 target = gen_reg_rtx (GET_MODE (ret));
3305 }
3306 if (GET_MODE (target) != GET_MODE (ret))
3307 ret = gen_lowpart (GET_MODE (target), ret);
3308
3309 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3310 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3311 gcc_assert (ret);
3312
3313 return target;
3314 }
3315 }
3316 }
3317
3318 return expand_movstr (dst, src, target, /*endp=*/2);
3319 }
3320 }
3321
3322 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3323 bytes from constant string DATA + OFFSET and return it as target
3324 constant. */
3325
3326 rtx
3327 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3328 machine_mode mode)
3329 {
3330 const char *str = (const char *) data;
3331
3332 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3333 return const0_rtx;
3334
3335 return c_readstr (str + offset, mode);
3336 }
3337
3338 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3339 NULL_RTX if we failed the caller should emit a normal call. */
3340
3341 static rtx
3342 expand_builtin_strncpy (tree exp, rtx target)
3343 {
3344 location_t loc = EXPR_LOCATION (exp);
3345
3346 if (validate_arglist (exp,
3347 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3348 {
3349 tree dest = CALL_EXPR_ARG (exp, 0);
3350 tree src = CALL_EXPR_ARG (exp, 1);
3351 tree len = CALL_EXPR_ARG (exp, 2);
3352 tree slen = c_strlen (src, 1);
3353
3354 /* We must be passed a constant len and src parameter. */
3355 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3356 return NULL_RTX;
3357
3358 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3359
3360 /* We're required to pad with trailing zeros if the requested
3361 len is greater than strlen(s2)+1. In that case try to
3362 use store_by_pieces, if it fails, punt. */
3363 if (tree_int_cst_lt (slen, len))
3364 {
3365 unsigned int dest_align = get_pointer_alignment (dest);
3366 const char *p = c_getstr (src);
3367 rtx dest_mem;
3368
3369 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3370 || !can_store_by_pieces (tree_to_uhwi (len),
3371 builtin_strncpy_read_str,
3372 CONST_CAST (char *, p),
3373 dest_align, false))
3374 return NULL_RTX;
3375
3376 dest_mem = get_memory_rtx (dest, len);
3377 store_by_pieces (dest_mem, tree_to_uhwi (len),
3378 builtin_strncpy_read_str,
3379 CONST_CAST (char *, p), dest_align, false, 0);
3380 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3381 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3382 return dest_mem;
3383 }
3384 }
3385 return NULL_RTX;
3386 }
3387
3388 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3389 bytes from constant string DATA + OFFSET and return it as target
3390 constant. */
3391
3392 rtx
3393 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3394 machine_mode mode)
3395 {
3396 const char *c = (const char *) data;
3397 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3398
3399 memset (p, *c, GET_MODE_SIZE (mode));
3400
3401 return c_readstr (p, mode);
3402 }
3403
3404 /* Callback routine for store_by_pieces. Return the RTL of a register
3405 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3406 char value given in the RTL register data. For example, if mode is
3407 4 bytes wide, return the RTL for 0x01010101*data. */
3408
3409 static rtx
3410 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3411 machine_mode mode)
3412 {
3413 rtx target, coeff;
3414 size_t size;
3415 char *p;
3416
3417 size = GET_MODE_SIZE (mode);
3418 if (size == 1)
3419 return (rtx) data;
3420
3421 p = XALLOCAVEC (char, size);
3422 memset (p, 1, size);
3423 coeff = c_readstr (p, mode);
3424
3425 target = convert_to_mode (mode, (rtx) data, 1);
3426 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3427 return force_reg (mode, target);
3428 }
3429
3430 /* Expand expression EXP, which is a call to the memset builtin. Return
3431 NULL_RTX if we failed the caller should emit a normal call, otherwise
3432 try to get the result in TARGET, if convenient (and in mode MODE if that's
3433 convenient). */
3434
3435 static rtx
3436 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3437 {
3438 if (!validate_arglist (exp,
3439 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3440 return NULL_RTX;
3441 else
3442 {
3443 tree dest = CALL_EXPR_ARG (exp, 0);
3444 tree val = CALL_EXPR_ARG (exp, 1);
3445 tree len = CALL_EXPR_ARG (exp, 2);
3446 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3447 }
3448 }
3449
3450 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3451 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3452 try to get the result in TARGET, if convenient (and in mode MODE if that's
3453 convenient). */
3454
3455 static rtx
3456 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3457 {
3458 if (!validate_arglist (exp,
3459 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3460 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462 else
3463 {
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree val = CALL_EXPR_ARG (exp, 2);
3466 tree len = CALL_EXPR_ARG (exp, 3);
3467 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3468
3469 /* Return src bounds with the result. */
3470 if (res)
3471 {
3472 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3473 expand_normal (CALL_EXPR_ARG (exp, 1)));
3474 res = chkp_join_splitted_slot (res, bnd);
3475 }
3476 return res;
3477 }
3478 }
3479
3480 /* Helper function to do the actual work for expand_builtin_memset. The
3481 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3482 so that this can also be called without constructing an actual CALL_EXPR.
3483 The other arguments and return value are the same as for
3484 expand_builtin_memset. */
3485
3486 static rtx
3487 expand_builtin_memset_args (tree dest, tree val, tree len,
3488 rtx target, machine_mode mode, tree orig_exp)
3489 {
3490 tree fndecl, fn;
3491 enum built_in_function fcode;
3492 machine_mode val_mode;
3493 char c;
3494 unsigned int dest_align;
3495 rtx dest_mem, dest_addr, len_rtx;
3496 HOST_WIDE_INT expected_size = -1;
3497 unsigned int expected_align = 0;
3498 unsigned HOST_WIDE_INT min_size;
3499 unsigned HOST_WIDE_INT max_size;
3500 unsigned HOST_WIDE_INT probable_max_size;
3501
3502 dest_align = get_pointer_alignment (dest);
3503
3504 /* If DEST is not a pointer type, don't do this operation in-line. */
3505 if (dest_align == 0)
3506 return NULL_RTX;
3507
3508 if (currently_expanding_gimple_stmt)
3509 stringop_block_profile (currently_expanding_gimple_stmt,
3510 &expected_align, &expected_size);
3511
3512 if (expected_align < dest_align)
3513 expected_align = dest_align;
3514
3515 /* If the LEN parameter is zero, return DEST. */
3516 if (integer_zerop (len))
3517 {
3518 /* Evaluate and ignore VAL in case it has side-effects. */
3519 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3520 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3521 }
3522
3523 /* Stabilize the arguments in case we fail. */
3524 dest = builtin_save_expr (dest);
3525 val = builtin_save_expr (val);
3526 len = builtin_save_expr (len);
3527
3528 len_rtx = expand_normal (len);
3529 determine_block_size (len, len_rtx, &min_size, &max_size,
3530 &probable_max_size);
3531 dest_mem = get_memory_rtx (dest, len);
3532 val_mode = TYPE_MODE (unsigned_char_type_node);
3533
3534 if (TREE_CODE (val) != INTEGER_CST)
3535 {
3536 rtx val_rtx;
3537
3538 val_rtx = expand_normal (val);
3539 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3540
3541 /* Assume that we can memset by pieces if we can store
3542 * the coefficients by pieces (in the required modes).
3543 * We can't pass builtin_memset_gen_str as that emits RTL. */
3544 c = 1;
3545 if (tree_fits_uhwi_p (len)
3546 && can_store_by_pieces (tree_to_uhwi (len),
3547 builtin_memset_read_str, &c, dest_align,
3548 true))
3549 {
3550 val_rtx = force_reg (val_mode, val_rtx);
3551 store_by_pieces (dest_mem, tree_to_uhwi (len),
3552 builtin_memset_gen_str, val_rtx, dest_align,
3553 true, 0);
3554 }
3555 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3556 dest_align, expected_align,
3557 expected_size, min_size, max_size,
3558 probable_max_size))
3559 goto do_libcall;
3560
3561 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3562 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3563 return dest_mem;
3564 }
3565
3566 if (target_char_cast (val, &c))
3567 goto do_libcall;
3568
3569 if (c)
3570 {
3571 if (tree_fits_uhwi_p (len)
3572 && can_store_by_pieces (tree_to_uhwi (len),
3573 builtin_memset_read_str, &c, dest_align,
3574 true))
3575 store_by_pieces (dest_mem, tree_to_uhwi (len),
3576 builtin_memset_read_str, &c, dest_align, true, 0);
3577 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3578 gen_int_mode (c, val_mode),
3579 dest_align, expected_align,
3580 expected_size, min_size, max_size,
3581 probable_max_size))
3582 goto do_libcall;
3583
3584 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3585 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3586 return dest_mem;
3587 }
3588
3589 set_mem_align (dest_mem, dest_align);
3590 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3591 CALL_EXPR_TAILCALL (orig_exp)
3592 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3593 expected_align, expected_size,
3594 min_size, max_size,
3595 probable_max_size);
3596
3597 if (dest_addr == 0)
3598 {
3599 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3600 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3601 }
3602
3603 return dest_addr;
3604
3605 do_libcall:
3606 fndecl = get_callee_fndecl (orig_exp);
3607 fcode = DECL_FUNCTION_CODE (fndecl);
3608 if (fcode == BUILT_IN_MEMSET
3609 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3610 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3611 dest, val, len);
3612 else if (fcode == BUILT_IN_BZERO)
3613 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3614 dest, len);
3615 else
3616 gcc_unreachable ();
3617 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3618 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3619 return expand_call (fn, target, target == const0_rtx);
3620 }
3621
3622 /* Expand expression EXP, which is a call to the bzero builtin. Return
3623 NULL_RTX if we failed the caller should emit a normal call. */
3624
3625 static rtx
3626 expand_builtin_bzero (tree exp)
3627 {
3628 tree dest, size;
3629 location_t loc = EXPR_LOCATION (exp);
3630
3631 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3632 return NULL_RTX;
3633
3634 dest = CALL_EXPR_ARG (exp, 0);
3635 size = CALL_EXPR_ARG (exp, 1);
3636
3637 /* New argument list transforming bzero(ptr x, int y) to
3638 memset(ptr x, int 0, size_t y). This is done this way
3639 so that if it isn't expanded inline, we fallback to
3640 calling bzero instead of memset. */
3641
3642 return expand_builtin_memset_args (dest, integer_zero_node,
3643 fold_convert_loc (loc,
3644 size_type_node, size),
3645 const0_rtx, VOIDmode, exp);
3646 }
3647
3648 /* Try to expand cmpstr operation ICODE with the given operands.
3649 Return the result rtx on success, otherwise return null. */
3650
3651 static rtx
3652 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3653 HOST_WIDE_INT align)
3654 {
3655 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3656
3657 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3658 target = NULL_RTX;
3659
3660 struct expand_operand ops[4];
3661 create_output_operand (&ops[0], target, insn_mode);
3662 create_fixed_operand (&ops[1], arg1_rtx);
3663 create_fixed_operand (&ops[2], arg2_rtx);
3664 create_integer_operand (&ops[3], align);
3665 if (maybe_expand_insn (icode, 4, ops))
3666 return ops[0].value;
3667 return NULL_RTX;
3668 }
3669
3670 /* Expand expression EXP, which is a call to the memcmp built-in function.
3671 Return NULL_RTX if we failed and the caller should emit a normal call,
3672 otherwise try to get the result in TARGET, if convenient.
3673 RESULT_EQ is true if we can relax the returned value to be either zero
3674 or nonzero, without caring about the sign. */
3675
3676 static rtx
3677 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3678 {
3679 if (!validate_arglist (exp,
3680 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3681 return NULL_RTX;
3682
3683 tree arg1 = CALL_EXPR_ARG (exp, 0);
3684 tree arg2 = CALL_EXPR_ARG (exp, 1);
3685 tree len = CALL_EXPR_ARG (exp, 2);
3686 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3687 location_t loc = EXPR_LOCATION (exp);
3688
3689 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3690 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3691
3692 /* If we don't have POINTER_TYPE, call the function. */
3693 if (arg1_align == 0 || arg2_align == 0)
3694 return NULL_RTX;
3695
3696 rtx arg1_rtx = get_memory_rtx (arg1, len);
3697 rtx arg2_rtx = get_memory_rtx (arg2, len);
3698 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3699
3700 /* Set MEM_SIZE as appropriate. */
3701 if (CONST_INT_P (len_rtx))
3702 {
3703 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3704 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3705 }
3706
3707 by_pieces_constfn constfn = NULL;
3708
3709 const char *src_str = c_getstr (arg1);
3710 if (src_str == NULL)
3711 src_str = c_getstr (arg2);
3712 else
3713 std::swap (arg1_rtx, arg2_rtx);
3714
3715 /* If SRC is a string constant and block move would be done
3716 by pieces, we can avoid loading the string from memory
3717 and only stored the computed constants. */
3718 if (src_str
3719 && CONST_INT_P (len_rtx)
3720 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3721 constfn = builtin_memcpy_read_str;
3722
3723 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3724 TREE_TYPE (len), target,
3725 result_eq, constfn,
3726 CONST_CAST (char *, src_str));
3727
3728 if (result)
3729 {
3730 /* Return the value in the proper mode for this function. */
3731 if (GET_MODE (result) == mode)
3732 return result;
3733
3734 if (target != 0)
3735 {
3736 convert_move (target, result, 0);
3737 return target;
3738 }
3739
3740 return convert_to_mode (mode, result, 0);
3741 }
3742
3743 return NULL_RTX;
3744 }
3745
3746 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3747 if we failed the caller should emit a normal call, otherwise try to get
3748 the result in TARGET, if convenient. */
3749
3750 static rtx
3751 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3752 {
3753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3754 return NULL_RTX;
3755
3756 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3757 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3758 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3759 {
3760 rtx arg1_rtx, arg2_rtx;
3761 tree fndecl, fn;
3762 tree arg1 = CALL_EXPR_ARG (exp, 0);
3763 tree arg2 = CALL_EXPR_ARG (exp, 1);
3764 rtx result = NULL_RTX;
3765
3766 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3767 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3768
3769 /* If we don't have POINTER_TYPE, call the function. */
3770 if (arg1_align == 0 || arg2_align == 0)
3771 return NULL_RTX;
3772
3773 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3774 arg1 = builtin_save_expr (arg1);
3775 arg2 = builtin_save_expr (arg2);
3776
3777 arg1_rtx = get_memory_rtx (arg1, NULL);
3778 arg2_rtx = get_memory_rtx (arg2, NULL);
3779
3780 /* Try to call cmpstrsi. */
3781 if (cmpstr_icode != CODE_FOR_nothing)
3782 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3783 MIN (arg1_align, arg2_align));
3784
3785 /* Try to determine at least one length and call cmpstrnsi. */
3786 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3787 {
3788 tree len;
3789 rtx arg3_rtx;
3790
3791 tree len1 = c_strlen (arg1, 1);
3792 tree len2 = c_strlen (arg2, 1);
3793
3794 if (len1)
3795 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3796 if (len2)
3797 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3798
3799 /* If we don't have a constant length for the first, use the length
3800 of the second, if we know it. We don't require a constant for
3801 this case; some cost analysis could be done if both are available
3802 but neither is constant. For now, assume they're equally cheap,
3803 unless one has side effects. If both strings have constant lengths,
3804 use the smaller. */
3805
3806 if (!len1)
3807 len = len2;
3808 else if (!len2)
3809 len = len1;
3810 else if (TREE_SIDE_EFFECTS (len1))
3811 len = len2;
3812 else if (TREE_SIDE_EFFECTS (len2))
3813 len = len1;
3814 else if (TREE_CODE (len1) != INTEGER_CST)
3815 len = len2;
3816 else if (TREE_CODE (len2) != INTEGER_CST)
3817 len = len1;
3818 else if (tree_int_cst_lt (len1, len2))
3819 len = len1;
3820 else
3821 len = len2;
3822
3823 /* If both arguments have side effects, we cannot optimize. */
3824 if (len && !TREE_SIDE_EFFECTS (len))
3825 {
3826 arg3_rtx = expand_normal (len);
3827 result = expand_cmpstrn_or_cmpmem
3828 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3829 arg3_rtx, MIN (arg1_align, arg2_align));
3830 }
3831 }
3832
3833 if (result)
3834 {
3835 /* Return the value in the proper mode for this function. */
3836 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3837 if (GET_MODE (result) == mode)
3838 return result;
3839 if (target == 0)
3840 return convert_to_mode (mode, result, 0);
3841 convert_move (target, result, 0);
3842 return target;
3843 }
3844
3845 /* Expand the library call ourselves using a stabilized argument
3846 list to avoid re-evaluating the function's arguments twice. */
3847 fndecl = get_callee_fndecl (exp);
3848 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3849 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3850 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3851 return expand_call (fn, target, target == const0_rtx);
3852 }
3853 return NULL_RTX;
3854 }
3855
3856 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3857 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3858 the result in TARGET, if convenient. */
3859
3860 static rtx
3861 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3862 ATTRIBUTE_UNUSED machine_mode mode)
3863 {
3864 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3865
3866 if (!validate_arglist (exp,
3867 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3868 return NULL_RTX;
3869
3870 /* If c_strlen can determine an expression for one of the string
3871 lengths, and it doesn't have side effects, then emit cmpstrnsi
3872 using length MIN(strlen(string)+1, arg3). */
3873 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3874 if (cmpstrn_icode != CODE_FOR_nothing)
3875 {
3876 tree len, len1, len2;
3877 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3878 rtx result;
3879 tree fndecl, fn;
3880 tree arg1 = CALL_EXPR_ARG (exp, 0);
3881 tree arg2 = CALL_EXPR_ARG (exp, 1);
3882 tree arg3 = CALL_EXPR_ARG (exp, 2);
3883
3884 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3885 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3886
3887 len1 = c_strlen (arg1, 1);
3888 len2 = c_strlen (arg2, 1);
3889
3890 if (len1)
3891 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3892 if (len2)
3893 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3894
3895 /* If we don't have a constant length for the first, use the length
3896 of the second, if we know it. We don't require a constant for
3897 this case; some cost analysis could be done if both are available
3898 but neither is constant. For now, assume they're equally cheap,
3899 unless one has side effects. If both strings have constant lengths,
3900 use the smaller. */
3901
3902 if (!len1)
3903 len = len2;
3904 else if (!len2)
3905 len = len1;
3906 else if (TREE_SIDE_EFFECTS (len1))
3907 len = len2;
3908 else if (TREE_SIDE_EFFECTS (len2))
3909 len = len1;
3910 else if (TREE_CODE (len1) != INTEGER_CST)
3911 len = len2;
3912 else if (TREE_CODE (len2) != INTEGER_CST)
3913 len = len1;
3914 else if (tree_int_cst_lt (len1, len2))
3915 len = len1;
3916 else
3917 len = len2;
3918
3919 /* If both arguments have side effects, we cannot optimize. */
3920 if (!len || TREE_SIDE_EFFECTS (len))
3921 return NULL_RTX;
3922
3923 /* The actual new length parameter is MIN(len,arg3). */
3924 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3925 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3926
3927 /* If we don't have POINTER_TYPE, call the function. */
3928 if (arg1_align == 0 || arg2_align == 0)
3929 return NULL_RTX;
3930
3931 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3932 arg1 = builtin_save_expr (arg1);
3933 arg2 = builtin_save_expr (arg2);
3934 len = builtin_save_expr (len);
3935
3936 arg1_rtx = get_memory_rtx (arg1, len);
3937 arg2_rtx = get_memory_rtx (arg2, len);
3938 arg3_rtx = expand_normal (len);
3939 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3940 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3941 MIN (arg1_align, arg2_align));
3942 if (result)
3943 {
3944 /* Return the value in the proper mode for this function. */
3945 mode = TYPE_MODE (TREE_TYPE (exp));
3946 if (GET_MODE (result) == mode)
3947 return result;
3948 if (target == 0)
3949 return convert_to_mode (mode, result, 0);
3950 convert_move (target, result, 0);
3951 return target;
3952 }
3953
3954 /* Expand the library call ourselves using a stabilized argument
3955 list to avoid re-evaluating the function's arguments twice. */
3956 fndecl = get_callee_fndecl (exp);
3957 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3958 arg1, arg2, len);
3959 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3960 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3961 return expand_call (fn, target, target == const0_rtx);
3962 }
3963 return NULL_RTX;
3964 }
3965
3966 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3967 if that's convenient. */
3968
3969 rtx
3970 expand_builtin_saveregs (void)
3971 {
3972 rtx val;
3973 rtx_insn *seq;
3974
3975 /* Don't do __builtin_saveregs more than once in a function.
3976 Save the result of the first call and reuse it. */
3977 if (saveregs_value != 0)
3978 return saveregs_value;
3979
3980 /* When this function is called, it means that registers must be
3981 saved on entry to this function. So we migrate the call to the
3982 first insn of this function. */
3983
3984 start_sequence ();
3985
3986 /* Do whatever the machine needs done in this case. */
3987 val = targetm.calls.expand_builtin_saveregs ();
3988
3989 seq = get_insns ();
3990 end_sequence ();
3991
3992 saveregs_value = val;
3993
3994 /* Put the insns after the NOTE that starts the function. If this
3995 is inside a start_sequence, make the outer-level insn chain current, so
3996 the code is placed at the start of the function. */
3997 push_topmost_sequence ();
3998 emit_insn_after (seq, entry_of_function ());
3999 pop_topmost_sequence ();
4000
4001 return val;
4002 }
4003
4004 /* Expand a call to __builtin_next_arg. */
4005
4006 static rtx
4007 expand_builtin_next_arg (void)
4008 {
4009 /* Checking arguments is already done in fold_builtin_next_arg
4010 that must be called before this function. */
4011 return expand_binop (ptr_mode, add_optab,
4012 crtl->args.internal_arg_pointer,
4013 crtl->args.arg_offset_rtx,
4014 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4015 }
4016
4017 /* Make it easier for the backends by protecting the valist argument
4018 from multiple evaluations. */
4019
4020 static tree
4021 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4022 {
4023 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4024
4025 /* The current way of determining the type of valist is completely
4026 bogus. We should have the information on the va builtin instead. */
4027 if (!vatype)
4028 vatype = targetm.fn_abi_va_list (cfun->decl);
4029
4030 if (TREE_CODE (vatype) == ARRAY_TYPE)
4031 {
4032 if (TREE_SIDE_EFFECTS (valist))
4033 valist = save_expr (valist);
4034
4035 /* For this case, the backends will be expecting a pointer to
4036 vatype, but it's possible we've actually been given an array
4037 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4038 So fix it. */
4039 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4040 {
4041 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4042 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4043 }
4044 }
4045 else
4046 {
4047 tree pt = build_pointer_type (vatype);
4048
4049 if (! needs_lvalue)
4050 {
4051 if (! TREE_SIDE_EFFECTS (valist))
4052 return valist;
4053
4054 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4055 TREE_SIDE_EFFECTS (valist) = 1;
4056 }
4057
4058 if (TREE_SIDE_EFFECTS (valist))
4059 valist = save_expr (valist);
4060 valist = fold_build2_loc (loc, MEM_REF,
4061 vatype, valist, build_int_cst (pt, 0));
4062 }
4063
4064 return valist;
4065 }
4066
4067 /* The "standard" definition of va_list is void*. */
4068
4069 tree
4070 std_build_builtin_va_list (void)
4071 {
4072 return ptr_type_node;
4073 }
4074
4075 /* The "standard" abi va_list is va_list_type_node. */
4076
4077 tree
4078 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4079 {
4080 return va_list_type_node;
4081 }
4082
4083 /* The "standard" type of va_list is va_list_type_node. */
4084
4085 tree
4086 std_canonical_va_list_type (tree type)
4087 {
4088 tree wtype, htype;
4089
4090 wtype = va_list_type_node;
4091 htype = type;
4092 /* Treat structure va_list types. */
4093 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4094 htype = TREE_TYPE (htype);
4095 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4096 {
4097 /* If va_list is an array type, the argument may have decayed
4098 to a pointer type, e.g. by being passed to another function.
4099 In that case, unwrap both types so that we can compare the
4100 underlying records. */
4101 if (TREE_CODE (htype) == ARRAY_TYPE
4102 || POINTER_TYPE_P (htype))
4103 {
4104 wtype = TREE_TYPE (wtype);
4105 htype = TREE_TYPE (htype);
4106 }
4107 }
4108 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4109 return va_list_type_node;
4110
4111 return NULL_TREE;
4112 }
4113
4114 /* The "standard" implementation of va_start: just assign `nextarg' to
4115 the variable. */
4116
4117 void
4118 std_expand_builtin_va_start (tree valist, rtx nextarg)
4119 {
4120 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4121 convert_move (va_r, nextarg, 0);
4122
4123 /* We do not have any valid bounds for the pointer, so
4124 just store zero bounds for it. */
4125 if (chkp_function_instrumented_p (current_function_decl))
4126 chkp_expand_bounds_reset_for_mem (valist,
4127 make_tree (TREE_TYPE (valist),
4128 nextarg));
4129 }
4130
4131 /* Expand EXP, a call to __builtin_va_start. */
4132
4133 static rtx
4134 expand_builtin_va_start (tree exp)
4135 {
4136 rtx nextarg;
4137 tree valist;
4138 location_t loc = EXPR_LOCATION (exp);
4139
4140 if (call_expr_nargs (exp) < 2)
4141 {
4142 error_at (loc, "too few arguments to function %<va_start%>");
4143 return const0_rtx;
4144 }
4145
4146 if (fold_builtin_next_arg (exp, true))
4147 return const0_rtx;
4148
4149 nextarg = expand_builtin_next_arg ();
4150 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4151
4152 if (targetm.expand_builtin_va_start)
4153 targetm.expand_builtin_va_start (valist, nextarg);
4154 else
4155 std_expand_builtin_va_start (valist, nextarg);
4156
4157 return const0_rtx;
4158 }
4159
4160 /* Expand EXP, a call to __builtin_va_end. */
4161
4162 static rtx
4163 expand_builtin_va_end (tree exp)
4164 {
4165 tree valist = CALL_EXPR_ARG (exp, 0);
4166
4167 /* Evaluate for side effects, if needed. I hate macros that don't
4168 do that. */
4169 if (TREE_SIDE_EFFECTS (valist))
4170 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4171
4172 return const0_rtx;
4173 }
4174
4175 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4176 builtin rather than just as an assignment in stdarg.h because of the
4177 nastiness of array-type va_list types. */
4178
4179 static rtx
4180 expand_builtin_va_copy (tree exp)
4181 {
4182 tree dst, src, t;
4183 location_t loc = EXPR_LOCATION (exp);
4184
4185 dst = CALL_EXPR_ARG (exp, 0);
4186 src = CALL_EXPR_ARG (exp, 1);
4187
4188 dst = stabilize_va_list_loc (loc, dst, 1);
4189 src = stabilize_va_list_loc (loc, src, 0);
4190
4191 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4192
4193 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4194 {
4195 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4196 TREE_SIDE_EFFECTS (t) = 1;
4197 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4198 }
4199 else
4200 {
4201 rtx dstb, srcb, size;
4202
4203 /* Evaluate to pointers. */
4204 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4205 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4206 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4207 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4208
4209 dstb = convert_memory_address (Pmode, dstb);
4210 srcb = convert_memory_address (Pmode, srcb);
4211
4212 /* "Dereference" to BLKmode memories. */
4213 dstb = gen_rtx_MEM (BLKmode, dstb);
4214 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4215 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4216 srcb = gen_rtx_MEM (BLKmode, srcb);
4217 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4218 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4219
4220 /* Copy. */
4221 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4222 }
4223
4224 return const0_rtx;
4225 }
4226
4227 /* Expand a call to one of the builtin functions __builtin_frame_address or
4228 __builtin_return_address. */
4229
4230 static rtx
4231 expand_builtin_frame_address (tree fndecl, tree exp)
4232 {
4233 /* The argument must be a nonnegative integer constant.
4234 It counts the number of frames to scan up the stack.
4235 The value is either the frame pointer value or the return
4236 address saved in that frame. */
4237 if (call_expr_nargs (exp) == 0)
4238 /* Warning about missing arg was already issued. */
4239 return const0_rtx;
4240 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4241 {
4242 error ("invalid argument to %qD", fndecl);
4243 return const0_rtx;
4244 }
4245 else
4246 {
4247 /* Number of frames to scan up the stack. */
4248 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4249
4250 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4251
4252 /* Some ports cannot access arbitrary stack frames. */
4253 if (tem == NULL)
4254 {
4255 warning (0, "unsupported argument to %qD", fndecl);
4256 return const0_rtx;
4257 }
4258
4259 if (count)
4260 {
4261 /* Warn since no effort is made to ensure that any frame
4262 beyond the current one exists or can be safely reached. */
4263 warning (OPT_Wframe_address, "calling %qD with "
4264 "a nonzero argument is unsafe", fndecl);
4265 }
4266
4267 /* For __builtin_frame_address, return what we've got. */
4268 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4269 return tem;
4270
4271 if (!REG_P (tem)
4272 && ! CONSTANT_P (tem))
4273 tem = copy_addr_to_reg (tem);
4274 return tem;
4275 }
4276 }
4277
4278 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4279 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4280 is the same as for allocate_dynamic_stack_space. */
4281
4282 static rtx
4283 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4284 {
4285 rtx op0;
4286 rtx result;
4287 bool valid_arglist;
4288 unsigned int align;
4289 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4290 == BUILT_IN_ALLOCA_WITH_ALIGN);
4291
4292 valid_arglist
4293 = (alloca_with_align
4294 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4295 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4296
4297 if (!valid_arglist)
4298 return NULL_RTX;
4299
4300 /* Compute the argument. */
4301 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4302
4303 /* Compute the alignment. */
4304 align = (alloca_with_align
4305 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4306 : BIGGEST_ALIGNMENT);
4307
4308 /* Allocate the desired space. */
4309 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4310 result = convert_memory_address (ptr_mode, result);
4311
4312 return result;
4313 }
4314
4315 /* Expand a call to bswap builtin in EXP.
4316 Return NULL_RTX if a normal call should be emitted rather than expanding the
4317 function in-line. If convenient, the result should be placed in TARGET.
4318 SUBTARGET may be used as the target for computing one of EXP's operands. */
4319
4320 static rtx
4321 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4322 rtx subtarget)
4323 {
4324 tree arg;
4325 rtx op0;
4326
4327 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4328 return NULL_RTX;
4329
4330 arg = CALL_EXPR_ARG (exp, 0);
4331 op0 = expand_expr (arg,
4332 subtarget && GET_MODE (subtarget) == target_mode
4333 ? subtarget : NULL_RTX,
4334 target_mode, EXPAND_NORMAL);
4335 if (GET_MODE (op0) != target_mode)
4336 op0 = convert_to_mode (target_mode, op0, 1);
4337
4338 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4339
4340 gcc_assert (target);
4341
4342 return convert_to_mode (target_mode, target, 1);
4343 }
4344
4345 /* Expand a call to a unary builtin in EXP.
4346 Return NULL_RTX if a normal call should be emitted rather than expanding the
4347 function in-line. If convenient, the result should be placed in TARGET.
4348 SUBTARGET may be used as the target for computing one of EXP's operands. */
4349
4350 static rtx
4351 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4352 rtx subtarget, optab op_optab)
4353 {
4354 rtx op0;
4355
4356 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4357 return NULL_RTX;
4358
4359 /* Compute the argument. */
4360 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4361 (subtarget
4362 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4363 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4364 VOIDmode, EXPAND_NORMAL);
4365 /* Compute op, into TARGET if possible.
4366 Set TARGET to wherever the result comes back. */
4367 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4368 op_optab, op0, target, op_optab != clrsb_optab);
4369 gcc_assert (target);
4370
4371 return convert_to_mode (target_mode, target, 0);
4372 }
4373
4374 /* Expand a call to __builtin_expect. We just return our argument
4375 as the builtin_expect semantic should've been already executed by
4376 tree branch prediction pass. */
4377
4378 static rtx
4379 expand_builtin_expect (tree exp, rtx target)
4380 {
4381 tree arg;
4382
4383 if (call_expr_nargs (exp) < 2)
4384 return const0_rtx;
4385 arg = CALL_EXPR_ARG (exp, 0);
4386
4387 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4388 /* When guessing was done, the hints should be already stripped away. */
4389 gcc_assert (!flag_guess_branch_prob
4390 || optimize == 0 || seen_error ());
4391 return target;
4392 }
4393
4394 /* Expand a call to __builtin_assume_aligned. We just return our first
4395 argument as the builtin_assume_aligned semantic should've been already
4396 executed by CCP. */
4397
4398 static rtx
4399 expand_builtin_assume_aligned (tree exp, rtx target)
4400 {
4401 if (call_expr_nargs (exp) < 2)
4402 return const0_rtx;
4403 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4404 EXPAND_NORMAL);
4405 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4406 && (call_expr_nargs (exp) < 3
4407 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4408 return target;
4409 }
4410
4411 void
4412 expand_builtin_trap (void)
4413 {
4414 if (targetm.have_trap ())
4415 {
4416 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4417 /* For trap insns when not accumulating outgoing args force
4418 REG_ARGS_SIZE note to prevent crossjumping of calls with
4419 different args sizes. */
4420 if (!ACCUMULATE_OUTGOING_ARGS)
4421 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4422 }
4423 else
4424 {
4425 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4426 tree call_expr = build_call_expr (fn, 0);
4427 expand_call (call_expr, NULL_RTX, false);
4428 }
4429
4430 emit_barrier ();
4431 }
4432
4433 /* Expand a call to __builtin_unreachable. We do nothing except emit
4434 a barrier saying that control flow will not pass here.
4435
4436 It is the responsibility of the program being compiled to ensure
4437 that control flow does never reach __builtin_unreachable. */
4438 static void
4439 expand_builtin_unreachable (void)
4440 {
4441 emit_barrier ();
4442 }
4443
4444 /* Expand EXP, a call to fabs, fabsf or fabsl.
4445 Return NULL_RTX if a normal call should be emitted rather than expanding
4446 the function inline. If convenient, the result should be placed
4447 in TARGET. SUBTARGET may be used as the target for computing
4448 the operand. */
4449
4450 static rtx
4451 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4452 {
4453 machine_mode mode;
4454 tree arg;
4455 rtx op0;
4456
4457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4458 return NULL_RTX;
4459
4460 arg = CALL_EXPR_ARG (exp, 0);
4461 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4462 mode = TYPE_MODE (TREE_TYPE (arg));
4463 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4464 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4465 }
4466
4467 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4468 Return NULL is a normal call should be emitted rather than expanding the
4469 function inline. If convenient, the result should be placed in TARGET.
4470 SUBTARGET may be used as the target for computing the operand. */
4471
4472 static rtx
4473 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4474 {
4475 rtx op0, op1;
4476 tree arg;
4477
4478 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4479 return NULL_RTX;
4480
4481 arg = CALL_EXPR_ARG (exp, 0);
4482 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4483
4484 arg = CALL_EXPR_ARG (exp, 1);
4485 op1 = expand_normal (arg);
4486
4487 return expand_copysign (op0, op1, target);
4488 }
4489
4490 /* Expand a call to __builtin___clear_cache. */
4491
4492 static rtx
4493 expand_builtin___clear_cache (tree exp)
4494 {
4495 if (!targetm.code_for_clear_cache)
4496 {
4497 #ifdef CLEAR_INSN_CACHE
4498 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4499 does something. Just do the default expansion to a call to
4500 __clear_cache(). */
4501 return NULL_RTX;
4502 #else
4503 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4504 does nothing. There is no need to call it. Do nothing. */
4505 return const0_rtx;
4506 #endif /* CLEAR_INSN_CACHE */
4507 }
4508
4509 /* We have a "clear_cache" insn, and it will handle everything. */
4510 tree begin, end;
4511 rtx begin_rtx, end_rtx;
4512
4513 /* We must not expand to a library call. If we did, any
4514 fallback library function in libgcc that might contain a call to
4515 __builtin___clear_cache() would recurse infinitely. */
4516 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4517 {
4518 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4519 return const0_rtx;
4520 }
4521
4522 if (targetm.have_clear_cache ())
4523 {
4524 struct expand_operand ops[2];
4525
4526 begin = CALL_EXPR_ARG (exp, 0);
4527 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4528
4529 end = CALL_EXPR_ARG (exp, 1);
4530 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4531
4532 create_address_operand (&ops[0], begin_rtx);
4533 create_address_operand (&ops[1], end_rtx);
4534 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4535 return const0_rtx;
4536 }
4537 return const0_rtx;
4538 }
4539
4540 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4541
4542 static rtx
4543 round_trampoline_addr (rtx tramp)
4544 {
4545 rtx temp, addend, mask;
4546
4547 /* If we don't need too much alignment, we'll have been guaranteed
4548 proper alignment by get_trampoline_type. */
4549 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4550 return tramp;
4551
4552 /* Round address up to desired boundary. */
4553 temp = gen_reg_rtx (Pmode);
4554 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4555 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4556
4557 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4558 temp, 0, OPTAB_LIB_WIDEN);
4559 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4560 temp, 0, OPTAB_LIB_WIDEN);
4561
4562 return tramp;
4563 }
4564
4565 static rtx
4566 expand_builtin_init_trampoline (tree exp, bool onstack)
4567 {
4568 tree t_tramp, t_func, t_chain;
4569 rtx m_tramp, r_tramp, r_chain, tmp;
4570
4571 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4572 POINTER_TYPE, VOID_TYPE))
4573 return NULL_RTX;
4574
4575 t_tramp = CALL_EXPR_ARG (exp, 0);
4576 t_func = CALL_EXPR_ARG (exp, 1);
4577 t_chain = CALL_EXPR_ARG (exp, 2);
4578
4579 r_tramp = expand_normal (t_tramp);
4580 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4581 MEM_NOTRAP_P (m_tramp) = 1;
4582
4583 /* If ONSTACK, the TRAMP argument should be the address of a field
4584 within the local function's FRAME decl. Either way, let's see if
4585 we can fill in the MEM_ATTRs for this memory. */
4586 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4587 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4588
4589 /* Creator of a heap trampoline is responsible for making sure the
4590 address is aligned to at least STACK_BOUNDARY. Normally malloc
4591 will ensure this anyhow. */
4592 tmp = round_trampoline_addr (r_tramp);
4593 if (tmp != r_tramp)
4594 {
4595 m_tramp = change_address (m_tramp, BLKmode, tmp);
4596 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4597 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4598 }
4599
4600 /* The FUNC argument should be the address of the nested function.
4601 Extract the actual function decl to pass to the hook. */
4602 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4603 t_func = TREE_OPERAND (t_func, 0);
4604 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4605
4606 r_chain = expand_normal (t_chain);
4607
4608 /* Generate insns to initialize the trampoline. */
4609 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4610
4611 if (onstack)
4612 {
4613 trampolines_created = 1;
4614
4615 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4616 "trampoline generated for nested function %qD", t_func);
4617 }
4618
4619 return const0_rtx;
4620 }
4621
4622 static rtx
4623 expand_builtin_adjust_trampoline (tree exp)
4624 {
4625 rtx tramp;
4626
4627 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4628 return NULL_RTX;
4629
4630 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4631 tramp = round_trampoline_addr (tramp);
4632 if (targetm.calls.trampoline_adjust_address)
4633 tramp = targetm.calls.trampoline_adjust_address (tramp);
4634
4635 return tramp;
4636 }
4637
4638 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4639 function. The function first checks whether the back end provides
4640 an insn to implement signbit for the respective mode. If not, it
4641 checks whether the floating point format of the value is such that
4642 the sign bit can be extracted. If that is not the case, error out.
4643 EXP is the expression that is a call to the builtin function; if
4644 convenient, the result should be placed in TARGET. */
4645 static rtx
4646 expand_builtin_signbit (tree exp, rtx target)
4647 {
4648 const struct real_format *fmt;
4649 machine_mode fmode, imode, rmode;
4650 tree arg;
4651 int word, bitpos;
4652 enum insn_code icode;
4653 rtx temp;
4654 location_t loc = EXPR_LOCATION (exp);
4655
4656 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4657 return NULL_RTX;
4658
4659 arg = CALL_EXPR_ARG (exp, 0);
4660 fmode = TYPE_MODE (TREE_TYPE (arg));
4661 rmode = TYPE_MODE (TREE_TYPE (exp));
4662 fmt = REAL_MODE_FORMAT (fmode);
4663
4664 arg = builtin_save_expr (arg);
4665
4666 /* Expand the argument yielding a RTX expression. */
4667 temp = expand_normal (arg);
4668
4669 /* Check if the back end provides an insn that handles signbit for the
4670 argument's mode. */
4671 icode = optab_handler (signbit_optab, fmode);
4672 if (icode != CODE_FOR_nothing)
4673 {
4674 rtx_insn *last = get_last_insn ();
4675 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4676 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4677 return target;
4678 delete_insns_since (last);
4679 }
4680
4681 /* For floating point formats without a sign bit, implement signbit
4682 as "ARG < 0.0". */
4683 bitpos = fmt->signbit_ro;
4684 if (bitpos < 0)
4685 {
4686 /* But we can't do this if the format supports signed zero. */
4687 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4688
4689 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4690 build_real (TREE_TYPE (arg), dconst0));
4691 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4692 }
4693
4694 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4695 {
4696 imode = int_mode_for_mode (fmode);
4697 gcc_assert (imode != BLKmode);
4698 temp = gen_lowpart (imode, temp);
4699 }
4700 else
4701 {
4702 imode = word_mode;
4703 /* Handle targets with different FP word orders. */
4704 if (FLOAT_WORDS_BIG_ENDIAN)
4705 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4706 else
4707 word = bitpos / BITS_PER_WORD;
4708 temp = operand_subword_force (temp, word, fmode);
4709 bitpos = bitpos % BITS_PER_WORD;
4710 }
4711
4712 /* Force the intermediate word_mode (or narrower) result into a
4713 register. This avoids attempting to create paradoxical SUBREGs
4714 of floating point modes below. */
4715 temp = force_reg (imode, temp);
4716
4717 /* If the bitpos is within the "result mode" lowpart, the operation
4718 can be implement with a single bitwise AND. Otherwise, we need
4719 a right shift and an AND. */
4720
4721 if (bitpos < GET_MODE_BITSIZE (rmode))
4722 {
4723 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4724
4725 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4726 temp = gen_lowpart (rmode, temp);
4727 temp = expand_binop (rmode, and_optab, temp,
4728 immed_wide_int_const (mask, rmode),
4729 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4730 }
4731 else
4732 {
4733 /* Perform a logical right shift to place the signbit in the least
4734 significant bit, then truncate the result to the desired mode
4735 and mask just this bit. */
4736 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4737 temp = gen_lowpart (rmode, temp);
4738 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4739 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4740 }
4741
4742 return temp;
4743 }
4744
4745 /* Expand fork or exec calls. TARGET is the desired target of the
4746 call. EXP is the call. FN is the
4747 identificator of the actual function. IGNORE is nonzero if the
4748 value is to be ignored. */
4749
4750 static rtx
4751 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4752 {
4753 tree id, decl;
4754 tree call;
4755
4756 /* If we are not profiling, just call the function. */
4757 if (!profile_arc_flag)
4758 return NULL_RTX;
4759
4760 /* Otherwise call the wrapper. This should be equivalent for the rest of
4761 compiler, so the code does not diverge, and the wrapper may run the
4762 code necessary for keeping the profiling sane. */
4763
4764 switch (DECL_FUNCTION_CODE (fn))
4765 {
4766 case BUILT_IN_FORK:
4767 id = get_identifier ("__gcov_fork");
4768 break;
4769
4770 case BUILT_IN_EXECL:
4771 id = get_identifier ("__gcov_execl");
4772 break;
4773
4774 case BUILT_IN_EXECV:
4775 id = get_identifier ("__gcov_execv");
4776 break;
4777
4778 case BUILT_IN_EXECLP:
4779 id = get_identifier ("__gcov_execlp");
4780 break;
4781
4782 case BUILT_IN_EXECLE:
4783 id = get_identifier ("__gcov_execle");
4784 break;
4785
4786 case BUILT_IN_EXECVP:
4787 id = get_identifier ("__gcov_execvp");
4788 break;
4789
4790 case BUILT_IN_EXECVE:
4791 id = get_identifier ("__gcov_execve");
4792 break;
4793
4794 default:
4795 gcc_unreachable ();
4796 }
4797
4798 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4799 FUNCTION_DECL, id, TREE_TYPE (fn));
4800 DECL_EXTERNAL (decl) = 1;
4801 TREE_PUBLIC (decl) = 1;
4802 DECL_ARTIFICIAL (decl) = 1;
4803 TREE_NOTHROW (decl) = 1;
4804 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4805 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4806 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4807 return expand_call (call, target, ignore);
4808 }
4809
4810
4811 \f
4812 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4813 the pointer in these functions is void*, the tree optimizers may remove
4814 casts. The mode computed in expand_builtin isn't reliable either, due
4815 to __sync_bool_compare_and_swap.
4816
4817 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4818 group of builtins. This gives us log2 of the mode size. */
4819
4820 static inline machine_mode
4821 get_builtin_sync_mode (int fcode_diff)
4822 {
4823 /* The size is not negotiable, so ask not to get BLKmode in return
4824 if the target indicates that a smaller size would be better. */
4825 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4826 }
4827
4828 /* Expand the memory expression LOC and return the appropriate memory operand
4829 for the builtin_sync operations. */
4830
4831 static rtx
4832 get_builtin_sync_mem (tree loc, machine_mode mode)
4833 {
4834 rtx addr, mem;
4835
4836 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4837 addr = convert_memory_address (Pmode, addr);
4838
4839 /* Note that we explicitly do not want any alias information for this
4840 memory, so that we kill all other live memories. Otherwise we don't
4841 satisfy the full barrier semantics of the intrinsic. */
4842 mem = validize_mem (gen_rtx_MEM (mode, addr));
4843
4844 /* The alignment needs to be at least according to that of the mode. */
4845 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4846 get_pointer_alignment (loc)));
4847 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4848 MEM_VOLATILE_P (mem) = 1;
4849
4850 return mem;
4851 }
4852
4853 /* Make sure an argument is in the right mode.
4854 EXP is the tree argument.
4855 MODE is the mode it should be in. */
4856
4857 static rtx
4858 expand_expr_force_mode (tree exp, machine_mode mode)
4859 {
4860 rtx val;
4861 machine_mode old_mode;
4862
4863 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4864 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4865 of CONST_INTs, where we know the old_mode only from the call argument. */
4866
4867 old_mode = GET_MODE (val);
4868 if (old_mode == VOIDmode)
4869 old_mode = TYPE_MODE (TREE_TYPE (exp));
4870 val = convert_modes (mode, old_mode, val, 1);
4871 return val;
4872 }
4873
4874
4875 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4876 EXP is the CALL_EXPR. CODE is the rtx code
4877 that corresponds to the arithmetic or logical operation from the name;
4878 an exception here is that NOT actually means NAND. TARGET is an optional
4879 place for us to store the results; AFTER is true if this is the
4880 fetch_and_xxx form. */
4881
4882 static rtx
4883 expand_builtin_sync_operation (machine_mode mode, tree exp,
4884 enum rtx_code code, bool after,
4885 rtx target)
4886 {
4887 rtx val, mem;
4888 location_t loc = EXPR_LOCATION (exp);
4889
4890 if (code == NOT && warn_sync_nand)
4891 {
4892 tree fndecl = get_callee_fndecl (exp);
4893 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4894
4895 static bool warned_f_a_n, warned_n_a_f;
4896
4897 switch (fcode)
4898 {
4899 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4900 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4901 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4902 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4903 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4904 if (warned_f_a_n)
4905 break;
4906
4907 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4908 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4909 warned_f_a_n = true;
4910 break;
4911
4912 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4913 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4914 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4915 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4916 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4917 if (warned_n_a_f)
4918 break;
4919
4920 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4921 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4922 warned_n_a_f = true;
4923 break;
4924
4925 default:
4926 gcc_unreachable ();
4927 }
4928 }
4929
4930 /* Expand the operands. */
4931 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4932 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4933
4934 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4935 after);
4936 }
4937
4938 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4939 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4940 true if this is the boolean form. TARGET is a place for us to store the
4941 results; this is NOT optional if IS_BOOL is true. */
4942
4943 static rtx
4944 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4945 bool is_bool, rtx target)
4946 {
4947 rtx old_val, new_val, mem;
4948 rtx *pbool, *poval;
4949
4950 /* Expand the operands. */
4951 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4952 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4953 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4954
4955 pbool = poval = NULL;
4956 if (target != const0_rtx)
4957 {
4958 if (is_bool)
4959 pbool = &target;
4960 else
4961 poval = &target;
4962 }
4963 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4964 false, MEMMODEL_SYNC_SEQ_CST,
4965 MEMMODEL_SYNC_SEQ_CST))
4966 return NULL_RTX;
4967
4968 return target;
4969 }
4970
4971 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4972 general form is actually an atomic exchange, and some targets only
4973 support a reduced form with the second argument being a constant 1.
4974 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4975 the results. */
4976
4977 static rtx
4978 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4979 rtx target)
4980 {
4981 rtx val, mem;
4982
4983 /* Expand the operands. */
4984 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4985 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4986
4987 return expand_sync_lock_test_and_set (target, mem, val);
4988 }
4989
4990 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4991
4992 static void
4993 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
4994 {
4995 rtx mem;
4996
4997 /* Expand the operands. */
4998 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4999
5000 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5001 }
5002
5003 /* Given an integer representing an ``enum memmodel'', verify its
5004 correctness and return the memory model enum. */
5005
5006 static enum memmodel
5007 get_memmodel (tree exp)
5008 {
5009 rtx op;
5010 unsigned HOST_WIDE_INT val;
5011 source_location loc
5012 = expansion_point_location_if_in_system_header (input_location);
5013
5014 /* If the parameter is not a constant, it's a run time value so we'll just
5015 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5016 if (TREE_CODE (exp) != INTEGER_CST)
5017 return MEMMODEL_SEQ_CST;
5018
5019 op = expand_normal (exp);
5020
5021 val = INTVAL (op);
5022 if (targetm.memmodel_check)
5023 val = targetm.memmodel_check (val);
5024 else if (val & ~MEMMODEL_MASK)
5025 {
5026 warning_at (loc, OPT_Winvalid_memory_model,
5027 "unknown architecture specifier in memory model to builtin");
5028 return MEMMODEL_SEQ_CST;
5029 }
5030
5031 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5032 if (memmodel_base (val) >= MEMMODEL_LAST)
5033 {
5034 warning_at (loc, OPT_Winvalid_memory_model,
5035 "invalid memory model argument to builtin");
5036 return MEMMODEL_SEQ_CST;
5037 }
5038
5039 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5040 be conservative and promote consume to acquire. */
5041 if (val == MEMMODEL_CONSUME)
5042 val = MEMMODEL_ACQUIRE;
5043
5044 return (enum memmodel) val;
5045 }
5046
5047 /* Expand the __atomic_exchange intrinsic:
5048 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5049 EXP is the CALL_EXPR.
5050 TARGET is an optional place for us to store the results. */
5051
5052 static rtx
5053 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5054 {
5055 rtx val, mem;
5056 enum memmodel model;
5057
5058 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5059
5060 if (!flag_inline_atomics)
5061 return NULL_RTX;
5062
5063 /* Expand the operands. */
5064 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5065 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5066
5067 return expand_atomic_exchange (target, mem, val, model);
5068 }
5069
5070 /* Expand the __atomic_compare_exchange intrinsic:
5071 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5072 TYPE desired, BOOL weak,
5073 enum memmodel success,
5074 enum memmodel failure)
5075 EXP is the CALL_EXPR.
5076 TARGET is an optional place for us to store the results. */
5077
5078 static rtx
5079 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5080 rtx target)
5081 {
5082 rtx expect, desired, mem, oldval;
5083 rtx_code_label *label;
5084 enum memmodel success, failure;
5085 tree weak;
5086 bool is_weak;
5087 source_location loc
5088 = expansion_point_location_if_in_system_header (input_location);
5089
5090 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5091 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5092
5093 if (failure > success)
5094 {
5095 warning_at (loc, OPT_Winvalid_memory_model,
5096 "failure memory model cannot be stronger than success "
5097 "memory model for %<__atomic_compare_exchange%>");
5098 success = MEMMODEL_SEQ_CST;
5099 }
5100
5101 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5102 {
5103 warning_at (loc, OPT_Winvalid_memory_model,
5104 "invalid failure memory model for "
5105 "%<__atomic_compare_exchange%>");
5106 failure = MEMMODEL_SEQ_CST;
5107 success = MEMMODEL_SEQ_CST;
5108 }
5109
5110
5111 if (!flag_inline_atomics)
5112 return NULL_RTX;
5113
5114 /* Expand the operands. */
5115 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5116
5117 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5118 expect = convert_memory_address (Pmode, expect);
5119 expect = gen_rtx_MEM (mode, expect);
5120 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5121
5122 weak = CALL_EXPR_ARG (exp, 3);
5123 is_weak = false;
5124 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5125 is_weak = true;
5126
5127 if (target == const0_rtx)
5128 target = NULL;
5129
5130 /* Lest the rtl backend create a race condition with an imporoper store
5131 to memory, always create a new pseudo for OLDVAL. */
5132 oldval = NULL;
5133
5134 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5135 is_weak, success, failure))
5136 return NULL_RTX;
5137
5138 /* Conditionally store back to EXPECT, lest we create a race condition
5139 with an improper store to memory. */
5140 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5141 the normal case where EXPECT is totally private, i.e. a register. At
5142 which point the store can be unconditional. */
5143 label = gen_label_rtx ();
5144 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5145 GET_MODE (target), 1, label);
5146 emit_move_insn (expect, oldval);
5147 emit_label (label);
5148
5149 return target;
5150 }
5151
5152 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5153 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5154 call. The weak parameter must be dropped to match the expected parameter
5155 list and the expected argument changed from value to pointer to memory
5156 slot. */
5157
5158 static void
5159 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5160 {
5161 unsigned int z;
5162 vec<tree, va_gc> *vec;
5163
5164 vec_alloc (vec, 5);
5165 vec->quick_push (gimple_call_arg (call, 0));
5166 tree expected = gimple_call_arg (call, 1);
5167 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5168 TREE_TYPE (expected));
5169 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5170 if (expd != x)
5171 emit_move_insn (x, expd);
5172 tree v = make_tree (TREE_TYPE (expected), x);
5173 vec->quick_push (build1 (ADDR_EXPR,
5174 build_pointer_type (TREE_TYPE (expected)), v));
5175 vec->quick_push (gimple_call_arg (call, 2));
5176 /* Skip the boolean weak parameter. */
5177 for (z = 4; z < 6; z++)
5178 vec->quick_push (gimple_call_arg (call, z));
5179 built_in_function fncode
5180 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5181 + exact_log2 (GET_MODE_SIZE (mode)));
5182 tree fndecl = builtin_decl_explicit (fncode);
5183 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5184 fndecl);
5185 tree exp = build_call_vec (boolean_type_node, fn, vec);
5186 tree lhs = gimple_call_lhs (call);
5187 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5188 if (lhs)
5189 {
5190 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5191 if (GET_MODE (boolret) != mode)
5192 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5193 x = force_reg (mode, x);
5194 write_complex_part (target, boolret, true);
5195 write_complex_part (target, x, false);
5196 }
5197 }
5198
5199 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5200
5201 void
5202 expand_ifn_atomic_compare_exchange (gcall *call)
5203 {
5204 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5205 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5206 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5207 rtx expect, desired, mem, oldval, boolret;
5208 enum memmodel success, failure;
5209 tree lhs;
5210 bool is_weak;
5211 source_location loc
5212 = expansion_point_location_if_in_system_header (gimple_location (call));
5213
5214 success = get_memmodel (gimple_call_arg (call, 4));
5215 failure = get_memmodel (gimple_call_arg (call, 5));
5216
5217 if (failure > success)
5218 {
5219 warning_at (loc, OPT_Winvalid_memory_model,
5220 "failure memory model cannot be stronger than success "
5221 "memory model for %<__atomic_compare_exchange%>");
5222 success = MEMMODEL_SEQ_CST;
5223 }
5224
5225 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5226 {
5227 warning_at (loc, OPT_Winvalid_memory_model,
5228 "invalid failure memory model for "
5229 "%<__atomic_compare_exchange%>");
5230 failure = MEMMODEL_SEQ_CST;
5231 success = MEMMODEL_SEQ_CST;
5232 }
5233
5234 if (!flag_inline_atomics)
5235 {
5236 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5237 return;
5238 }
5239
5240 /* Expand the operands. */
5241 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5242
5243 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5244 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5245
5246 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5247
5248 boolret = NULL;
5249 oldval = NULL;
5250
5251 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5252 is_weak, success, failure))
5253 {
5254 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5255 return;
5256 }
5257
5258 lhs = gimple_call_lhs (call);
5259 if (lhs)
5260 {
5261 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5262 if (GET_MODE (boolret) != mode)
5263 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5264 write_complex_part (target, boolret, true);
5265 write_complex_part (target, oldval, false);
5266 }
5267 }
5268
5269 /* Expand the __atomic_load intrinsic:
5270 TYPE __atomic_load (TYPE *object, enum memmodel)
5271 EXP is the CALL_EXPR.
5272 TARGET is an optional place for us to store the results. */
5273
5274 static rtx
5275 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5276 {
5277 rtx mem;
5278 enum memmodel model;
5279
5280 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5281 if (is_mm_release (model) || is_mm_acq_rel (model))
5282 {
5283 source_location loc
5284 = expansion_point_location_if_in_system_header (input_location);
5285 warning_at (loc, OPT_Winvalid_memory_model,
5286 "invalid memory model for %<__atomic_load%>");
5287 model = MEMMODEL_SEQ_CST;
5288 }
5289
5290 if (!flag_inline_atomics)
5291 return NULL_RTX;
5292
5293 /* Expand the operand. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
5296 return expand_atomic_load (target, mem, model);
5297 }
5298
5299
5300 /* Expand the __atomic_store intrinsic:
5301 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5302 EXP is the CALL_EXPR.
5303 TARGET is an optional place for us to store the results. */
5304
5305 static rtx
5306 expand_builtin_atomic_store (machine_mode mode, tree exp)
5307 {
5308 rtx mem, val;
5309 enum memmodel model;
5310
5311 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5312 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5313 || is_mm_release (model)))
5314 {
5315 source_location loc
5316 = expansion_point_location_if_in_system_header (input_location);
5317 warning_at (loc, OPT_Winvalid_memory_model,
5318 "invalid memory model for %<__atomic_store%>");
5319 model = MEMMODEL_SEQ_CST;
5320 }
5321
5322 if (!flag_inline_atomics)
5323 return NULL_RTX;
5324
5325 /* Expand the operands. */
5326 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5327 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5328
5329 return expand_atomic_store (mem, val, model, false);
5330 }
5331
5332 /* Expand the __atomic_fetch_XXX intrinsic:
5333 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5334 EXP is the CALL_EXPR.
5335 TARGET is an optional place for us to store the results.
5336 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5337 FETCH_AFTER is true if returning the result of the operation.
5338 FETCH_AFTER is false if returning the value before the operation.
5339 IGNORE is true if the result is not used.
5340 EXT_CALL is the correct builtin for an external call if this cannot be
5341 resolved to an instruction sequence. */
5342
5343 static rtx
5344 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5345 enum rtx_code code, bool fetch_after,
5346 bool ignore, enum built_in_function ext_call)
5347 {
5348 rtx val, mem, ret;
5349 enum memmodel model;
5350 tree fndecl;
5351 tree addr;
5352
5353 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5354
5355 /* Expand the operands. */
5356 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5357 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5358
5359 /* Only try generating instructions if inlining is turned on. */
5360 if (flag_inline_atomics)
5361 {
5362 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5363 if (ret)
5364 return ret;
5365 }
5366
5367 /* Return if a different routine isn't needed for the library call. */
5368 if (ext_call == BUILT_IN_NONE)
5369 return NULL_RTX;
5370
5371 /* Change the call to the specified function. */
5372 fndecl = get_callee_fndecl (exp);
5373 addr = CALL_EXPR_FN (exp);
5374 STRIP_NOPS (addr);
5375
5376 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5377 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5378
5379 /* Expand the call here so we can emit trailing code. */
5380 ret = expand_call (exp, target, ignore);
5381
5382 /* Replace the original function just in case it matters. */
5383 TREE_OPERAND (addr, 0) = fndecl;
5384
5385 /* Then issue the arithmetic correction to return the right result. */
5386 if (!ignore)
5387 {
5388 if (code == NOT)
5389 {
5390 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5391 OPTAB_LIB_WIDEN);
5392 ret = expand_simple_unop (mode, NOT, ret, target, true);
5393 }
5394 else
5395 ret = expand_simple_binop (mode, code, ret, val, target, true,
5396 OPTAB_LIB_WIDEN);
5397 }
5398 return ret;
5399 }
5400
5401 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5402
5403 void
5404 expand_ifn_atomic_bit_test_and (gcall *call)
5405 {
5406 tree ptr = gimple_call_arg (call, 0);
5407 tree bit = gimple_call_arg (call, 1);
5408 tree flag = gimple_call_arg (call, 2);
5409 tree lhs = gimple_call_lhs (call);
5410 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5411 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5412 enum rtx_code code;
5413 optab optab;
5414 struct expand_operand ops[5];
5415
5416 gcc_assert (flag_inline_atomics);
5417
5418 if (gimple_call_num_args (call) == 4)
5419 model = get_memmodel (gimple_call_arg (call, 3));
5420
5421 rtx mem = get_builtin_sync_mem (ptr, mode);
5422 rtx val = expand_expr_force_mode (bit, mode);
5423
5424 switch (gimple_call_internal_fn (call))
5425 {
5426 case IFN_ATOMIC_BIT_TEST_AND_SET:
5427 code = IOR;
5428 optab = atomic_bit_test_and_set_optab;
5429 break;
5430 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5431 code = XOR;
5432 optab = atomic_bit_test_and_complement_optab;
5433 break;
5434 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5435 code = AND;
5436 optab = atomic_bit_test_and_reset_optab;
5437 break;
5438 default:
5439 gcc_unreachable ();
5440 }
5441
5442 if (lhs == NULL_TREE)
5443 {
5444 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5445 val, NULL_RTX, true, OPTAB_DIRECT);
5446 if (code == AND)
5447 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5448 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5449 return;
5450 }
5451
5452 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5453 enum insn_code icode = direct_optab_handler (optab, mode);
5454 gcc_assert (icode != CODE_FOR_nothing);
5455 create_output_operand (&ops[0], target, mode);
5456 create_fixed_operand (&ops[1], mem);
5457 create_convert_operand_to (&ops[2], val, mode, true);
5458 create_integer_operand (&ops[3], model);
5459 create_integer_operand (&ops[4], integer_onep (flag));
5460 if (maybe_expand_insn (icode, 5, ops))
5461 return;
5462
5463 rtx bitval = val;
5464 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5465 val, NULL_RTX, true, OPTAB_DIRECT);
5466 rtx maskval = val;
5467 if (code == AND)
5468 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5469 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5470 code, model, false);
5471 if (integer_onep (flag))
5472 {
5473 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5474 NULL_RTX, true, OPTAB_DIRECT);
5475 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5476 true, OPTAB_DIRECT);
5477 }
5478 else
5479 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5480 OPTAB_DIRECT);
5481 if (result != target)
5482 emit_move_insn (target, result);
5483 }
5484
5485 /* Expand an atomic clear operation.
5486 void _atomic_clear (BOOL *obj, enum memmodel)
5487 EXP is the call expression. */
5488
5489 static rtx
5490 expand_builtin_atomic_clear (tree exp)
5491 {
5492 machine_mode mode;
5493 rtx mem, ret;
5494 enum memmodel model;
5495
5496 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5499
5500 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5501 {
5502 source_location loc
5503 = expansion_point_location_if_in_system_header (input_location);
5504 warning_at (loc, OPT_Winvalid_memory_model,
5505 "invalid memory model for %<__atomic_store%>");
5506 model = MEMMODEL_SEQ_CST;
5507 }
5508
5509 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5510 Failing that, a store is issued by __atomic_store. The only way this can
5511 fail is if the bool type is larger than a word size. Unlikely, but
5512 handle it anyway for completeness. Assume a single threaded model since
5513 there is no atomic support in this case, and no barriers are required. */
5514 ret = expand_atomic_store (mem, const0_rtx, model, true);
5515 if (!ret)
5516 emit_move_insn (mem, const0_rtx);
5517 return const0_rtx;
5518 }
5519
5520 /* Expand an atomic test_and_set operation.
5521 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5522 EXP is the call expression. */
5523
5524 static rtx
5525 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5526 {
5527 rtx mem;
5528 enum memmodel model;
5529 machine_mode mode;
5530
5531 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5532 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5533 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5534
5535 return expand_atomic_test_and_set (target, mem, model);
5536 }
5537
5538
5539 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5540 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5541
5542 static tree
5543 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5544 {
5545 int size;
5546 machine_mode mode;
5547 unsigned int mode_align, type_align;
5548
5549 if (TREE_CODE (arg0) != INTEGER_CST)
5550 return NULL_TREE;
5551
5552 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5553 mode = mode_for_size (size, MODE_INT, 0);
5554 mode_align = GET_MODE_ALIGNMENT (mode);
5555
5556 if (TREE_CODE (arg1) == INTEGER_CST)
5557 {
5558 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5559
5560 /* Either this argument is null, or it's a fake pointer encoding
5561 the alignment of the object. */
5562 val = val & -val;
5563 val *= BITS_PER_UNIT;
5564
5565 if (val == 0 || mode_align < val)
5566 type_align = mode_align;
5567 else
5568 type_align = val;
5569 }
5570 else
5571 {
5572 tree ttype = TREE_TYPE (arg1);
5573
5574 /* This function is usually invoked and folded immediately by the front
5575 end before anything else has a chance to look at it. The pointer
5576 parameter at this point is usually cast to a void *, so check for that
5577 and look past the cast. */
5578 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5579 && VOID_TYPE_P (TREE_TYPE (ttype)))
5580 arg1 = TREE_OPERAND (arg1, 0);
5581
5582 ttype = TREE_TYPE (arg1);
5583 gcc_assert (POINTER_TYPE_P (ttype));
5584
5585 /* Get the underlying type of the object. */
5586 ttype = TREE_TYPE (ttype);
5587 type_align = TYPE_ALIGN (ttype);
5588 }
5589
5590 /* If the object has smaller alignment, the lock free routines cannot
5591 be used. */
5592 if (type_align < mode_align)
5593 return boolean_false_node;
5594
5595 /* Check if a compare_and_swap pattern exists for the mode which represents
5596 the required size. The pattern is not allowed to fail, so the existence
5597 of the pattern indicates support is present. */
5598 if (can_compare_and_swap_p (mode, true))
5599 return boolean_true_node;
5600 else
5601 return boolean_false_node;
5602 }
5603
5604 /* Return true if the parameters to call EXP represent an object which will
5605 always generate lock free instructions. The first argument represents the
5606 size of the object, and the second parameter is a pointer to the object
5607 itself. If NULL is passed for the object, then the result is based on
5608 typical alignment for an object of the specified size. Otherwise return
5609 false. */
5610
5611 static rtx
5612 expand_builtin_atomic_always_lock_free (tree exp)
5613 {
5614 tree size;
5615 tree arg0 = CALL_EXPR_ARG (exp, 0);
5616 tree arg1 = CALL_EXPR_ARG (exp, 1);
5617
5618 if (TREE_CODE (arg0) != INTEGER_CST)
5619 {
5620 error ("non-constant argument 1 to __atomic_always_lock_free");
5621 return const0_rtx;
5622 }
5623
5624 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5625 if (size == boolean_true_node)
5626 return const1_rtx;
5627 return const0_rtx;
5628 }
5629
5630 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5631 is lock free on this architecture. */
5632
5633 static tree
5634 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5635 {
5636 if (!flag_inline_atomics)
5637 return NULL_TREE;
5638
5639 /* If it isn't always lock free, don't generate a result. */
5640 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5641 return boolean_true_node;
5642
5643 return NULL_TREE;
5644 }
5645
5646 /* Return true if the parameters to call EXP represent an object which will
5647 always generate lock free instructions. The first argument represents the
5648 size of the object, and the second parameter is a pointer to the object
5649 itself. If NULL is passed for the object, then the result is based on
5650 typical alignment for an object of the specified size. Otherwise return
5651 NULL*/
5652
5653 static rtx
5654 expand_builtin_atomic_is_lock_free (tree exp)
5655 {
5656 tree size;
5657 tree arg0 = CALL_EXPR_ARG (exp, 0);
5658 tree arg1 = CALL_EXPR_ARG (exp, 1);
5659
5660 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5661 {
5662 error ("non-integer argument 1 to __atomic_is_lock_free");
5663 return NULL_RTX;
5664 }
5665
5666 if (!flag_inline_atomics)
5667 return NULL_RTX;
5668
5669 /* If the value is known at compile time, return the RTX for it. */
5670 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5671 if (size == boolean_true_node)
5672 return const1_rtx;
5673
5674 return NULL_RTX;
5675 }
5676
5677 /* Expand the __atomic_thread_fence intrinsic:
5678 void __atomic_thread_fence (enum memmodel)
5679 EXP is the CALL_EXPR. */
5680
5681 static void
5682 expand_builtin_atomic_thread_fence (tree exp)
5683 {
5684 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5685 expand_mem_thread_fence (model);
5686 }
5687
5688 /* Expand the __atomic_signal_fence intrinsic:
5689 void __atomic_signal_fence (enum memmodel)
5690 EXP is the CALL_EXPR. */
5691
5692 static void
5693 expand_builtin_atomic_signal_fence (tree exp)
5694 {
5695 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5696 expand_mem_signal_fence (model);
5697 }
5698
5699 /* Expand the __sync_synchronize intrinsic. */
5700
5701 static void
5702 expand_builtin_sync_synchronize (void)
5703 {
5704 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5705 }
5706
5707 static rtx
5708 expand_builtin_thread_pointer (tree exp, rtx target)
5709 {
5710 enum insn_code icode;
5711 if (!validate_arglist (exp, VOID_TYPE))
5712 return const0_rtx;
5713 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5714 if (icode != CODE_FOR_nothing)
5715 {
5716 struct expand_operand op;
5717 /* If the target is not sutitable then create a new target. */
5718 if (target == NULL_RTX
5719 || !REG_P (target)
5720 || GET_MODE (target) != Pmode)
5721 target = gen_reg_rtx (Pmode);
5722 create_output_operand (&op, target, Pmode);
5723 expand_insn (icode, 1, &op);
5724 return target;
5725 }
5726 error ("__builtin_thread_pointer is not supported on this target");
5727 return const0_rtx;
5728 }
5729
5730 static void
5731 expand_builtin_set_thread_pointer (tree exp)
5732 {
5733 enum insn_code icode;
5734 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5735 return;
5736 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5737 if (icode != CODE_FOR_nothing)
5738 {
5739 struct expand_operand op;
5740 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5741 Pmode, EXPAND_NORMAL);
5742 create_input_operand (&op, val, Pmode);
5743 expand_insn (icode, 1, &op);
5744 return;
5745 }
5746 error ("__builtin_set_thread_pointer is not supported on this target");
5747 }
5748
5749 \f
5750 /* Emit code to restore the current value of stack. */
5751
5752 static void
5753 expand_stack_restore (tree var)
5754 {
5755 rtx_insn *prev;
5756 rtx sa = expand_normal (var);
5757
5758 sa = convert_memory_address (Pmode, sa);
5759
5760 prev = get_last_insn ();
5761 emit_stack_restore (SAVE_BLOCK, sa);
5762
5763 record_new_stack_level ();
5764
5765 fixup_args_size_notes (prev, get_last_insn (), 0);
5766 }
5767
5768 /* Emit code to save the current value of stack. */
5769
5770 static rtx
5771 expand_stack_save (void)
5772 {
5773 rtx ret = NULL_RTX;
5774
5775 emit_stack_save (SAVE_BLOCK, &ret);
5776 return ret;
5777 }
5778
5779
5780 /* Expand an expression EXP that calls a built-in function,
5781 with result going to TARGET if that's convenient
5782 (and in mode MODE if that's convenient).
5783 SUBTARGET may be used as the target for computing one of EXP's operands.
5784 IGNORE is nonzero if the value is to be ignored. */
5785
5786 rtx
5787 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5788 int ignore)
5789 {
5790 tree fndecl = get_callee_fndecl (exp);
5791 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5792 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5793 int flags;
5794
5795 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5796 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5797
5798 /* When ASan is enabled, we don't want to expand some memory/string
5799 builtins and rely on libsanitizer's hooks. This allows us to avoid
5800 redundant checks and be sure, that possible overflow will be detected
5801 by ASan. */
5802
5803 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5804 return expand_call (exp, target, ignore);
5805
5806 /* When not optimizing, generate calls to library functions for a certain
5807 set of builtins. */
5808 if (!optimize
5809 && !called_as_built_in (fndecl)
5810 && fcode != BUILT_IN_FORK
5811 && fcode != BUILT_IN_EXECL
5812 && fcode != BUILT_IN_EXECV
5813 && fcode != BUILT_IN_EXECLP
5814 && fcode != BUILT_IN_EXECLE
5815 && fcode != BUILT_IN_EXECVP
5816 && fcode != BUILT_IN_EXECVE
5817 && fcode != BUILT_IN_ALLOCA
5818 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5819 && fcode != BUILT_IN_FREE
5820 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5821 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5822 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5823 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5824 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5825 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5826 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5827 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5828 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5829 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5830 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5831 && fcode != BUILT_IN_CHKP_BNDRET)
5832 return expand_call (exp, target, ignore);
5833
5834 /* The built-in function expanders test for target == const0_rtx
5835 to determine whether the function's result will be ignored. */
5836 if (ignore)
5837 target = const0_rtx;
5838
5839 /* If the result of a pure or const built-in function is ignored, and
5840 none of its arguments are volatile, we can avoid expanding the
5841 built-in call and just evaluate the arguments for side-effects. */
5842 if (target == const0_rtx
5843 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5844 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5845 {
5846 bool volatilep = false;
5847 tree arg;
5848 call_expr_arg_iterator iter;
5849
5850 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5851 if (TREE_THIS_VOLATILE (arg))
5852 {
5853 volatilep = true;
5854 break;
5855 }
5856
5857 if (! volatilep)
5858 {
5859 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5860 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5861 return const0_rtx;
5862 }
5863 }
5864
5865 /* expand_builtin_with_bounds is supposed to be used for
5866 instrumented builtin calls. */
5867 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5868
5869 switch (fcode)
5870 {
5871 CASE_FLT_FN (BUILT_IN_FABS):
5872 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5873 case BUILT_IN_FABSD32:
5874 case BUILT_IN_FABSD64:
5875 case BUILT_IN_FABSD128:
5876 target = expand_builtin_fabs (exp, target, subtarget);
5877 if (target)
5878 return target;
5879 break;
5880
5881 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5882 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5883 target = expand_builtin_copysign (exp, target, subtarget);
5884 if (target)
5885 return target;
5886 break;
5887
5888 /* Just do a normal library call if we were unable to fold
5889 the values. */
5890 CASE_FLT_FN (BUILT_IN_CABS):
5891 break;
5892
5893 CASE_FLT_FN (BUILT_IN_FMA):
5894 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5895 if (target)
5896 return target;
5897 break;
5898
5899 CASE_FLT_FN (BUILT_IN_ILOGB):
5900 if (! flag_unsafe_math_optimizations)
5901 break;
5902 CASE_FLT_FN (BUILT_IN_ISINF):
5903 CASE_FLT_FN (BUILT_IN_FINITE):
5904 case BUILT_IN_ISFINITE:
5905 case BUILT_IN_ISNORMAL:
5906 target = expand_builtin_interclass_mathfn (exp, target);
5907 if (target)
5908 return target;
5909 break;
5910
5911 CASE_FLT_FN (BUILT_IN_ICEIL):
5912 CASE_FLT_FN (BUILT_IN_LCEIL):
5913 CASE_FLT_FN (BUILT_IN_LLCEIL):
5914 CASE_FLT_FN (BUILT_IN_LFLOOR):
5915 CASE_FLT_FN (BUILT_IN_IFLOOR):
5916 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5917 target = expand_builtin_int_roundingfn (exp, target);
5918 if (target)
5919 return target;
5920 break;
5921
5922 CASE_FLT_FN (BUILT_IN_IRINT):
5923 CASE_FLT_FN (BUILT_IN_LRINT):
5924 CASE_FLT_FN (BUILT_IN_LLRINT):
5925 CASE_FLT_FN (BUILT_IN_IROUND):
5926 CASE_FLT_FN (BUILT_IN_LROUND):
5927 CASE_FLT_FN (BUILT_IN_LLROUND):
5928 target = expand_builtin_int_roundingfn_2 (exp, target);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_POWI):
5934 target = expand_builtin_powi (exp, target);
5935 if (target)
5936 return target;
5937 break;
5938
5939 CASE_FLT_FN (BUILT_IN_CEXPI):
5940 target = expand_builtin_cexpi (exp, target);
5941 gcc_assert (target);
5942 return target;
5943
5944 CASE_FLT_FN (BUILT_IN_SIN):
5945 CASE_FLT_FN (BUILT_IN_COS):
5946 if (! flag_unsafe_math_optimizations)
5947 break;
5948 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5949 if (target)
5950 return target;
5951 break;
5952
5953 CASE_FLT_FN (BUILT_IN_SINCOS):
5954 if (! flag_unsafe_math_optimizations)
5955 break;
5956 target = expand_builtin_sincos (exp);
5957 if (target)
5958 return target;
5959 break;
5960
5961 case BUILT_IN_APPLY_ARGS:
5962 return expand_builtin_apply_args ();
5963
5964 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5965 FUNCTION with a copy of the parameters described by
5966 ARGUMENTS, and ARGSIZE. It returns a block of memory
5967 allocated on the stack into which is stored all the registers
5968 that might possibly be used for returning the result of a
5969 function. ARGUMENTS is the value returned by
5970 __builtin_apply_args. ARGSIZE is the number of bytes of
5971 arguments that must be copied. ??? How should this value be
5972 computed? We'll also need a safe worst case value for varargs
5973 functions. */
5974 case BUILT_IN_APPLY:
5975 if (!validate_arglist (exp, POINTER_TYPE,
5976 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5977 && !validate_arglist (exp, REFERENCE_TYPE,
5978 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5979 return const0_rtx;
5980 else
5981 {
5982 rtx ops[3];
5983
5984 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5985 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5986 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5987
5988 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5989 }
5990
5991 /* __builtin_return (RESULT) causes the function to return the
5992 value described by RESULT. RESULT is address of the block of
5993 memory returned by __builtin_apply. */
5994 case BUILT_IN_RETURN:
5995 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5996 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5997 return const0_rtx;
5998
5999 case BUILT_IN_SAVEREGS:
6000 return expand_builtin_saveregs ();
6001
6002 case BUILT_IN_VA_ARG_PACK:
6003 /* All valid uses of __builtin_va_arg_pack () are removed during
6004 inlining. */
6005 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6006 return const0_rtx;
6007
6008 case BUILT_IN_VA_ARG_PACK_LEN:
6009 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6010 inlining. */
6011 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6012 return const0_rtx;
6013
6014 /* Return the address of the first anonymous stack arg. */
6015 case BUILT_IN_NEXT_ARG:
6016 if (fold_builtin_next_arg (exp, false))
6017 return const0_rtx;
6018 return expand_builtin_next_arg ();
6019
6020 case BUILT_IN_CLEAR_CACHE:
6021 target = expand_builtin___clear_cache (exp);
6022 if (target)
6023 return target;
6024 break;
6025
6026 case BUILT_IN_CLASSIFY_TYPE:
6027 return expand_builtin_classify_type (exp);
6028
6029 case BUILT_IN_CONSTANT_P:
6030 return const0_rtx;
6031
6032 case BUILT_IN_FRAME_ADDRESS:
6033 case BUILT_IN_RETURN_ADDRESS:
6034 return expand_builtin_frame_address (fndecl, exp);
6035
6036 /* Returns the address of the area where the structure is returned.
6037 0 otherwise. */
6038 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6039 if (call_expr_nargs (exp) != 0
6040 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6041 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6042 return const0_rtx;
6043 else
6044 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6045
6046 case BUILT_IN_ALLOCA:
6047 case BUILT_IN_ALLOCA_WITH_ALIGN:
6048 /* If the allocation stems from the declaration of a variable-sized
6049 object, it cannot accumulate. */
6050 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6051 if (target)
6052 return target;
6053 break;
6054
6055 case BUILT_IN_STACK_SAVE:
6056 return expand_stack_save ();
6057
6058 case BUILT_IN_STACK_RESTORE:
6059 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6060 return const0_rtx;
6061
6062 case BUILT_IN_BSWAP16:
6063 case BUILT_IN_BSWAP32:
6064 case BUILT_IN_BSWAP64:
6065 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6066 if (target)
6067 return target;
6068 break;
6069
6070 CASE_INT_FN (BUILT_IN_FFS):
6071 target = expand_builtin_unop (target_mode, exp, target,
6072 subtarget, ffs_optab);
6073 if (target)
6074 return target;
6075 break;
6076
6077 CASE_INT_FN (BUILT_IN_CLZ):
6078 target = expand_builtin_unop (target_mode, exp, target,
6079 subtarget, clz_optab);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_INT_FN (BUILT_IN_CTZ):
6085 target = expand_builtin_unop (target_mode, exp, target,
6086 subtarget, ctz_optab);
6087 if (target)
6088 return target;
6089 break;
6090
6091 CASE_INT_FN (BUILT_IN_CLRSB):
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, clrsb_optab);
6094 if (target)
6095 return target;
6096 break;
6097
6098 CASE_INT_FN (BUILT_IN_POPCOUNT):
6099 target = expand_builtin_unop (target_mode, exp, target,
6100 subtarget, popcount_optab);
6101 if (target)
6102 return target;
6103 break;
6104
6105 CASE_INT_FN (BUILT_IN_PARITY):
6106 target = expand_builtin_unop (target_mode, exp, target,
6107 subtarget, parity_optab);
6108 if (target)
6109 return target;
6110 break;
6111
6112 case BUILT_IN_STRLEN:
6113 target = expand_builtin_strlen (exp, target, target_mode);
6114 if (target)
6115 return target;
6116 break;
6117
6118 case BUILT_IN_STRCPY:
6119 target = expand_builtin_strcpy (exp, target);
6120 if (target)
6121 return target;
6122 break;
6123
6124 case BUILT_IN_STRNCPY:
6125 target = expand_builtin_strncpy (exp, target);
6126 if (target)
6127 return target;
6128 break;
6129
6130 case BUILT_IN_STPCPY:
6131 target = expand_builtin_stpcpy (exp, target, mode);
6132 if (target)
6133 return target;
6134 break;
6135
6136 case BUILT_IN_MEMCPY:
6137 target = expand_builtin_memcpy (exp, target);
6138 if (target)
6139 return target;
6140 break;
6141
6142 case BUILT_IN_MEMPCPY:
6143 target = expand_builtin_mempcpy (exp, target, mode);
6144 if (target)
6145 return target;
6146 break;
6147
6148 case BUILT_IN_MEMSET:
6149 target = expand_builtin_memset (exp, target, mode);
6150 if (target)
6151 return target;
6152 break;
6153
6154 case BUILT_IN_BZERO:
6155 target = expand_builtin_bzero (exp);
6156 if (target)
6157 return target;
6158 break;
6159
6160 case BUILT_IN_STRCMP:
6161 target = expand_builtin_strcmp (exp, target);
6162 if (target)
6163 return target;
6164 break;
6165
6166 case BUILT_IN_STRNCMP:
6167 target = expand_builtin_strncmp (exp, target, mode);
6168 if (target)
6169 return target;
6170 break;
6171
6172 case BUILT_IN_BCMP:
6173 case BUILT_IN_MEMCMP:
6174 case BUILT_IN_MEMCMP_EQ:
6175 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6176 if (target)
6177 return target;
6178 if (fcode == BUILT_IN_MEMCMP_EQ)
6179 {
6180 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6181 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6182 }
6183 break;
6184
6185 case BUILT_IN_SETJMP:
6186 /* This should have been lowered to the builtins below. */
6187 gcc_unreachable ();
6188
6189 case BUILT_IN_SETJMP_SETUP:
6190 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6191 and the receiver label. */
6192 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6193 {
6194 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6195 VOIDmode, EXPAND_NORMAL);
6196 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6197 rtx_insn *label_r = label_rtx (label);
6198
6199 /* This is copied from the handling of non-local gotos. */
6200 expand_builtin_setjmp_setup (buf_addr, label_r);
6201 nonlocal_goto_handler_labels
6202 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6203 nonlocal_goto_handler_labels);
6204 /* ??? Do not let expand_label treat us as such since we would
6205 not want to be both on the list of non-local labels and on
6206 the list of forced labels. */
6207 FORCED_LABEL (label) = 0;
6208 return const0_rtx;
6209 }
6210 break;
6211
6212 case BUILT_IN_SETJMP_RECEIVER:
6213 /* __builtin_setjmp_receiver is passed the receiver label. */
6214 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6215 {
6216 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6217 rtx_insn *label_r = label_rtx (label);
6218
6219 expand_builtin_setjmp_receiver (label_r);
6220 return const0_rtx;
6221 }
6222 break;
6223
6224 /* __builtin_longjmp is passed a pointer to an array of five words.
6225 It's similar to the C library longjmp function but works with
6226 __builtin_setjmp above. */
6227 case BUILT_IN_LONGJMP:
6228 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6229 {
6230 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6231 VOIDmode, EXPAND_NORMAL);
6232 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6233
6234 if (value != const1_rtx)
6235 {
6236 error ("%<__builtin_longjmp%> second argument must be 1");
6237 return const0_rtx;
6238 }
6239
6240 expand_builtin_longjmp (buf_addr, value);
6241 return const0_rtx;
6242 }
6243 break;
6244
6245 case BUILT_IN_NONLOCAL_GOTO:
6246 target = expand_builtin_nonlocal_goto (exp);
6247 if (target)
6248 return target;
6249 break;
6250
6251 /* This updates the setjmp buffer that is its argument with the value
6252 of the current stack pointer. */
6253 case BUILT_IN_UPDATE_SETJMP_BUF:
6254 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6255 {
6256 rtx buf_addr
6257 = expand_normal (CALL_EXPR_ARG (exp, 0));
6258
6259 expand_builtin_update_setjmp_buf (buf_addr);
6260 return const0_rtx;
6261 }
6262 break;
6263
6264 case BUILT_IN_TRAP:
6265 expand_builtin_trap ();
6266 return const0_rtx;
6267
6268 case BUILT_IN_UNREACHABLE:
6269 expand_builtin_unreachable ();
6270 return const0_rtx;
6271
6272 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6273 case BUILT_IN_SIGNBITD32:
6274 case BUILT_IN_SIGNBITD64:
6275 case BUILT_IN_SIGNBITD128:
6276 target = expand_builtin_signbit (exp, target);
6277 if (target)
6278 return target;
6279 break;
6280
6281 /* Various hooks for the DWARF 2 __throw routine. */
6282 case BUILT_IN_UNWIND_INIT:
6283 expand_builtin_unwind_init ();
6284 return const0_rtx;
6285 case BUILT_IN_DWARF_CFA:
6286 return virtual_cfa_rtx;
6287 #ifdef DWARF2_UNWIND_INFO
6288 case BUILT_IN_DWARF_SP_COLUMN:
6289 return expand_builtin_dwarf_sp_column ();
6290 case BUILT_IN_INIT_DWARF_REG_SIZES:
6291 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6292 return const0_rtx;
6293 #endif
6294 case BUILT_IN_FROB_RETURN_ADDR:
6295 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6296 case BUILT_IN_EXTRACT_RETURN_ADDR:
6297 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6298 case BUILT_IN_EH_RETURN:
6299 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6300 CALL_EXPR_ARG (exp, 1));
6301 return const0_rtx;
6302 case BUILT_IN_EH_RETURN_DATA_REGNO:
6303 return expand_builtin_eh_return_data_regno (exp);
6304 case BUILT_IN_EXTEND_POINTER:
6305 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6306 case BUILT_IN_EH_POINTER:
6307 return expand_builtin_eh_pointer (exp);
6308 case BUILT_IN_EH_FILTER:
6309 return expand_builtin_eh_filter (exp);
6310 case BUILT_IN_EH_COPY_VALUES:
6311 return expand_builtin_eh_copy_values (exp);
6312
6313 case BUILT_IN_VA_START:
6314 return expand_builtin_va_start (exp);
6315 case BUILT_IN_VA_END:
6316 return expand_builtin_va_end (exp);
6317 case BUILT_IN_VA_COPY:
6318 return expand_builtin_va_copy (exp);
6319 case BUILT_IN_EXPECT:
6320 return expand_builtin_expect (exp, target);
6321 case BUILT_IN_ASSUME_ALIGNED:
6322 return expand_builtin_assume_aligned (exp, target);
6323 case BUILT_IN_PREFETCH:
6324 expand_builtin_prefetch (exp);
6325 return const0_rtx;
6326
6327 case BUILT_IN_INIT_TRAMPOLINE:
6328 return expand_builtin_init_trampoline (exp, true);
6329 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6330 return expand_builtin_init_trampoline (exp, false);
6331 case BUILT_IN_ADJUST_TRAMPOLINE:
6332 return expand_builtin_adjust_trampoline (exp);
6333
6334 case BUILT_IN_FORK:
6335 case BUILT_IN_EXECL:
6336 case BUILT_IN_EXECV:
6337 case BUILT_IN_EXECLP:
6338 case BUILT_IN_EXECLE:
6339 case BUILT_IN_EXECVP:
6340 case BUILT_IN_EXECVE:
6341 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6342 if (target)
6343 return target;
6344 break;
6345
6346 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6347 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6351 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6352 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6353 if (target)
6354 return target;
6355 break;
6356
6357 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6358 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6362 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6363 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6364 if (target)
6365 return target;
6366 break;
6367
6368 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6369 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6370 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6373 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6374 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6375 if (target)
6376 return target;
6377 break;
6378
6379 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6380 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6381 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6384 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6385 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6386 if (target)
6387 return target;
6388 break;
6389
6390 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6391 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6395 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6396 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6397 if (target)
6398 return target;
6399 break;
6400
6401 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6402 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6407 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6408 if (target)
6409 return target;
6410 break;
6411
6412 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6413 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6418 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6419 if (target)
6420 return target;
6421 break;
6422
6423 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6424 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6429 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6430 if (target)
6431 return target;
6432 break;
6433
6434 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6435 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6436 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6440 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6441 if (target)
6442 return target;
6443 break;
6444
6445 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6446 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6447 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6451 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6452 if (target)
6453 return target;
6454 break;
6455
6456 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6457 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6462 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6463 if (target)
6464 return target;
6465 break;
6466
6467 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6468 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6473 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6474 if (target)
6475 return target;
6476 break;
6477
6478 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6479 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6483 if (mode == VOIDmode)
6484 mode = TYPE_MODE (boolean_type_node);
6485 if (!target || !register_operand (target, mode))
6486 target = gen_reg_rtx (mode);
6487
6488 mode = get_builtin_sync_mode
6489 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6490 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6491 if (target)
6492 return target;
6493 break;
6494
6495 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6496 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6500 mode = get_builtin_sync_mode
6501 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6502 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6503 if (target)
6504 return target;
6505 break;
6506
6507 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6508 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6513 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6514 if (target)
6515 return target;
6516 break;
6517
6518 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6519 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6520 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6524 expand_builtin_sync_lock_release (mode, exp);
6525 return const0_rtx;
6526
6527 case BUILT_IN_SYNC_SYNCHRONIZE:
6528 expand_builtin_sync_synchronize ();
6529 return const0_rtx;
6530
6531 case BUILT_IN_ATOMIC_EXCHANGE_1:
6532 case BUILT_IN_ATOMIC_EXCHANGE_2:
6533 case BUILT_IN_ATOMIC_EXCHANGE_4:
6534 case BUILT_IN_ATOMIC_EXCHANGE_8:
6535 case BUILT_IN_ATOMIC_EXCHANGE_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6537 target = expand_builtin_atomic_exchange (mode, exp, target);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6543 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6547 {
6548 unsigned int nargs, z;
6549 vec<tree, va_gc> *vec;
6550
6551 mode =
6552 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6553 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6554 if (target)
6555 return target;
6556
6557 /* If this is turned into an external library call, the weak parameter
6558 must be dropped to match the expected parameter list. */
6559 nargs = call_expr_nargs (exp);
6560 vec_alloc (vec, nargs - 1);
6561 for (z = 0; z < 3; z++)
6562 vec->quick_push (CALL_EXPR_ARG (exp, z));
6563 /* Skip the boolean weak parameter. */
6564 for (z = 4; z < 6; z++)
6565 vec->quick_push (CALL_EXPR_ARG (exp, z));
6566 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6567 break;
6568 }
6569
6570 case BUILT_IN_ATOMIC_LOAD_1:
6571 case BUILT_IN_ATOMIC_LOAD_2:
6572 case BUILT_IN_ATOMIC_LOAD_4:
6573 case BUILT_IN_ATOMIC_LOAD_8:
6574 case BUILT_IN_ATOMIC_LOAD_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6576 target = expand_builtin_atomic_load (mode, exp, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_ATOMIC_STORE_1:
6582 case BUILT_IN_ATOMIC_STORE_2:
6583 case BUILT_IN_ATOMIC_STORE_4:
6584 case BUILT_IN_ATOMIC_STORE_8:
6585 case BUILT_IN_ATOMIC_STORE_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6587 target = expand_builtin_atomic_store (mode, exp);
6588 if (target)
6589 return const0_rtx;
6590 break;
6591
6592 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6593 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6594 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6597 {
6598 enum built_in_function lib;
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6600 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6601 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6602 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6603 ignore, lib);
6604 if (target)
6605 return target;
6606 break;
6607 }
6608 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6609 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6610 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6613 {
6614 enum built_in_function lib;
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6616 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6617 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6618 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6619 ignore, lib);
6620 if (target)
6621 return target;
6622 break;
6623 }
6624 case BUILT_IN_ATOMIC_AND_FETCH_1:
6625 case BUILT_IN_ATOMIC_AND_FETCH_2:
6626 case BUILT_IN_ATOMIC_AND_FETCH_4:
6627 case BUILT_IN_ATOMIC_AND_FETCH_8:
6628 case BUILT_IN_ATOMIC_AND_FETCH_16:
6629 {
6630 enum built_in_function lib;
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6632 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6633 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6634 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6635 ignore, lib);
6636 if (target)
6637 return target;
6638 break;
6639 }
6640 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6641 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6642 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6645 {
6646 enum built_in_function lib;
6647 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6648 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6649 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6650 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6651 ignore, lib);
6652 if (target)
6653 return target;
6654 break;
6655 }
6656 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6657 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6658 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6661 {
6662 enum built_in_function lib;
6663 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6664 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6665 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6666 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6667 ignore, lib);
6668 if (target)
6669 return target;
6670 break;
6671 }
6672 case BUILT_IN_ATOMIC_OR_FETCH_1:
6673 case BUILT_IN_ATOMIC_OR_FETCH_2:
6674 case BUILT_IN_ATOMIC_OR_FETCH_4:
6675 case BUILT_IN_ATOMIC_OR_FETCH_8:
6676 case BUILT_IN_ATOMIC_OR_FETCH_16:
6677 {
6678 enum built_in_function lib;
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6680 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6681 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6682 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6683 ignore, lib);
6684 if (target)
6685 return target;
6686 break;
6687 }
6688 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6689 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6690 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6693 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6695 ignore, BUILT_IN_NONE);
6696 if (target)
6697 return target;
6698 break;
6699
6700 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6701 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6702 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6707 ignore, BUILT_IN_NONE);
6708 if (target)
6709 return target;
6710 break;
6711
6712 case BUILT_IN_ATOMIC_FETCH_AND_1:
6713 case BUILT_IN_ATOMIC_FETCH_AND_2:
6714 case BUILT_IN_ATOMIC_FETCH_AND_4:
6715 case BUILT_IN_ATOMIC_FETCH_AND_8:
6716 case BUILT_IN_ATOMIC_FETCH_AND_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6725 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6726 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6737 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6738 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6743 ignore, BUILT_IN_NONE);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_FETCH_OR_1:
6749 case BUILT_IN_ATOMIC_FETCH_OR_2:
6750 case BUILT_IN_ATOMIC_FETCH_OR_4:
6751 case BUILT_IN_ATOMIC_FETCH_OR_8:
6752 case BUILT_IN_ATOMIC_FETCH_OR_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6755 ignore, BUILT_IN_NONE);
6756 if (target)
6757 return target;
6758 break;
6759
6760 case BUILT_IN_ATOMIC_TEST_AND_SET:
6761 return expand_builtin_atomic_test_and_set (exp, target);
6762
6763 case BUILT_IN_ATOMIC_CLEAR:
6764 return expand_builtin_atomic_clear (exp);
6765
6766 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6767 return expand_builtin_atomic_always_lock_free (exp);
6768
6769 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6770 target = expand_builtin_atomic_is_lock_free (exp);
6771 if (target)
6772 return target;
6773 break;
6774
6775 case BUILT_IN_ATOMIC_THREAD_FENCE:
6776 expand_builtin_atomic_thread_fence (exp);
6777 return const0_rtx;
6778
6779 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6780 expand_builtin_atomic_signal_fence (exp);
6781 return const0_rtx;
6782
6783 case BUILT_IN_OBJECT_SIZE:
6784 return expand_builtin_object_size (exp);
6785
6786 case BUILT_IN_MEMCPY_CHK:
6787 case BUILT_IN_MEMPCPY_CHK:
6788 case BUILT_IN_MEMMOVE_CHK:
6789 case BUILT_IN_MEMSET_CHK:
6790 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6791 if (target)
6792 return target;
6793 break;
6794
6795 case BUILT_IN_STRCPY_CHK:
6796 case BUILT_IN_STPCPY_CHK:
6797 case BUILT_IN_STRNCPY_CHK:
6798 case BUILT_IN_STPNCPY_CHK:
6799 case BUILT_IN_STRCAT_CHK:
6800 case BUILT_IN_STRNCAT_CHK:
6801 case BUILT_IN_SNPRINTF_CHK:
6802 case BUILT_IN_VSNPRINTF_CHK:
6803 maybe_emit_chk_warning (exp, fcode);
6804 break;
6805
6806 case BUILT_IN_SPRINTF_CHK:
6807 case BUILT_IN_VSPRINTF_CHK:
6808 maybe_emit_sprintf_chk_warning (exp, fcode);
6809 break;
6810
6811 case BUILT_IN_FREE:
6812 if (warn_free_nonheap_object)
6813 maybe_emit_free_warning (exp);
6814 break;
6815
6816 case BUILT_IN_THREAD_POINTER:
6817 return expand_builtin_thread_pointer (exp, target);
6818
6819 case BUILT_IN_SET_THREAD_POINTER:
6820 expand_builtin_set_thread_pointer (exp);
6821 return const0_rtx;
6822
6823 case BUILT_IN_CILK_DETACH:
6824 expand_builtin_cilk_detach (exp);
6825 return const0_rtx;
6826
6827 case BUILT_IN_CILK_POP_FRAME:
6828 expand_builtin_cilk_pop_frame (exp);
6829 return const0_rtx;
6830
6831 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6832 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6833 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6834 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6835 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6836 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6839 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6840 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6841 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6842 /* We allow user CHKP builtins if Pointer Bounds
6843 Checker is off. */
6844 if (!chkp_function_instrumented_p (current_function_decl))
6845 {
6846 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6847 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6848 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6849 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6850 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6851 return expand_normal (CALL_EXPR_ARG (exp, 0));
6852 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6853 return expand_normal (size_zero_node);
6854 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6855 return expand_normal (size_int (-1));
6856 else
6857 return const0_rtx;
6858 }
6859 /* FALLTHROUGH */
6860
6861 case BUILT_IN_CHKP_BNDMK:
6862 case BUILT_IN_CHKP_BNDSTX:
6863 case BUILT_IN_CHKP_BNDCL:
6864 case BUILT_IN_CHKP_BNDCU:
6865 case BUILT_IN_CHKP_BNDLDX:
6866 case BUILT_IN_CHKP_BNDRET:
6867 case BUILT_IN_CHKP_INTERSECT:
6868 case BUILT_IN_CHKP_NARROW:
6869 case BUILT_IN_CHKP_EXTRACT_LOWER:
6870 case BUILT_IN_CHKP_EXTRACT_UPPER:
6871 /* Software implementation of Pointer Bounds Checker is NYI.
6872 Target support is required. */
6873 error ("Your target platform does not support -fcheck-pointer-bounds");
6874 break;
6875
6876 case BUILT_IN_ACC_ON_DEVICE:
6877 /* Do library call, if we failed to expand the builtin when
6878 folding. */
6879 break;
6880
6881 default: /* just do library call, if unknown builtin */
6882 break;
6883 }
6884
6885 /* The switch statement above can drop through to cause the function
6886 to be called normally. */
6887 return expand_call (exp, target, ignore);
6888 }
6889
6890 /* Similar to expand_builtin but is used for instrumented calls. */
6891
6892 rtx
6893 expand_builtin_with_bounds (tree exp, rtx target,
6894 rtx subtarget ATTRIBUTE_UNUSED,
6895 machine_mode mode, int ignore)
6896 {
6897 tree fndecl = get_callee_fndecl (exp);
6898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6899
6900 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6901
6902 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6903 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6904
6905 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6906 && fcode < END_CHKP_BUILTINS);
6907
6908 switch (fcode)
6909 {
6910 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6911 target = expand_builtin_memcpy_with_bounds (exp, target);
6912 if (target)
6913 return target;
6914 break;
6915
6916 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6917 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6923 target = expand_builtin_memset_with_bounds (exp, target, mode);
6924 if (target)
6925 return target;
6926 break;
6927
6928 default:
6929 break;
6930 }
6931
6932 /* The switch statement above can drop through to cause the function
6933 to be called normally. */
6934 return expand_call (exp, target, ignore);
6935 }
6936
6937 /* Determine whether a tree node represents a call to a built-in
6938 function. If the tree T is a call to a built-in function with
6939 the right number of arguments of the appropriate types, return
6940 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6941 Otherwise the return value is END_BUILTINS. */
6942
6943 enum built_in_function
6944 builtin_mathfn_code (const_tree t)
6945 {
6946 const_tree fndecl, arg, parmlist;
6947 const_tree argtype, parmtype;
6948 const_call_expr_arg_iterator iter;
6949
6950 if (TREE_CODE (t) != CALL_EXPR
6951 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6952 return END_BUILTINS;
6953
6954 fndecl = get_callee_fndecl (t);
6955 if (fndecl == NULL_TREE
6956 || TREE_CODE (fndecl) != FUNCTION_DECL
6957 || ! DECL_BUILT_IN (fndecl)
6958 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6959 return END_BUILTINS;
6960
6961 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6962 init_const_call_expr_arg_iterator (t, &iter);
6963 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6964 {
6965 /* If a function doesn't take a variable number of arguments,
6966 the last element in the list will have type `void'. */
6967 parmtype = TREE_VALUE (parmlist);
6968 if (VOID_TYPE_P (parmtype))
6969 {
6970 if (more_const_call_expr_args_p (&iter))
6971 return END_BUILTINS;
6972 return DECL_FUNCTION_CODE (fndecl);
6973 }
6974
6975 if (! more_const_call_expr_args_p (&iter))
6976 return END_BUILTINS;
6977
6978 arg = next_const_call_expr_arg (&iter);
6979 argtype = TREE_TYPE (arg);
6980
6981 if (SCALAR_FLOAT_TYPE_P (parmtype))
6982 {
6983 if (! SCALAR_FLOAT_TYPE_P (argtype))
6984 return END_BUILTINS;
6985 }
6986 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6987 {
6988 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6989 return END_BUILTINS;
6990 }
6991 else if (POINTER_TYPE_P (parmtype))
6992 {
6993 if (! POINTER_TYPE_P (argtype))
6994 return END_BUILTINS;
6995 }
6996 else if (INTEGRAL_TYPE_P (parmtype))
6997 {
6998 if (! INTEGRAL_TYPE_P (argtype))
6999 return END_BUILTINS;
7000 }
7001 else
7002 return END_BUILTINS;
7003 }
7004
7005 /* Variable-length argument list. */
7006 return DECL_FUNCTION_CODE (fndecl);
7007 }
7008
7009 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7010 evaluate to a constant. */
7011
7012 static tree
7013 fold_builtin_constant_p (tree arg)
7014 {
7015 /* We return 1 for a numeric type that's known to be a constant
7016 value at compile-time or for an aggregate type that's a
7017 literal constant. */
7018 STRIP_NOPS (arg);
7019
7020 /* If we know this is a constant, emit the constant of one. */
7021 if (CONSTANT_CLASS_P (arg)
7022 || (TREE_CODE (arg) == CONSTRUCTOR
7023 && TREE_CONSTANT (arg)))
7024 return integer_one_node;
7025 if (TREE_CODE (arg) == ADDR_EXPR)
7026 {
7027 tree op = TREE_OPERAND (arg, 0);
7028 if (TREE_CODE (op) == STRING_CST
7029 || (TREE_CODE (op) == ARRAY_REF
7030 && integer_zerop (TREE_OPERAND (op, 1))
7031 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7032 return integer_one_node;
7033 }
7034
7035 /* If this expression has side effects, show we don't know it to be a
7036 constant. Likewise if it's a pointer or aggregate type since in
7037 those case we only want literals, since those are only optimized
7038 when generating RTL, not later.
7039 And finally, if we are compiling an initializer, not code, we
7040 need to return a definite result now; there's not going to be any
7041 more optimization done. */
7042 if (TREE_SIDE_EFFECTS (arg)
7043 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7044 || POINTER_TYPE_P (TREE_TYPE (arg))
7045 || cfun == 0
7046 || folding_initializer
7047 || force_folding_builtin_constant_p)
7048 return integer_zero_node;
7049
7050 return NULL_TREE;
7051 }
7052
7053 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7054 return it as a truthvalue. */
7055
7056 static tree
7057 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7058 tree predictor)
7059 {
7060 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7061
7062 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7063 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7064 ret_type = TREE_TYPE (TREE_TYPE (fn));
7065 pred_type = TREE_VALUE (arg_types);
7066 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7067
7068 pred = fold_convert_loc (loc, pred_type, pred);
7069 expected = fold_convert_loc (loc, expected_type, expected);
7070 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7071 predictor);
7072
7073 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7074 build_int_cst (ret_type, 0));
7075 }
7076
7077 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7078 NULL_TREE if no simplification is possible. */
7079
7080 tree
7081 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7082 {
7083 tree inner, fndecl, inner_arg0;
7084 enum tree_code code;
7085
7086 /* Distribute the expected value over short-circuiting operators.
7087 See through the cast from truthvalue_type_node to long. */
7088 inner_arg0 = arg0;
7089 while (CONVERT_EXPR_P (inner_arg0)
7090 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7091 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7092 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7093
7094 /* If this is a builtin_expect within a builtin_expect keep the
7095 inner one. See through a comparison against a constant. It
7096 might have been added to create a thruthvalue. */
7097 inner = inner_arg0;
7098
7099 if (COMPARISON_CLASS_P (inner)
7100 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7101 inner = TREE_OPERAND (inner, 0);
7102
7103 if (TREE_CODE (inner) == CALL_EXPR
7104 && (fndecl = get_callee_fndecl (inner))
7105 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7106 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7107 return arg0;
7108
7109 inner = inner_arg0;
7110 code = TREE_CODE (inner);
7111 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7112 {
7113 tree op0 = TREE_OPERAND (inner, 0);
7114 tree op1 = TREE_OPERAND (inner, 1);
7115
7116 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7117 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7118 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7119
7120 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7121 }
7122
7123 /* If the argument isn't invariant then there's nothing else we can do. */
7124 if (!TREE_CONSTANT (inner_arg0))
7125 return NULL_TREE;
7126
7127 /* If we expect that a comparison against the argument will fold to
7128 a constant return the constant. In practice, this means a true
7129 constant or the address of a non-weak symbol. */
7130 inner = inner_arg0;
7131 STRIP_NOPS (inner);
7132 if (TREE_CODE (inner) == ADDR_EXPR)
7133 {
7134 do
7135 {
7136 inner = TREE_OPERAND (inner, 0);
7137 }
7138 while (TREE_CODE (inner) == COMPONENT_REF
7139 || TREE_CODE (inner) == ARRAY_REF);
7140 if ((TREE_CODE (inner) == VAR_DECL
7141 || TREE_CODE (inner) == FUNCTION_DECL)
7142 && DECL_WEAK (inner))
7143 return NULL_TREE;
7144 }
7145
7146 /* Otherwise, ARG0 already has the proper type for the return value. */
7147 return arg0;
7148 }
7149
7150 /* Fold a call to __builtin_classify_type with argument ARG. */
7151
7152 static tree
7153 fold_builtin_classify_type (tree arg)
7154 {
7155 if (arg == 0)
7156 return build_int_cst (integer_type_node, no_type_class);
7157
7158 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7159 }
7160
7161 /* Fold a call to __builtin_strlen with argument ARG. */
7162
7163 static tree
7164 fold_builtin_strlen (location_t loc, tree type, tree arg)
7165 {
7166 if (!validate_arg (arg, POINTER_TYPE))
7167 return NULL_TREE;
7168 else
7169 {
7170 tree len = c_strlen (arg, 0);
7171
7172 if (len)
7173 return fold_convert_loc (loc, type, len);
7174
7175 return NULL_TREE;
7176 }
7177 }
7178
7179 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7180
7181 static tree
7182 fold_builtin_inf (location_t loc, tree type, int warn)
7183 {
7184 REAL_VALUE_TYPE real;
7185
7186 /* __builtin_inff is intended to be usable to define INFINITY on all
7187 targets. If an infinity is not available, INFINITY expands "to a
7188 positive constant of type float that overflows at translation
7189 time", footnote "In this case, using INFINITY will violate the
7190 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7191 Thus we pedwarn to ensure this constraint violation is
7192 diagnosed. */
7193 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7194 pedwarn (loc, 0, "target format does not support infinity");
7195
7196 real_inf (&real);
7197 return build_real (type, real);
7198 }
7199
7200 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7201 NULL_TREE if no simplification can be made. */
7202
7203 static tree
7204 fold_builtin_sincos (location_t loc,
7205 tree arg0, tree arg1, tree arg2)
7206 {
7207 tree type;
7208 tree fndecl, call = NULL_TREE;
7209
7210 if (!validate_arg (arg0, REAL_TYPE)
7211 || !validate_arg (arg1, POINTER_TYPE)
7212 || !validate_arg (arg2, POINTER_TYPE))
7213 return NULL_TREE;
7214
7215 type = TREE_TYPE (arg0);
7216
7217 /* Calculate the result when the argument is a constant. */
7218 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7219 if (fn == END_BUILTINS)
7220 return NULL_TREE;
7221
7222 /* Canonicalize sincos to cexpi. */
7223 if (TREE_CODE (arg0) == REAL_CST)
7224 {
7225 tree complex_type = build_complex_type (type);
7226 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7227 }
7228 if (!call)
7229 {
7230 if (!targetm.libc_has_function (function_c99_math_complex)
7231 || !builtin_decl_implicit_p (fn))
7232 return NULL_TREE;
7233 fndecl = builtin_decl_explicit (fn);
7234 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7235 call = builtin_save_expr (call);
7236 }
7237
7238 return build2 (COMPOUND_EXPR, void_type_node,
7239 build2 (MODIFY_EXPR, void_type_node,
7240 build_fold_indirect_ref_loc (loc, arg1),
7241 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7242 build2 (MODIFY_EXPR, void_type_node,
7243 build_fold_indirect_ref_loc (loc, arg2),
7244 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7245 }
7246
7247 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7248 arguments to the call, and TYPE is its return type.
7249 Return NULL_TREE if no simplification can be made. */
7250
7251 static tree
7252 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7253 {
7254 if (!validate_arg (arg1, POINTER_TYPE)
7255 || !validate_arg (arg2, INTEGER_TYPE)
7256 || !validate_arg (len, INTEGER_TYPE))
7257 return NULL_TREE;
7258 else
7259 {
7260 const char *p1;
7261
7262 if (TREE_CODE (arg2) != INTEGER_CST
7263 || !tree_fits_uhwi_p (len))
7264 return NULL_TREE;
7265
7266 p1 = c_getstr (arg1);
7267 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7268 {
7269 char c;
7270 const char *r;
7271 tree tem;
7272
7273 if (target_char_cast (arg2, &c))
7274 return NULL_TREE;
7275
7276 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7277
7278 if (r == NULL)
7279 return build_int_cst (TREE_TYPE (arg1), 0);
7280
7281 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7282 return fold_convert_loc (loc, type, tem);
7283 }
7284 return NULL_TREE;
7285 }
7286 }
7287
7288 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7289 Return NULL_TREE if no simplification can be made. */
7290
7291 static tree
7292 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7293 {
7294 if (!validate_arg (arg1, POINTER_TYPE)
7295 || !validate_arg (arg2, POINTER_TYPE)
7296 || !validate_arg (len, INTEGER_TYPE))
7297 return NULL_TREE;
7298
7299 /* If the LEN parameter is zero, return zero. */
7300 if (integer_zerop (len))
7301 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7302 arg1, arg2);
7303
7304 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7305 if (operand_equal_p (arg1, arg2, 0))
7306 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7307
7308 /* If len parameter is one, return an expression corresponding to
7309 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7310 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7311 {
7312 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7313 tree cst_uchar_ptr_node
7314 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7315
7316 tree ind1
7317 = fold_convert_loc (loc, integer_type_node,
7318 build1 (INDIRECT_REF, cst_uchar_node,
7319 fold_convert_loc (loc,
7320 cst_uchar_ptr_node,
7321 arg1)));
7322 tree ind2
7323 = fold_convert_loc (loc, integer_type_node,
7324 build1 (INDIRECT_REF, cst_uchar_node,
7325 fold_convert_loc (loc,
7326 cst_uchar_ptr_node,
7327 arg2)));
7328 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7329 }
7330
7331 return NULL_TREE;
7332 }
7333
7334 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7335 Return NULL_TREE if no simplification can be made. */
7336
7337 static tree
7338 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7339 {
7340 if (!validate_arg (arg1, POINTER_TYPE)
7341 || !validate_arg (arg2, POINTER_TYPE))
7342 return NULL_TREE;
7343
7344 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7345 if (operand_equal_p (arg1, arg2, 0))
7346 return integer_zero_node;
7347
7348 /* If the second arg is "", return *(const unsigned char*)arg1. */
7349 const char *p2 = c_getstr (arg2);
7350 if (p2 && *p2 == '\0')
7351 {
7352 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7353 tree cst_uchar_ptr_node
7354 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7355
7356 return fold_convert_loc (loc, integer_type_node,
7357 build1 (INDIRECT_REF, cst_uchar_node,
7358 fold_convert_loc (loc,
7359 cst_uchar_ptr_node,
7360 arg1)));
7361 }
7362
7363 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7364 const char *p1 = c_getstr (arg1);
7365 if (p1 && *p1 == '\0')
7366 {
7367 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7368 tree cst_uchar_ptr_node
7369 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7370
7371 tree temp
7372 = fold_convert_loc (loc, integer_type_node,
7373 build1 (INDIRECT_REF, cst_uchar_node,
7374 fold_convert_loc (loc,
7375 cst_uchar_ptr_node,
7376 arg2)));
7377 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7378 }
7379
7380 return NULL_TREE;
7381 }
7382
7383 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7384 Return NULL_TREE if no simplification can be made. */
7385
7386 static tree
7387 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7388 {
7389 if (!validate_arg (arg1, POINTER_TYPE)
7390 || !validate_arg (arg2, POINTER_TYPE)
7391 || !validate_arg (len, INTEGER_TYPE))
7392 return NULL_TREE;
7393
7394 /* If the LEN parameter is zero, return zero. */
7395 if (integer_zerop (len))
7396 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7397 arg1, arg2);
7398
7399 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7400 if (operand_equal_p (arg1, arg2, 0))
7401 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7402
7403 /* If the second arg is "", and the length is greater than zero,
7404 return *(const unsigned char*)arg1. */
7405 const char *p2 = c_getstr (arg2);
7406 if (p2 && *p2 == '\0'
7407 && TREE_CODE (len) == INTEGER_CST
7408 && tree_int_cst_sgn (len) == 1)
7409 {
7410 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7411 tree cst_uchar_ptr_node
7412 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7413
7414 return fold_convert_loc (loc, integer_type_node,
7415 build1 (INDIRECT_REF, cst_uchar_node,
7416 fold_convert_loc (loc,
7417 cst_uchar_ptr_node,
7418 arg1)));
7419 }
7420
7421 /* If the first arg is "", and the length is greater than zero,
7422 return -*(const unsigned char*)arg2. */
7423 const char *p1 = c_getstr (arg1);
7424 if (p1 && *p1 == '\0'
7425 && TREE_CODE (len) == INTEGER_CST
7426 && tree_int_cst_sgn (len) == 1)
7427 {
7428 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7429 tree cst_uchar_ptr_node
7430 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7431
7432 tree temp = fold_convert_loc (loc, integer_type_node,
7433 build1 (INDIRECT_REF, cst_uchar_node,
7434 fold_convert_loc (loc,
7435 cst_uchar_ptr_node,
7436 arg2)));
7437 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7438 }
7439
7440 /* If len parameter is one, return an expression corresponding to
7441 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7442 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7443 {
7444 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7445 tree cst_uchar_ptr_node
7446 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7447
7448 tree ind1 = fold_convert_loc (loc, integer_type_node,
7449 build1 (INDIRECT_REF, cst_uchar_node,
7450 fold_convert_loc (loc,
7451 cst_uchar_ptr_node,
7452 arg1)));
7453 tree ind2 = fold_convert_loc (loc, integer_type_node,
7454 build1 (INDIRECT_REF, cst_uchar_node,
7455 fold_convert_loc (loc,
7456 cst_uchar_ptr_node,
7457 arg2)));
7458 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7459 }
7460
7461 return NULL_TREE;
7462 }
7463
7464 /* Fold a call to builtin isascii with argument ARG. */
7465
7466 static tree
7467 fold_builtin_isascii (location_t loc, tree arg)
7468 {
7469 if (!validate_arg (arg, INTEGER_TYPE))
7470 return NULL_TREE;
7471 else
7472 {
7473 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7474 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7475 build_int_cst (integer_type_node,
7476 ~ (unsigned HOST_WIDE_INT) 0x7f));
7477 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7478 arg, integer_zero_node);
7479 }
7480 }
7481
7482 /* Fold a call to builtin toascii with argument ARG. */
7483
7484 static tree
7485 fold_builtin_toascii (location_t loc, tree arg)
7486 {
7487 if (!validate_arg (arg, INTEGER_TYPE))
7488 return NULL_TREE;
7489
7490 /* Transform toascii(c) -> (c & 0x7f). */
7491 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7492 build_int_cst (integer_type_node, 0x7f));
7493 }
7494
7495 /* Fold a call to builtin isdigit with argument ARG. */
7496
7497 static tree
7498 fold_builtin_isdigit (location_t loc, tree arg)
7499 {
7500 if (!validate_arg (arg, INTEGER_TYPE))
7501 return NULL_TREE;
7502 else
7503 {
7504 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7505 /* According to the C standard, isdigit is unaffected by locale.
7506 However, it definitely is affected by the target character set. */
7507 unsigned HOST_WIDE_INT target_digit0
7508 = lang_hooks.to_target_charset ('0');
7509
7510 if (target_digit0 == 0)
7511 return NULL_TREE;
7512
7513 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7514 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7515 build_int_cst (unsigned_type_node, target_digit0));
7516 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7517 build_int_cst (unsigned_type_node, 9));
7518 }
7519 }
7520
7521 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7522
7523 static tree
7524 fold_builtin_fabs (location_t loc, tree arg, tree type)
7525 {
7526 if (!validate_arg (arg, REAL_TYPE))
7527 return NULL_TREE;
7528
7529 arg = fold_convert_loc (loc, type, arg);
7530 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7531 }
7532
7533 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7534
7535 static tree
7536 fold_builtin_abs (location_t loc, tree arg, tree type)
7537 {
7538 if (!validate_arg (arg, INTEGER_TYPE))
7539 return NULL_TREE;
7540
7541 arg = fold_convert_loc (loc, type, arg);
7542 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7543 }
7544
7545 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7546
7547 static tree
7548 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7549 {
7550 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7551 if (validate_arg (arg0, REAL_TYPE)
7552 && validate_arg (arg1, REAL_TYPE)
7553 && validate_arg (arg2, REAL_TYPE)
7554 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7555 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7556
7557 return NULL_TREE;
7558 }
7559
7560 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7561
7562 static tree
7563 fold_builtin_carg (location_t loc, tree arg, tree type)
7564 {
7565 if (validate_arg (arg, COMPLEX_TYPE)
7566 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7567 {
7568 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7569
7570 if (atan2_fn)
7571 {
7572 tree new_arg = builtin_save_expr (arg);
7573 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7574 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7575 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7576 }
7577 }
7578
7579 return NULL_TREE;
7580 }
7581
7582 /* Fold a call to builtin frexp, we can assume the base is 2. */
7583
7584 static tree
7585 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7586 {
7587 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7588 return NULL_TREE;
7589
7590 STRIP_NOPS (arg0);
7591
7592 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7593 return NULL_TREE;
7594
7595 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7596
7597 /* Proceed if a valid pointer type was passed in. */
7598 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7599 {
7600 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7601 tree frac, exp;
7602
7603 switch (value->cl)
7604 {
7605 case rvc_zero:
7606 /* For +-0, return (*exp = 0, +-0). */
7607 exp = integer_zero_node;
7608 frac = arg0;
7609 break;
7610 case rvc_nan:
7611 case rvc_inf:
7612 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7613 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7614 case rvc_normal:
7615 {
7616 /* Since the frexp function always expects base 2, and in
7617 GCC normalized significands are already in the range
7618 [0.5, 1.0), we have exactly what frexp wants. */
7619 REAL_VALUE_TYPE frac_rvt = *value;
7620 SET_REAL_EXP (&frac_rvt, 0);
7621 frac = build_real (rettype, frac_rvt);
7622 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7623 }
7624 break;
7625 default:
7626 gcc_unreachable ();
7627 }
7628
7629 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7630 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7631 TREE_SIDE_EFFECTS (arg1) = 1;
7632 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7633 }
7634
7635 return NULL_TREE;
7636 }
7637
7638 /* Fold a call to builtin modf. */
7639
7640 static tree
7641 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7642 {
7643 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7644 return NULL_TREE;
7645
7646 STRIP_NOPS (arg0);
7647
7648 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7649 return NULL_TREE;
7650
7651 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7652
7653 /* Proceed if a valid pointer type was passed in. */
7654 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7655 {
7656 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7657 REAL_VALUE_TYPE trunc, frac;
7658
7659 switch (value->cl)
7660 {
7661 case rvc_nan:
7662 case rvc_zero:
7663 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7664 trunc = frac = *value;
7665 break;
7666 case rvc_inf:
7667 /* For +-Inf, return (*arg1 = arg0, +-0). */
7668 frac = dconst0;
7669 frac.sign = value->sign;
7670 trunc = *value;
7671 break;
7672 case rvc_normal:
7673 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7674 real_trunc (&trunc, VOIDmode, value);
7675 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7676 /* If the original number was negative and already
7677 integral, then the fractional part is -0.0. */
7678 if (value->sign && frac.cl == rvc_zero)
7679 frac.sign = value->sign;
7680 break;
7681 }
7682
7683 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7684 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7685 build_real (rettype, trunc));
7686 TREE_SIDE_EFFECTS (arg1) = 1;
7687 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7688 build_real (rettype, frac));
7689 }
7690
7691 return NULL_TREE;
7692 }
7693
7694 /* Given a location LOC, an interclass builtin function decl FNDECL
7695 and its single argument ARG, return an folded expression computing
7696 the same, or NULL_TREE if we either couldn't or didn't want to fold
7697 (the latter happen if there's an RTL instruction available). */
7698
7699 static tree
7700 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7701 {
7702 machine_mode mode;
7703
7704 if (!validate_arg (arg, REAL_TYPE))
7705 return NULL_TREE;
7706
7707 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7708 return NULL_TREE;
7709
7710 mode = TYPE_MODE (TREE_TYPE (arg));
7711
7712 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7713
7714 /* If there is no optab, try generic code. */
7715 switch (DECL_FUNCTION_CODE (fndecl))
7716 {
7717 tree result;
7718
7719 CASE_FLT_FN (BUILT_IN_ISINF):
7720 {
7721 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7722 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7723 tree type = TREE_TYPE (arg);
7724 REAL_VALUE_TYPE r;
7725 char buf[128];
7726
7727 if (is_ibm_extended)
7728 {
7729 /* NaN and Inf are encoded in the high-order double value
7730 only. The low-order value is not significant. */
7731 type = double_type_node;
7732 mode = DFmode;
7733 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7734 }
7735 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7736 real_from_string (&r, buf);
7737 result = build_call_expr (isgr_fn, 2,
7738 fold_build1_loc (loc, ABS_EXPR, type, arg),
7739 build_real (type, r));
7740 return result;
7741 }
7742 CASE_FLT_FN (BUILT_IN_FINITE):
7743 case BUILT_IN_ISFINITE:
7744 {
7745 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7746 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7747 tree type = TREE_TYPE (arg);
7748 REAL_VALUE_TYPE r;
7749 char buf[128];
7750
7751 if (is_ibm_extended)
7752 {
7753 /* NaN and Inf are encoded in the high-order double value
7754 only. The low-order value is not significant. */
7755 type = double_type_node;
7756 mode = DFmode;
7757 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7758 }
7759 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7760 real_from_string (&r, buf);
7761 result = build_call_expr (isle_fn, 2,
7762 fold_build1_loc (loc, ABS_EXPR, type, arg),
7763 build_real (type, r));
7764 /*result = fold_build2_loc (loc, UNGT_EXPR,
7765 TREE_TYPE (TREE_TYPE (fndecl)),
7766 fold_build1_loc (loc, ABS_EXPR, type, arg),
7767 build_real (type, r));
7768 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7769 TREE_TYPE (TREE_TYPE (fndecl)),
7770 result);*/
7771 return result;
7772 }
7773 case BUILT_IN_ISNORMAL:
7774 {
7775 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7776 islessequal(fabs(x),DBL_MAX). */
7777 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7778 tree type = TREE_TYPE (arg);
7779 tree orig_arg, max_exp, min_exp;
7780 machine_mode orig_mode = mode;
7781 REAL_VALUE_TYPE rmax, rmin;
7782 char buf[128];
7783
7784 orig_arg = arg = builtin_save_expr (arg);
7785 if (is_ibm_extended)
7786 {
7787 /* Use double to test the normal range of IBM extended
7788 precision. Emin for IBM extended precision is
7789 different to emin for IEEE double, being 53 higher
7790 since the low double exponent is at least 53 lower
7791 than the high double exponent. */
7792 type = double_type_node;
7793 mode = DFmode;
7794 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7795 }
7796 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7797
7798 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7799 real_from_string (&rmax, buf);
7800 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7801 real_from_string (&rmin, buf);
7802 max_exp = build_real (type, rmax);
7803 min_exp = build_real (type, rmin);
7804
7805 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7806 if (is_ibm_extended)
7807 {
7808 /* Testing the high end of the range is done just using
7809 the high double, using the same test as isfinite().
7810 For the subnormal end of the range we first test the
7811 high double, then if its magnitude is equal to the
7812 limit of 0x1p-969, we test whether the low double is
7813 non-zero and opposite sign to the high double. */
7814 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7815 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7816 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7817 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7818 arg, min_exp);
7819 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7820 complex_double_type_node, orig_arg);
7821 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7822 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7823 tree zero = build_real (type, dconst0);
7824 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7825 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7826 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7827 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7828 fold_build3 (COND_EXPR,
7829 integer_type_node,
7830 hilt, logt, lolt));
7831 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7832 eq_min, ok_lo);
7833 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7834 gt_min, eq_min);
7835 }
7836 else
7837 {
7838 tree const isge_fn
7839 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7840 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7841 }
7842 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7843 max_exp, min_exp);
7844 return result;
7845 }
7846 default:
7847 break;
7848 }
7849
7850 return NULL_TREE;
7851 }
7852
7853 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7854 ARG is the argument for the call. */
7855
7856 static tree
7857 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7858 {
7859 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7860
7861 if (!validate_arg (arg, REAL_TYPE))
7862 return NULL_TREE;
7863
7864 switch (builtin_index)
7865 {
7866 case BUILT_IN_ISINF:
7867 if (!HONOR_INFINITIES (arg))
7868 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7869
7870 return NULL_TREE;
7871
7872 case BUILT_IN_ISINF_SIGN:
7873 {
7874 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7875 /* In a boolean context, GCC will fold the inner COND_EXPR to
7876 1. So e.g. "if (isinf_sign(x))" would be folded to just
7877 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7878 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7879 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7880 tree tmp = NULL_TREE;
7881
7882 arg = builtin_save_expr (arg);
7883
7884 if (signbit_fn && isinf_fn)
7885 {
7886 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7887 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7888
7889 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7890 signbit_call, integer_zero_node);
7891 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7892 isinf_call, integer_zero_node);
7893
7894 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7895 integer_minus_one_node, integer_one_node);
7896 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7897 isinf_call, tmp,
7898 integer_zero_node);
7899 }
7900
7901 return tmp;
7902 }
7903
7904 case BUILT_IN_ISFINITE:
7905 if (!HONOR_NANS (arg)
7906 && !HONOR_INFINITIES (arg))
7907 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7908
7909 return NULL_TREE;
7910
7911 case BUILT_IN_ISNAN:
7912 if (!HONOR_NANS (arg))
7913 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7914
7915 {
7916 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7917 if (is_ibm_extended)
7918 {
7919 /* NaN and Inf are encoded in the high-order double value
7920 only. The low-order value is not significant. */
7921 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7922 }
7923 }
7924 arg = builtin_save_expr (arg);
7925 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7926
7927 default:
7928 gcc_unreachable ();
7929 }
7930 }
7931
7932 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7933 This builtin will generate code to return the appropriate floating
7934 point classification depending on the value of the floating point
7935 number passed in. The possible return values must be supplied as
7936 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7937 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
7938 one floating point argument which is "type generic". */
7939
7940 static tree
7941 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7942 {
7943 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7944 arg, type, res, tmp;
7945 machine_mode mode;
7946 REAL_VALUE_TYPE r;
7947 char buf[128];
7948
7949 /* Verify the required arguments in the original call. */
7950 if (nargs != 6
7951 || !validate_arg (args[0], INTEGER_TYPE)
7952 || !validate_arg (args[1], INTEGER_TYPE)
7953 || !validate_arg (args[2], INTEGER_TYPE)
7954 || !validate_arg (args[3], INTEGER_TYPE)
7955 || !validate_arg (args[4], INTEGER_TYPE)
7956 || !validate_arg (args[5], REAL_TYPE))
7957 return NULL_TREE;
7958
7959 fp_nan = args[0];
7960 fp_infinite = args[1];
7961 fp_normal = args[2];
7962 fp_subnormal = args[3];
7963 fp_zero = args[4];
7964 arg = args[5];
7965 type = TREE_TYPE (arg);
7966 mode = TYPE_MODE (type);
7967 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7968
7969 /* fpclassify(x) ->
7970 isnan(x) ? FP_NAN :
7971 (fabs(x) == Inf ? FP_INFINITE :
7972 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7973 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7974
7975 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7976 build_real (type, dconst0));
7977 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7978 tmp, fp_zero, fp_subnormal);
7979
7980 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7981 real_from_string (&r, buf);
7982 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7983 arg, build_real (type, r));
7984 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7985
7986 if (HONOR_INFINITIES (mode))
7987 {
7988 real_inf (&r);
7989 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7990 build_real (type, r));
7991 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7992 fp_infinite, res);
7993 }
7994
7995 if (HONOR_NANS (mode))
7996 {
7997 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7998 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7999 }
8000
8001 return res;
8002 }
8003
8004 /* Fold a call to an unordered comparison function such as
8005 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8006 being called and ARG0 and ARG1 are the arguments for the call.
8007 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8008 the opposite of the desired result. UNORDERED_CODE is used
8009 for modes that can hold NaNs and ORDERED_CODE is used for
8010 the rest. */
8011
8012 static tree
8013 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8014 enum tree_code unordered_code,
8015 enum tree_code ordered_code)
8016 {
8017 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8018 enum tree_code code;
8019 tree type0, type1;
8020 enum tree_code code0, code1;
8021 tree cmp_type = NULL_TREE;
8022
8023 type0 = TREE_TYPE (arg0);
8024 type1 = TREE_TYPE (arg1);
8025
8026 code0 = TREE_CODE (type0);
8027 code1 = TREE_CODE (type1);
8028
8029 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8030 /* Choose the wider of two real types. */
8031 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8032 ? type0 : type1;
8033 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8034 cmp_type = type0;
8035 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8036 cmp_type = type1;
8037
8038 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8039 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8040
8041 if (unordered_code == UNORDERED_EXPR)
8042 {
8043 if (!HONOR_NANS (arg0))
8044 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8045 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8046 }
8047
8048 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8049 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8050 fold_build2_loc (loc, code, type, arg0, arg1));
8051 }
8052
8053 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8054 arithmetics if it can never overflow, or into internal functions that
8055 return both result of arithmetics and overflowed boolean flag in
8056 a complex integer result, or some other check for overflow.
8057 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8058 checking part of that. */
8059
8060 static tree
8061 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8062 tree arg0, tree arg1, tree arg2)
8063 {
8064 enum internal_fn ifn = IFN_LAST;
8065 /* The code of the expression corresponding to the type-generic
8066 built-in, or ERROR_MARK for the type-specific ones. */
8067 enum tree_code opcode = ERROR_MARK;
8068 bool ovf_only = false;
8069
8070 switch (fcode)
8071 {
8072 case BUILT_IN_ADD_OVERFLOW_P:
8073 ovf_only = true;
8074 /* FALLTHRU */
8075 case BUILT_IN_ADD_OVERFLOW:
8076 opcode = PLUS_EXPR;
8077 /* FALLTHRU */
8078 case BUILT_IN_SADD_OVERFLOW:
8079 case BUILT_IN_SADDL_OVERFLOW:
8080 case BUILT_IN_SADDLL_OVERFLOW:
8081 case BUILT_IN_UADD_OVERFLOW:
8082 case BUILT_IN_UADDL_OVERFLOW:
8083 case BUILT_IN_UADDLL_OVERFLOW:
8084 ifn = IFN_ADD_OVERFLOW;
8085 break;
8086 case BUILT_IN_SUB_OVERFLOW_P:
8087 ovf_only = true;
8088 /* FALLTHRU */
8089 case BUILT_IN_SUB_OVERFLOW:
8090 opcode = MINUS_EXPR;
8091 /* FALLTHRU */
8092 case BUILT_IN_SSUB_OVERFLOW:
8093 case BUILT_IN_SSUBL_OVERFLOW:
8094 case BUILT_IN_SSUBLL_OVERFLOW:
8095 case BUILT_IN_USUB_OVERFLOW:
8096 case BUILT_IN_USUBL_OVERFLOW:
8097 case BUILT_IN_USUBLL_OVERFLOW:
8098 ifn = IFN_SUB_OVERFLOW;
8099 break;
8100 case BUILT_IN_MUL_OVERFLOW_P:
8101 ovf_only = true;
8102 /* FALLTHRU */
8103 case BUILT_IN_MUL_OVERFLOW:
8104 opcode = MULT_EXPR;
8105 /* FALLTHRU */
8106 case BUILT_IN_SMUL_OVERFLOW:
8107 case BUILT_IN_SMULL_OVERFLOW:
8108 case BUILT_IN_SMULLL_OVERFLOW:
8109 case BUILT_IN_UMUL_OVERFLOW:
8110 case BUILT_IN_UMULL_OVERFLOW:
8111 case BUILT_IN_UMULLL_OVERFLOW:
8112 ifn = IFN_MUL_OVERFLOW;
8113 break;
8114 default:
8115 gcc_unreachable ();
8116 }
8117
8118 /* For the "generic" overloads, the first two arguments can have different
8119 types and the last argument determines the target type to use to check
8120 for overflow. The arguments of the other overloads all have the same
8121 type. */
8122 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8123
8124 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8125 arguments are constant, attempt to fold the built-in call into a constant
8126 expression indicating whether or not it detected an overflow. */
8127 if (ovf_only
8128 && TREE_CODE (arg0) == INTEGER_CST
8129 && TREE_CODE (arg1) == INTEGER_CST)
8130 /* Perform the computation in the target type and check for overflow. */
8131 return omit_one_operand_loc (loc, boolean_type_node,
8132 arith_overflowed_p (opcode, type, arg0, arg1)
8133 ? boolean_true_node : boolean_false_node,
8134 arg2);
8135
8136 tree ctype = build_complex_type (type);
8137 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8138 2, arg0, arg1);
8139 tree tgt = save_expr (call);
8140 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8141 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8142 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8143
8144 if (ovf_only)
8145 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8146
8147 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8148 tree store
8149 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8150 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8151 }
8152
8153 /* Fold a call to __builtin_FILE to a constant string. */
8154
8155 static inline tree
8156 fold_builtin_FILE (location_t loc)
8157 {
8158 if (const char *fname = LOCATION_FILE (loc))
8159 return build_string_literal (strlen (fname) + 1, fname);
8160
8161 return build_string_literal (1, "");
8162 }
8163
8164 /* Fold a call to __builtin_FUNCTION to a constant string. */
8165
8166 static inline tree
8167 fold_builtin_FUNCTION ()
8168 {
8169 if (current_function_decl)
8170 {
8171 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8172 return build_string_literal (strlen (name) + 1, name);
8173 }
8174
8175 return build_string_literal (1, "");
8176 }
8177
8178 /* Fold a call to __builtin_LINE to an integer constant. */
8179
8180 static inline tree
8181 fold_builtin_LINE (location_t loc, tree type)
8182 {
8183 return build_int_cst (type, LOCATION_LINE (loc));
8184 }
8185
8186 /* Fold a call to built-in function FNDECL with 0 arguments.
8187 This function returns NULL_TREE if no simplification was possible. */
8188
8189 static tree
8190 fold_builtin_0 (location_t loc, tree fndecl)
8191 {
8192 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8193 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8194 switch (fcode)
8195 {
8196 case BUILT_IN_FILE:
8197 return fold_builtin_FILE (loc);
8198
8199 case BUILT_IN_FUNCTION:
8200 return fold_builtin_FUNCTION ();
8201
8202 case BUILT_IN_LINE:
8203 return fold_builtin_LINE (loc, type);
8204
8205 CASE_FLT_FN (BUILT_IN_INF):
8206 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8207 case BUILT_IN_INFD32:
8208 case BUILT_IN_INFD64:
8209 case BUILT_IN_INFD128:
8210 return fold_builtin_inf (loc, type, true);
8211
8212 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8213 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8214 return fold_builtin_inf (loc, type, false);
8215
8216 case BUILT_IN_CLASSIFY_TYPE:
8217 return fold_builtin_classify_type (NULL_TREE);
8218
8219 default:
8220 break;
8221 }
8222 return NULL_TREE;
8223 }
8224
8225 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8226 This function returns NULL_TREE if no simplification was possible. */
8227
8228 static tree
8229 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8230 {
8231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8232 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8233
8234 if (TREE_CODE (arg0) == ERROR_MARK)
8235 return NULL_TREE;
8236
8237 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8238 return ret;
8239
8240 switch (fcode)
8241 {
8242 case BUILT_IN_CONSTANT_P:
8243 {
8244 tree val = fold_builtin_constant_p (arg0);
8245
8246 /* Gimplification will pull the CALL_EXPR for the builtin out of
8247 an if condition. When not optimizing, we'll not CSE it back.
8248 To avoid link error types of regressions, return false now. */
8249 if (!val && !optimize)
8250 val = integer_zero_node;
8251
8252 return val;
8253 }
8254
8255 case BUILT_IN_CLASSIFY_TYPE:
8256 return fold_builtin_classify_type (arg0);
8257
8258 case BUILT_IN_STRLEN:
8259 return fold_builtin_strlen (loc, type, arg0);
8260
8261 CASE_FLT_FN (BUILT_IN_FABS):
8262 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8263 case BUILT_IN_FABSD32:
8264 case BUILT_IN_FABSD64:
8265 case BUILT_IN_FABSD128:
8266 return fold_builtin_fabs (loc, arg0, type);
8267
8268 case BUILT_IN_ABS:
8269 case BUILT_IN_LABS:
8270 case BUILT_IN_LLABS:
8271 case BUILT_IN_IMAXABS:
8272 return fold_builtin_abs (loc, arg0, type);
8273
8274 CASE_FLT_FN (BUILT_IN_CONJ):
8275 if (validate_arg (arg0, COMPLEX_TYPE)
8276 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8277 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8278 break;
8279
8280 CASE_FLT_FN (BUILT_IN_CREAL):
8281 if (validate_arg (arg0, COMPLEX_TYPE)
8282 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8283 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8284 break;
8285
8286 CASE_FLT_FN (BUILT_IN_CIMAG):
8287 if (validate_arg (arg0, COMPLEX_TYPE)
8288 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8289 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8290 break;
8291
8292 CASE_FLT_FN (BUILT_IN_CARG):
8293 return fold_builtin_carg (loc, arg0, type);
8294
8295 case BUILT_IN_ISASCII:
8296 return fold_builtin_isascii (loc, arg0);
8297
8298 case BUILT_IN_TOASCII:
8299 return fold_builtin_toascii (loc, arg0);
8300
8301 case BUILT_IN_ISDIGIT:
8302 return fold_builtin_isdigit (loc, arg0);
8303
8304 CASE_FLT_FN (BUILT_IN_FINITE):
8305 case BUILT_IN_FINITED32:
8306 case BUILT_IN_FINITED64:
8307 case BUILT_IN_FINITED128:
8308 case BUILT_IN_ISFINITE:
8309 {
8310 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8311 if (ret)
8312 return ret;
8313 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8314 }
8315
8316 CASE_FLT_FN (BUILT_IN_ISINF):
8317 case BUILT_IN_ISINFD32:
8318 case BUILT_IN_ISINFD64:
8319 case BUILT_IN_ISINFD128:
8320 {
8321 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8322 if (ret)
8323 return ret;
8324 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8325 }
8326
8327 case BUILT_IN_ISNORMAL:
8328 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8329
8330 case BUILT_IN_ISINF_SIGN:
8331 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8332
8333 CASE_FLT_FN (BUILT_IN_ISNAN):
8334 case BUILT_IN_ISNAND32:
8335 case BUILT_IN_ISNAND64:
8336 case BUILT_IN_ISNAND128:
8337 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8338
8339 case BUILT_IN_FREE:
8340 if (integer_zerop (arg0))
8341 return build_empty_stmt (loc);
8342 break;
8343
8344 default:
8345 break;
8346 }
8347
8348 return NULL_TREE;
8349
8350 }
8351
8352 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8353 This function returns NULL_TREE if no simplification was possible. */
8354
8355 static tree
8356 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8357 {
8358 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8359 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8360
8361 if (TREE_CODE (arg0) == ERROR_MARK
8362 || TREE_CODE (arg1) == ERROR_MARK)
8363 return NULL_TREE;
8364
8365 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8366 return ret;
8367
8368 switch (fcode)
8369 {
8370 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8371 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8372 if (validate_arg (arg0, REAL_TYPE)
8373 && validate_arg (arg1, POINTER_TYPE))
8374 return do_mpfr_lgamma_r (arg0, arg1, type);
8375 break;
8376
8377 CASE_FLT_FN (BUILT_IN_FREXP):
8378 return fold_builtin_frexp (loc, arg0, arg1, type);
8379
8380 CASE_FLT_FN (BUILT_IN_MODF):
8381 return fold_builtin_modf (loc, arg0, arg1, type);
8382
8383 case BUILT_IN_STRSTR:
8384 return fold_builtin_strstr (loc, arg0, arg1, type);
8385
8386 case BUILT_IN_STRSPN:
8387 return fold_builtin_strspn (loc, arg0, arg1);
8388
8389 case BUILT_IN_STRCSPN:
8390 return fold_builtin_strcspn (loc, arg0, arg1);
8391
8392 case BUILT_IN_STRCHR:
8393 case BUILT_IN_INDEX:
8394 return fold_builtin_strchr (loc, arg0, arg1, type);
8395
8396 case BUILT_IN_STRRCHR:
8397 case BUILT_IN_RINDEX:
8398 return fold_builtin_strrchr (loc, arg0, arg1, type);
8399
8400 case BUILT_IN_STRCMP:
8401 return fold_builtin_strcmp (loc, arg0, arg1);
8402
8403 case BUILT_IN_STRPBRK:
8404 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8405
8406 case BUILT_IN_EXPECT:
8407 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8408
8409 case BUILT_IN_ISGREATER:
8410 return fold_builtin_unordered_cmp (loc, fndecl,
8411 arg0, arg1, UNLE_EXPR, LE_EXPR);
8412 case BUILT_IN_ISGREATEREQUAL:
8413 return fold_builtin_unordered_cmp (loc, fndecl,
8414 arg0, arg1, UNLT_EXPR, LT_EXPR);
8415 case BUILT_IN_ISLESS:
8416 return fold_builtin_unordered_cmp (loc, fndecl,
8417 arg0, arg1, UNGE_EXPR, GE_EXPR);
8418 case BUILT_IN_ISLESSEQUAL:
8419 return fold_builtin_unordered_cmp (loc, fndecl,
8420 arg0, arg1, UNGT_EXPR, GT_EXPR);
8421 case BUILT_IN_ISLESSGREATER:
8422 return fold_builtin_unordered_cmp (loc, fndecl,
8423 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8424 case BUILT_IN_ISUNORDERED:
8425 return fold_builtin_unordered_cmp (loc, fndecl,
8426 arg0, arg1, UNORDERED_EXPR,
8427 NOP_EXPR);
8428
8429 /* We do the folding for va_start in the expander. */
8430 case BUILT_IN_VA_START:
8431 break;
8432
8433 case BUILT_IN_OBJECT_SIZE:
8434 return fold_builtin_object_size (arg0, arg1);
8435
8436 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8437 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8438
8439 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8440 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8441
8442 default:
8443 break;
8444 }
8445 return NULL_TREE;
8446 }
8447
8448 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8449 and ARG2.
8450 This function returns NULL_TREE if no simplification was possible. */
8451
8452 static tree
8453 fold_builtin_3 (location_t loc, tree fndecl,
8454 tree arg0, tree arg1, tree arg2)
8455 {
8456 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8457 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8458
8459 if (TREE_CODE (arg0) == ERROR_MARK
8460 || TREE_CODE (arg1) == ERROR_MARK
8461 || TREE_CODE (arg2) == ERROR_MARK)
8462 return NULL_TREE;
8463
8464 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8465 arg0, arg1, arg2))
8466 return ret;
8467
8468 switch (fcode)
8469 {
8470
8471 CASE_FLT_FN (BUILT_IN_SINCOS):
8472 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8473
8474 CASE_FLT_FN (BUILT_IN_FMA):
8475 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8476
8477 CASE_FLT_FN (BUILT_IN_REMQUO):
8478 if (validate_arg (arg0, REAL_TYPE)
8479 && validate_arg (arg1, REAL_TYPE)
8480 && validate_arg (arg2, POINTER_TYPE))
8481 return do_mpfr_remquo (arg0, arg1, arg2);
8482 break;
8483
8484 case BUILT_IN_STRNCMP:
8485 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8486
8487 case BUILT_IN_MEMCHR:
8488 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8489
8490 case BUILT_IN_BCMP:
8491 case BUILT_IN_MEMCMP:
8492 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
8493
8494 case BUILT_IN_EXPECT:
8495 return fold_builtin_expect (loc, arg0, arg1, arg2);
8496
8497 case BUILT_IN_ADD_OVERFLOW:
8498 case BUILT_IN_SUB_OVERFLOW:
8499 case BUILT_IN_MUL_OVERFLOW:
8500 case BUILT_IN_ADD_OVERFLOW_P:
8501 case BUILT_IN_SUB_OVERFLOW_P:
8502 case BUILT_IN_MUL_OVERFLOW_P:
8503 case BUILT_IN_SADD_OVERFLOW:
8504 case BUILT_IN_SADDL_OVERFLOW:
8505 case BUILT_IN_SADDLL_OVERFLOW:
8506 case BUILT_IN_SSUB_OVERFLOW:
8507 case BUILT_IN_SSUBL_OVERFLOW:
8508 case BUILT_IN_SSUBLL_OVERFLOW:
8509 case BUILT_IN_SMUL_OVERFLOW:
8510 case BUILT_IN_SMULL_OVERFLOW:
8511 case BUILT_IN_SMULLL_OVERFLOW:
8512 case BUILT_IN_UADD_OVERFLOW:
8513 case BUILT_IN_UADDL_OVERFLOW:
8514 case BUILT_IN_UADDLL_OVERFLOW:
8515 case BUILT_IN_USUB_OVERFLOW:
8516 case BUILT_IN_USUBL_OVERFLOW:
8517 case BUILT_IN_USUBLL_OVERFLOW:
8518 case BUILT_IN_UMUL_OVERFLOW:
8519 case BUILT_IN_UMULL_OVERFLOW:
8520 case BUILT_IN_UMULLL_OVERFLOW:
8521 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8522
8523 default:
8524 break;
8525 }
8526 return NULL_TREE;
8527 }
8528
8529 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8530 arguments. IGNORE is true if the result of the
8531 function call is ignored. This function returns NULL_TREE if no
8532 simplification was possible. */
8533
8534 tree
8535 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8536 {
8537 tree ret = NULL_TREE;
8538
8539 switch (nargs)
8540 {
8541 case 0:
8542 ret = fold_builtin_0 (loc, fndecl);
8543 break;
8544 case 1:
8545 ret = fold_builtin_1 (loc, fndecl, args[0]);
8546 break;
8547 case 2:
8548 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8549 break;
8550 case 3:
8551 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8552 break;
8553 default:
8554 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8555 break;
8556 }
8557 if (ret)
8558 {
8559 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8560 SET_EXPR_LOCATION (ret, loc);
8561 TREE_NO_WARNING (ret) = 1;
8562 return ret;
8563 }
8564 return NULL_TREE;
8565 }
8566
8567 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8568 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8569 of arguments in ARGS to be omitted. OLDNARGS is the number of
8570 elements in ARGS. */
8571
8572 static tree
8573 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8574 int skip, tree fndecl, int n, va_list newargs)
8575 {
8576 int nargs = oldnargs - skip + n;
8577 tree *buffer;
8578
8579 if (n > 0)
8580 {
8581 int i, j;
8582
8583 buffer = XALLOCAVEC (tree, nargs);
8584 for (i = 0; i < n; i++)
8585 buffer[i] = va_arg (newargs, tree);
8586 for (j = skip; j < oldnargs; j++, i++)
8587 buffer[i] = args[j];
8588 }
8589 else
8590 buffer = args + skip;
8591
8592 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8593 }
8594
8595 /* Return true if FNDECL shouldn't be folded right now.
8596 If a built-in function has an inline attribute always_inline
8597 wrapper, defer folding it after always_inline functions have
8598 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8599 might not be performed. */
8600
8601 bool
8602 avoid_folding_inline_builtin (tree fndecl)
8603 {
8604 return (DECL_DECLARED_INLINE_P (fndecl)
8605 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8606 && cfun
8607 && !cfun->always_inline_functions_inlined
8608 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8609 }
8610
8611 /* A wrapper function for builtin folding that prevents warnings for
8612 "statement without effect" and the like, caused by removing the
8613 call node earlier than the warning is generated. */
8614
8615 tree
8616 fold_call_expr (location_t loc, tree exp, bool ignore)
8617 {
8618 tree ret = NULL_TREE;
8619 tree fndecl = get_callee_fndecl (exp);
8620 if (fndecl
8621 && TREE_CODE (fndecl) == FUNCTION_DECL
8622 && DECL_BUILT_IN (fndecl)
8623 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8624 yet. Defer folding until we see all the arguments
8625 (after inlining). */
8626 && !CALL_EXPR_VA_ARG_PACK (exp))
8627 {
8628 int nargs = call_expr_nargs (exp);
8629
8630 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8631 instead last argument is __builtin_va_arg_pack (). Defer folding
8632 even in that case, until arguments are finalized. */
8633 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8634 {
8635 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8636 if (fndecl2
8637 && TREE_CODE (fndecl2) == FUNCTION_DECL
8638 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8639 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8640 return NULL_TREE;
8641 }
8642
8643 if (avoid_folding_inline_builtin (fndecl))
8644 return NULL_TREE;
8645
8646 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8647 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8648 CALL_EXPR_ARGP (exp), ignore);
8649 else
8650 {
8651 tree *args = CALL_EXPR_ARGP (exp);
8652 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8653 if (ret)
8654 return ret;
8655 }
8656 }
8657 return NULL_TREE;
8658 }
8659
8660 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8661 N arguments are passed in the array ARGARRAY. Return a folded
8662 expression or NULL_TREE if no simplification was possible. */
8663
8664 tree
8665 fold_builtin_call_array (location_t loc, tree,
8666 tree fn,
8667 int n,
8668 tree *argarray)
8669 {
8670 if (TREE_CODE (fn) != ADDR_EXPR)
8671 return NULL_TREE;
8672
8673 tree fndecl = TREE_OPERAND (fn, 0);
8674 if (TREE_CODE (fndecl) == FUNCTION_DECL
8675 && DECL_BUILT_IN (fndecl))
8676 {
8677 /* If last argument is __builtin_va_arg_pack (), arguments to this
8678 function are not finalized yet. Defer folding until they are. */
8679 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8680 {
8681 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8682 if (fndecl2
8683 && TREE_CODE (fndecl2) == FUNCTION_DECL
8684 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8685 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8686 return NULL_TREE;
8687 }
8688 if (avoid_folding_inline_builtin (fndecl))
8689 return NULL_TREE;
8690 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8691 return targetm.fold_builtin (fndecl, n, argarray, false);
8692 else
8693 return fold_builtin_n (loc, fndecl, argarray, n, false);
8694 }
8695
8696 return NULL_TREE;
8697 }
8698
8699 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8700 along with N new arguments specified as the "..." parameters. SKIP
8701 is the number of arguments in EXP to be omitted. This function is used
8702 to do varargs-to-varargs transformations. */
8703
8704 static tree
8705 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8706 {
8707 va_list ap;
8708 tree t;
8709
8710 va_start (ap, n);
8711 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8712 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8713 va_end (ap);
8714
8715 return t;
8716 }
8717
8718 /* Validate a single argument ARG against a tree code CODE representing
8719 a type. */
8720
8721 static bool
8722 validate_arg (const_tree arg, enum tree_code code)
8723 {
8724 if (!arg)
8725 return false;
8726 else if (code == POINTER_TYPE)
8727 return POINTER_TYPE_P (TREE_TYPE (arg));
8728 else if (code == INTEGER_TYPE)
8729 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8730 return code == TREE_CODE (TREE_TYPE (arg));
8731 }
8732
8733 /* This function validates the types of a function call argument list
8734 against a specified list of tree_codes. If the last specifier is a 0,
8735 that represents an ellipses, otherwise the last specifier must be a
8736 VOID_TYPE.
8737
8738 This is the GIMPLE version of validate_arglist. Eventually we want to
8739 completely convert builtins.c to work from GIMPLEs and the tree based
8740 validate_arglist will then be removed. */
8741
8742 bool
8743 validate_gimple_arglist (const gcall *call, ...)
8744 {
8745 enum tree_code code;
8746 bool res = 0;
8747 va_list ap;
8748 const_tree arg;
8749 size_t i;
8750
8751 va_start (ap, call);
8752 i = 0;
8753
8754 do
8755 {
8756 code = (enum tree_code) va_arg (ap, int);
8757 switch (code)
8758 {
8759 case 0:
8760 /* This signifies an ellipses, any further arguments are all ok. */
8761 res = true;
8762 goto end;
8763 case VOID_TYPE:
8764 /* This signifies an endlink, if no arguments remain, return
8765 true, otherwise return false. */
8766 res = (i == gimple_call_num_args (call));
8767 goto end;
8768 default:
8769 /* If no parameters remain or the parameter's code does not
8770 match the specified code, return false. Otherwise continue
8771 checking any remaining arguments. */
8772 arg = gimple_call_arg (call, i++);
8773 if (!validate_arg (arg, code))
8774 goto end;
8775 break;
8776 }
8777 }
8778 while (1);
8779
8780 /* We need gotos here since we can only have one VA_CLOSE in a
8781 function. */
8782 end: ;
8783 va_end (ap);
8784
8785 return res;
8786 }
8787
8788 /* Default target-specific builtin expander that does nothing. */
8789
8790 rtx
8791 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8792 rtx target ATTRIBUTE_UNUSED,
8793 rtx subtarget ATTRIBUTE_UNUSED,
8794 machine_mode mode ATTRIBUTE_UNUSED,
8795 int ignore ATTRIBUTE_UNUSED)
8796 {
8797 return NULL_RTX;
8798 }
8799
8800 /* Returns true is EXP represents data that would potentially reside
8801 in a readonly section. */
8802
8803 bool
8804 readonly_data_expr (tree exp)
8805 {
8806 STRIP_NOPS (exp);
8807
8808 if (TREE_CODE (exp) != ADDR_EXPR)
8809 return false;
8810
8811 exp = get_base_address (TREE_OPERAND (exp, 0));
8812 if (!exp)
8813 return false;
8814
8815 /* Make sure we call decl_readonly_section only for trees it
8816 can handle (since it returns true for everything it doesn't
8817 understand). */
8818 if (TREE_CODE (exp) == STRING_CST
8819 || TREE_CODE (exp) == CONSTRUCTOR
8820 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8821 return decl_readonly_section (exp, 0);
8822 else
8823 return false;
8824 }
8825
8826 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8827 to the call, and TYPE is its return type.
8828
8829 Return NULL_TREE if no simplification was possible, otherwise return the
8830 simplified form of the call as a tree.
8831
8832 The simplified form may be a constant or other expression which
8833 computes the same value, but in a more efficient manner (including
8834 calls to other builtin functions).
8835
8836 The call may contain arguments which need to be evaluated, but
8837 which are not useful to determine the result of the call. In
8838 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8839 COMPOUND_EXPR will be an argument which must be evaluated.
8840 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8841 COMPOUND_EXPR in the chain will contain the tree for the simplified
8842 form of the builtin function call. */
8843
8844 static tree
8845 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8846 {
8847 if (!validate_arg (s1, POINTER_TYPE)
8848 || !validate_arg (s2, POINTER_TYPE))
8849 return NULL_TREE;
8850 else
8851 {
8852 tree fn;
8853 const char *p1, *p2;
8854
8855 p2 = c_getstr (s2);
8856 if (p2 == NULL)
8857 return NULL_TREE;
8858
8859 p1 = c_getstr (s1);
8860 if (p1 != NULL)
8861 {
8862 const char *r = strstr (p1, p2);
8863 tree tem;
8864
8865 if (r == NULL)
8866 return build_int_cst (TREE_TYPE (s1), 0);
8867
8868 /* Return an offset into the constant string argument. */
8869 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8870 return fold_convert_loc (loc, type, tem);
8871 }
8872
8873 /* The argument is const char *, and the result is char *, so we need
8874 a type conversion here to avoid a warning. */
8875 if (p2[0] == '\0')
8876 return fold_convert_loc (loc, type, s1);
8877
8878 if (p2[1] != '\0')
8879 return NULL_TREE;
8880
8881 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8882 if (!fn)
8883 return NULL_TREE;
8884
8885 /* New argument list transforming strstr(s1, s2) to
8886 strchr(s1, s2[0]). */
8887 return build_call_expr_loc (loc, fn, 2, s1,
8888 build_int_cst (integer_type_node, p2[0]));
8889 }
8890 }
8891
8892 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8893 the call, and TYPE is its return type.
8894
8895 Return NULL_TREE if no simplification was possible, otherwise return the
8896 simplified form of the call as a tree.
8897
8898 The simplified form may be a constant or other expression which
8899 computes the same value, but in a more efficient manner (including
8900 calls to other builtin functions).
8901
8902 The call may contain arguments which need to be evaluated, but
8903 which are not useful to determine the result of the call. In
8904 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8905 COMPOUND_EXPR will be an argument which must be evaluated.
8906 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8907 COMPOUND_EXPR in the chain will contain the tree for the simplified
8908 form of the builtin function call. */
8909
8910 static tree
8911 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8912 {
8913 if (!validate_arg (s1, POINTER_TYPE)
8914 || !validate_arg (s2, INTEGER_TYPE))
8915 return NULL_TREE;
8916 else
8917 {
8918 const char *p1;
8919
8920 if (TREE_CODE (s2) != INTEGER_CST)
8921 return NULL_TREE;
8922
8923 p1 = c_getstr (s1);
8924 if (p1 != NULL)
8925 {
8926 char c;
8927 const char *r;
8928 tree tem;
8929
8930 if (target_char_cast (s2, &c))
8931 return NULL_TREE;
8932
8933 r = strchr (p1, c);
8934
8935 if (r == NULL)
8936 return build_int_cst (TREE_TYPE (s1), 0);
8937
8938 /* Return an offset into the constant string argument. */
8939 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8940 return fold_convert_loc (loc, type, tem);
8941 }
8942 return NULL_TREE;
8943 }
8944 }
8945
8946 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8947 the call, and TYPE is its return type.
8948
8949 Return NULL_TREE if no simplification was possible, otherwise return the
8950 simplified form of the call as a tree.
8951
8952 The simplified form may be a constant or other expression which
8953 computes the same value, but in a more efficient manner (including
8954 calls to other builtin functions).
8955
8956 The call may contain arguments which need to be evaluated, but
8957 which are not useful to determine the result of the call. In
8958 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8959 COMPOUND_EXPR will be an argument which must be evaluated.
8960 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8961 COMPOUND_EXPR in the chain will contain the tree for the simplified
8962 form of the builtin function call. */
8963
8964 static tree
8965 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8966 {
8967 if (!validate_arg (s1, POINTER_TYPE)
8968 || !validate_arg (s2, INTEGER_TYPE))
8969 return NULL_TREE;
8970 else
8971 {
8972 tree fn;
8973 const char *p1;
8974
8975 if (TREE_CODE (s2) != INTEGER_CST)
8976 return NULL_TREE;
8977
8978 p1 = c_getstr (s1);
8979 if (p1 != NULL)
8980 {
8981 char c;
8982 const char *r;
8983 tree tem;
8984
8985 if (target_char_cast (s2, &c))
8986 return NULL_TREE;
8987
8988 r = strrchr (p1, c);
8989
8990 if (r == NULL)
8991 return build_int_cst (TREE_TYPE (s1), 0);
8992
8993 /* Return an offset into the constant string argument. */
8994 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8995 return fold_convert_loc (loc, type, tem);
8996 }
8997
8998 if (! integer_zerop (s2))
8999 return NULL_TREE;
9000
9001 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9002 if (!fn)
9003 return NULL_TREE;
9004
9005 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9006 return build_call_expr_loc (loc, fn, 2, s1, s2);
9007 }
9008 }
9009
9010 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9011 to the call, and TYPE is its return type.
9012
9013 Return NULL_TREE if no simplification was possible, otherwise return the
9014 simplified form of the call as a tree.
9015
9016 The simplified form may be a constant or other expression which
9017 computes the same value, but in a more efficient manner (including
9018 calls to other builtin functions).
9019
9020 The call may contain arguments which need to be evaluated, but
9021 which are not useful to determine the result of the call. In
9022 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9023 COMPOUND_EXPR will be an argument which must be evaluated.
9024 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9025 COMPOUND_EXPR in the chain will contain the tree for the simplified
9026 form of the builtin function call. */
9027
9028 static tree
9029 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9030 {
9031 if (!validate_arg (s1, POINTER_TYPE)
9032 || !validate_arg (s2, POINTER_TYPE))
9033 return NULL_TREE;
9034 else
9035 {
9036 tree fn;
9037 const char *p1, *p2;
9038
9039 p2 = c_getstr (s2);
9040 if (p2 == NULL)
9041 return NULL_TREE;
9042
9043 p1 = c_getstr (s1);
9044 if (p1 != NULL)
9045 {
9046 const char *r = strpbrk (p1, p2);
9047 tree tem;
9048
9049 if (r == NULL)
9050 return build_int_cst (TREE_TYPE (s1), 0);
9051
9052 /* Return an offset into the constant string argument. */
9053 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9054 return fold_convert_loc (loc, type, tem);
9055 }
9056
9057 if (p2[0] == '\0')
9058 /* strpbrk(x, "") == NULL.
9059 Evaluate and ignore s1 in case it had side-effects. */
9060 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9061
9062 if (p2[1] != '\0')
9063 return NULL_TREE; /* Really call strpbrk. */
9064
9065 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9066 if (!fn)
9067 return NULL_TREE;
9068
9069 /* New argument list transforming strpbrk(s1, s2) to
9070 strchr(s1, s2[0]). */
9071 return build_call_expr_loc (loc, fn, 2, s1,
9072 build_int_cst (integer_type_node, p2[0]));
9073 }
9074 }
9075
9076 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9077 to the call.
9078
9079 Return NULL_TREE if no simplification was possible, otherwise return the
9080 simplified form of the call as a tree.
9081
9082 The simplified form may be a constant or other expression which
9083 computes the same value, but in a more efficient manner (including
9084 calls to other builtin functions).
9085
9086 The call may contain arguments which need to be evaluated, but
9087 which are not useful to determine the result of the call. In
9088 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9089 COMPOUND_EXPR will be an argument which must be evaluated.
9090 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9091 COMPOUND_EXPR in the chain will contain the tree for the simplified
9092 form of the builtin function call. */
9093
9094 static tree
9095 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9096 {
9097 if (!validate_arg (s1, POINTER_TYPE)
9098 || !validate_arg (s2, POINTER_TYPE))
9099 return NULL_TREE;
9100 else
9101 {
9102 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9103
9104 /* If either argument is "", return NULL_TREE. */
9105 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9106 /* Evaluate and ignore both arguments in case either one has
9107 side-effects. */
9108 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9109 s1, s2);
9110 return NULL_TREE;
9111 }
9112 }
9113
9114 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9115 to the call.
9116
9117 Return NULL_TREE if no simplification was possible, otherwise return the
9118 simplified form of the call as a tree.
9119
9120 The simplified form may be a constant or other expression which
9121 computes the same value, but in a more efficient manner (including
9122 calls to other builtin functions).
9123
9124 The call may contain arguments which need to be evaluated, but
9125 which are not useful to determine the result of the call. In
9126 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9127 COMPOUND_EXPR will be an argument which must be evaluated.
9128 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9129 COMPOUND_EXPR in the chain will contain the tree for the simplified
9130 form of the builtin function call. */
9131
9132 static tree
9133 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9134 {
9135 if (!validate_arg (s1, POINTER_TYPE)
9136 || !validate_arg (s2, POINTER_TYPE))
9137 return NULL_TREE;
9138 else
9139 {
9140 /* If the first argument is "", return NULL_TREE. */
9141 const char *p1 = c_getstr (s1);
9142 if (p1 && *p1 == '\0')
9143 {
9144 /* Evaluate and ignore argument s2 in case it has
9145 side-effects. */
9146 return omit_one_operand_loc (loc, size_type_node,
9147 size_zero_node, s2);
9148 }
9149
9150 /* If the second argument is "", return __builtin_strlen(s1). */
9151 const char *p2 = c_getstr (s2);
9152 if (p2 && *p2 == '\0')
9153 {
9154 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9155
9156 /* If the replacement _DECL isn't initialized, don't do the
9157 transformation. */
9158 if (!fn)
9159 return NULL_TREE;
9160
9161 return build_call_expr_loc (loc, fn, 1, s1);
9162 }
9163 return NULL_TREE;
9164 }
9165 }
9166
9167 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9168 produced. False otherwise. This is done so that we don't output the error
9169 or warning twice or three times. */
9170
9171 bool
9172 fold_builtin_next_arg (tree exp, bool va_start_p)
9173 {
9174 tree fntype = TREE_TYPE (current_function_decl);
9175 int nargs = call_expr_nargs (exp);
9176 tree arg;
9177 /* There is good chance the current input_location points inside the
9178 definition of the va_start macro (perhaps on the token for
9179 builtin) in a system header, so warnings will not be emitted.
9180 Use the location in real source code. */
9181 source_location current_location =
9182 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9183 NULL);
9184
9185 if (!stdarg_p (fntype))
9186 {
9187 error ("%<va_start%> used in function with fixed args");
9188 return true;
9189 }
9190
9191 if (va_start_p)
9192 {
9193 if (va_start_p && (nargs != 2))
9194 {
9195 error ("wrong number of arguments to function %<va_start%>");
9196 return true;
9197 }
9198 arg = CALL_EXPR_ARG (exp, 1);
9199 }
9200 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9201 when we checked the arguments and if needed issued a warning. */
9202 else
9203 {
9204 if (nargs == 0)
9205 {
9206 /* Evidently an out of date version of <stdarg.h>; can't validate
9207 va_start's second argument, but can still work as intended. */
9208 warning_at (current_location,
9209 OPT_Wvarargs,
9210 "%<__builtin_next_arg%> called without an argument");
9211 return true;
9212 }
9213 else if (nargs > 1)
9214 {
9215 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9216 return true;
9217 }
9218 arg = CALL_EXPR_ARG (exp, 0);
9219 }
9220
9221 if (TREE_CODE (arg) == SSA_NAME)
9222 arg = SSA_NAME_VAR (arg);
9223
9224 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9225 or __builtin_next_arg (0) the first time we see it, after checking
9226 the arguments and if needed issuing a warning. */
9227 if (!integer_zerop (arg))
9228 {
9229 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9230
9231 /* Strip off all nops for the sake of the comparison. This
9232 is not quite the same as STRIP_NOPS. It does more.
9233 We must also strip off INDIRECT_EXPR for C++ reference
9234 parameters. */
9235 while (CONVERT_EXPR_P (arg)
9236 || TREE_CODE (arg) == INDIRECT_REF)
9237 arg = TREE_OPERAND (arg, 0);
9238 if (arg != last_parm)
9239 {
9240 /* FIXME: Sometimes with the tree optimizers we can get the
9241 not the last argument even though the user used the last
9242 argument. We just warn and set the arg to be the last
9243 argument so that we will get wrong-code because of
9244 it. */
9245 warning_at (current_location,
9246 OPT_Wvarargs,
9247 "second parameter of %<va_start%> not last named argument");
9248 }
9249
9250 /* Undefined by C99 7.15.1.4p4 (va_start):
9251 "If the parameter parmN is declared with the register storage
9252 class, with a function or array type, or with a type that is
9253 not compatible with the type that results after application of
9254 the default argument promotions, the behavior is undefined."
9255 */
9256 else if (DECL_REGISTER (arg))
9257 {
9258 warning_at (current_location,
9259 OPT_Wvarargs,
9260 "undefined behavior when second parameter of "
9261 "%<va_start%> is declared with %<register%> storage");
9262 }
9263
9264 /* We want to verify the second parameter just once before the tree
9265 optimizers are run and then avoid keeping it in the tree,
9266 as otherwise we could warn even for correct code like:
9267 void foo (int i, ...)
9268 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9269 if (va_start_p)
9270 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9271 else
9272 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9273 }
9274 return false;
9275 }
9276
9277
9278 /* Expand a call EXP to __builtin_object_size. */
9279
9280 static rtx
9281 expand_builtin_object_size (tree exp)
9282 {
9283 tree ost;
9284 int object_size_type;
9285 tree fndecl = get_callee_fndecl (exp);
9286
9287 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9288 {
9289 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9290 exp, fndecl);
9291 expand_builtin_trap ();
9292 return const0_rtx;
9293 }
9294
9295 ost = CALL_EXPR_ARG (exp, 1);
9296 STRIP_NOPS (ost);
9297
9298 if (TREE_CODE (ost) != INTEGER_CST
9299 || tree_int_cst_sgn (ost) < 0
9300 || compare_tree_int (ost, 3) > 0)
9301 {
9302 error ("%Klast argument of %D is not integer constant between 0 and 3",
9303 exp, fndecl);
9304 expand_builtin_trap ();
9305 return const0_rtx;
9306 }
9307
9308 object_size_type = tree_to_shwi (ost);
9309
9310 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9311 }
9312
9313 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9314 FCODE is the BUILT_IN_* to use.
9315 Return NULL_RTX if we failed; the caller should emit a normal call,
9316 otherwise try to get the result in TARGET, if convenient (and in
9317 mode MODE if that's convenient). */
9318
9319 static rtx
9320 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9321 enum built_in_function fcode)
9322 {
9323 tree dest, src, len, size;
9324
9325 if (!validate_arglist (exp,
9326 POINTER_TYPE,
9327 fcode == BUILT_IN_MEMSET_CHK
9328 ? INTEGER_TYPE : POINTER_TYPE,
9329 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9330 return NULL_RTX;
9331
9332 dest = CALL_EXPR_ARG (exp, 0);
9333 src = CALL_EXPR_ARG (exp, 1);
9334 len = CALL_EXPR_ARG (exp, 2);
9335 size = CALL_EXPR_ARG (exp, 3);
9336
9337 if (! tree_fits_uhwi_p (size))
9338 return NULL_RTX;
9339
9340 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9341 {
9342 tree fn;
9343
9344 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9345 {
9346 warning_at (tree_nonartificial_location (exp),
9347 0, "%Kcall to %D will always overflow destination buffer",
9348 exp, get_callee_fndecl (exp));
9349 return NULL_RTX;
9350 }
9351
9352 fn = NULL_TREE;
9353 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9354 mem{cpy,pcpy,move,set} is available. */
9355 switch (fcode)
9356 {
9357 case BUILT_IN_MEMCPY_CHK:
9358 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9359 break;
9360 case BUILT_IN_MEMPCPY_CHK:
9361 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9362 break;
9363 case BUILT_IN_MEMMOVE_CHK:
9364 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9365 break;
9366 case BUILT_IN_MEMSET_CHK:
9367 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9368 break;
9369 default:
9370 break;
9371 }
9372
9373 if (! fn)
9374 return NULL_RTX;
9375
9376 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9377 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9378 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9379 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9380 }
9381 else if (fcode == BUILT_IN_MEMSET_CHK)
9382 return NULL_RTX;
9383 else
9384 {
9385 unsigned int dest_align = get_pointer_alignment (dest);
9386
9387 /* If DEST is not a pointer type, call the normal function. */
9388 if (dest_align == 0)
9389 return NULL_RTX;
9390
9391 /* If SRC and DEST are the same (and not volatile), do nothing. */
9392 if (operand_equal_p (src, dest, 0))
9393 {
9394 tree expr;
9395
9396 if (fcode != BUILT_IN_MEMPCPY_CHK)
9397 {
9398 /* Evaluate and ignore LEN in case it has side-effects. */
9399 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9400 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9401 }
9402
9403 expr = fold_build_pointer_plus (dest, len);
9404 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9405 }
9406
9407 /* __memmove_chk special case. */
9408 if (fcode == BUILT_IN_MEMMOVE_CHK)
9409 {
9410 unsigned int src_align = get_pointer_alignment (src);
9411
9412 if (src_align == 0)
9413 return NULL_RTX;
9414
9415 /* If src is categorized for a readonly section we can use
9416 normal __memcpy_chk. */
9417 if (readonly_data_expr (src))
9418 {
9419 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9420 if (!fn)
9421 return NULL_RTX;
9422 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9423 dest, src, len, size);
9424 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9425 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9426 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9427 }
9428 }
9429 return NULL_RTX;
9430 }
9431 }
9432
9433 /* Emit warning if a buffer overflow is detected at compile time. */
9434
9435 static void
9436 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9437 {
9438 int is_strlen = 0;
9439 tree len, size;
9440 location_t loc = tree_nonartificial_location (exp);
9441
9442 switch (fcode)
9443 {
9444 case BUILT_IN_STRCPY_CHK:
9445 case BUILT_IN_STPCPY_CHK:
9446 /* For __strcat_chk the warning will be emitted only if overflowing
9447 by at least strlen (dest) + 1 bytes. */
9448 case BUILT_IN_STRCAT_CHK:
9449 len = CALL_EXPR_ARG (exp, 1);
9450 size = CALL_EXPR_ARG (exp, 2);
9451 is_strlen = 1;
9452 break;
9453 case BUILT_IN_STRNCAT_CHK:
9454 case BUILT_IN_STRNCPY_CHK:
9455 case BUILT_IN_STPNCPY_CHK:
9456 len = CALL_EXPR_ARG (exp, 2);
9457 size = CALL_EXPR_ARG (exp, 3);
9458 break;
9459 case BUILT_IN_SNPRINTF_CHK:
9460 case BUILT_IN_VSNPRINTF_CHK:
9461 len = CALL_EXPR_ARG (exp, 1);
9462 size = CALL_EXPR_ARG (exp, 3);
9463 break;
9464 default:
9465 gcc_unreachable ();
9466 }
9467
9468 if (!len || !size)
9469 return;
9470
9471 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9472 return;
9473
9474 if (is_strlen)
9475 {
9476 len = c_strlen (len, 1);
9477 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9478 return;
9479 }
9480 else if (fcode == BUILT_IN_STRNCAT_CHK)
9481 {
9482 tree src = CALL_EXPR_ARG (exp, 1);
9483 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9484 return;
9485 src = c_strlen (src, 1);
9486 if (! src || ! tree_fits_uhwi_p (src))
9487 {
9488 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9489 exp, get_callee_fndecl (exp));
9490 return;
9491 }
9492 else if (tree_int_cst_lt (src, size))
9493 return;
9494 }
9495 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9496 return;
9497
9498 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9499 exp, get_callee_fndecl (exp));
9500 }
9501
9502 /* Emit warning if a buffer overflow is detected at compile time
9503 in __sprintf_chk/__vsprintf_chk calls. */
9504
9505 static void
9506 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9507 {
9508 tree size, len, fmt;
9509 const char *fmt_str;
9510 int nargs = call_expr_nargs (exp);
9511
9512 /* Verify the required arguments in the original call. */
9513
9514 if (nargs < 4)
9515 return;
9516 size = CALL_EXPR_ARG (exp, 2);
9517 fmt = CALL_EXPR_ARG (exp, 3);
9518
9519 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9520 return;
9521
9522 /* Check whether the format is a literal string constant. */
9523 fmt_str = c_getstr (fmt);
9524 if (fmt_str == NULL)
9525 return;
9526
9527 if (!init_target_chars ())
9528 return;
9529
9530 /* If the format doesn't contain % args or %%, we know its size. */
9531 if (strchr (fmt_str, target_percent) == 0)
9532 len = build_int_cstu (size_type_node, strlen (fmt_str));
9533 /* If the format is "%s" and first ... argument is a string literal,
9534 we know it too. */
9535 else if (fcode == BUILT_IN_SPRINTF_CHK
9536 && strcmp (fmt_str, target_percent_s) == 0)
9537 {
9538 tree arg;
9539
9540 if (nargs < 5)
9541 return;
9542 arg = CALL_EXPR_ARG (exp, 4);
9543 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9544 return;
9545
9546 len = c_strlen (arg, 1);
9547 if (!len || ! tree_fits_uhwi_p (len))
9548 return;
9549 }
9550 else
9551 return;
9552
9553 if (! tree_int_cst_lt (len, size))
9554 warning_at (tree_nonartificial_location (exp),
9555 0, "%Kcall to %D will always overflow destination buffer",
9556 exp, get_callee_fndecl (exp));
9557 }
9558
9559 /* Emit warning if a free is called with address of a variable. */
9560
9561 static void
9562 maybe_emit_free_warning (tree exp)
9563 {
9564 tree arg = CALL_EXPR_ARG (exp, 0);
9565
9566 STRIP_NOPS (arg);
9567 if (TREE_CODE (arg) != ADDR_EXPR)
9568 return;
9569
9570 arg = get_base_address (TREE_OPERAND (arg, 0));
9571 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9572 return;
9573
9574 if (SSA_VAR_P (arg))
9575 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9576 "%Kattempt to free a non-heap object %qD", exp, arg);
9577 else
9578 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9579 "%Kattempt to free a non-heap object", exp);
9580 }
9581
9582 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9583 if possible. */
9584
9585 static tree
9586 fold_builtin_object_size (tree ptr, tree ost)
9587 {
9588 unsigned HOST_WIDE_INT bytes;
9589 int object_size_type;
9590
9591 if (!validate_arg (ptr, POINTER_TYPE)
9592 || !validate_arg (ost, INTEGER_TYPE))
9593 return NULL_TREE;
9594
9595 STRIP_NOPS (ost);
9596
9597 if (TREE_CODE (ost) != INTEGER_CST
9598 || tree_int_cst_sgn (ost) < 0
9599 || compare_tree_int (ost, 3) > 0)
9600 return NULL_TREE;
9601
9602 object_size_type = tree_to_shwi (ost);
9603
9604 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9605 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9606 and (size_t) 0 for types 2 and 3. */
9607 if (TREE_SIDE_EFFECTS (ptr))
9608 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9609
9610 if (TREE_CODE (ptr) == ADDR_EXPR)
9611 {
9612 compute_builtin_object_size (ptr, object_size_type, &bytes);
9613 if (wi::fits_to_tree_p (bytes, size_type_node))
9614 return build_int_cstu (size_type_node, bytes);
9615 }
9616 else if (TREE_CODE (ptr) == SSA_NAME)
9617 {
9618 /* If object size is not known yet, delay folding until
9619 later. Maybe subsequent passes will help determining
9620 it. */
9621 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9622 && wi::fits_to_tree_p (bytes, size_type_node))
9623 return build_int_cstu (size_type_node, bytes);
9624 }
9625
9626 return NULL_TREE;
9627 }
9628
9629 /* Builtins with folding operations that operate on "..." arguments
9630 need special handling; we need to store the arguments in a convenient
9631 data structure before attempting any folding. Fortunately there are
9632 only a few builtins that fall into this category. FNDECL is the
9633 function, EXP is the CALL_EXPR for the call. */
9634
9635 static tree
9636 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9637 {
9638 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9639 tree ret = NULL_TREE;
9640
9641 switch (fcode)
9642 {
9643 case BUILT_IN_FPCLASSIFY:
9644 ret = fold_builtin_fpclassify (loc, args, nargs);
9645 break;
9646
9647 default:
9648 break;
9649 }
9650 if (ret)
9651 {
9652 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9653 SET_EXPR_LOCATION (ret, loc);
9654 TREE_NO_WARNING (ret) = 1;
9655 return ret;
9656 }
9657 return NULL_TREE;
9658 }
9659
9660 /* Initialize format string characters in the target charset. */
9661
9662 bool
9663 init_target_chars (void)
9664 {
9665 static bool init;
9666 if (!init)
9667 {
9668 target_newline = lang_hooks.to_target_charset ('\n');
9669 target_percent = lang_hooks.to_target_charset ('%');
9670 target_c = lang_hooks.to_target_charset ('c');
9671 target_s = lang_hooks.to_target_charset ('s');
9672 if (target_newline == 0 || target_percent == 0 || target_c == 0
9673 || target_s == 0)
9674 return false;
9675
9676 target_percent_c[0] = target_percent;
9677 target_percent_c[1] = target_c;
9678 target_percent_c[2] = '\0';
9679
9680 target_percent_s[0] = target_percent;
9681 target_percent_s[1] = target_s;
9682 target_percent_s[2] = '\0';
9683
9684 target_percent_s_newline[0] = target_percent;
9685 target_percent_s_newline[1] = target_s;
9686 target_percent_s_newline[2] = target_newline;
9687 target_percent_s_newline[3] = '\0';
9688
9689 init = true;
9690 }
9691 return true;
9692 }
9693
9694 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9695 and no overflow/underflow occurred. INEXACT is true if M was not
9696 exactly calculated. TYPE is the tree type for the result. This
9697 function assumes that you cleared the MPFR flags and then
9698 calculated M to see if anything subsequently set a flag prior to
9699 entering this function. Return NULL_TREE if any checks fail. */
9700
9701 static tree
9702 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9703 {
9704 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9705 overflow/underflow occurred. If -frounding-math, proceed iff the
9706 result of calling FUNC was exact. */
9707 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9708 && (!flag_rounding_math || !inexact))
9709 {
9710 REAL_VALUE_TYPE rr;
9711
9712 real_from_mpfr (&rr, m, type, GMP_RNDN);
9713 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9714 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9715 but the mpft_t is not, then we underflowed in the
9716 conversion. */
9717 if (real_isfinite (&rr)
9718 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9719 {
9720 REAL_VALUE_TYPE rmode;
9721
9722 real_convert (&rmode, TYPE_MODE (type), &rr);
9723 /* Proceed iff the specified mode can hold the value. */
9724 if (real_identical (&rmode, &rr))
9725 return build_real (type, rmode);
9726 }
9727 }
9728 return NULL_TREE;
9729 }
9730
9731 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9732 number and no overflow/underflow occurred. INEXACT is true if M
9733 was not exactly calculated. TYPE is the tree type for the result.
9734 This function assumes that you cleared the MPFR flags and then
9735 calculated M to see if anything subsequently set a flag prior to
9736 entering this function. Return NULL_TREE if any checks fail, if
9737 FORCE_CONVERT is true, then bypass the checks. */
9738
9739 static tree
9740 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9741 {
9742 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9743 overflow/underflow occurred. If -frounding-math, proceed iff the
9744 result of calling FUNC was exact. */
9745 if (force_convert
9746 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9747 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9748 && (!flag_rounding_math || !inexact)))
9749 {
9750 REAL_VALUE_TYPE re, im;
9751
9752 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9753 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9754 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9755 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9756 but the mpft_t is not, then we underflowed in the
9757 conversion. */
9758 if (force_convert
9759 || (real_isfinite (&re) && real_isfinite (&im)
9760 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9761 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9762 {
9763 REAL_VALUE_TYPE re_mode, im_mode;
9764
9765 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9766 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9767 /* Proceed iff the specified mode can hold the value. */
9768 if (force_convert
9769 || (real_identical (&re_mode, &re)
9770 && real_identical (&im_mode, &im)))
9771 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9772 build_real (TREE_TYPE (type), im_mode));
9773 }
9774 }
9775 return NULL_TREE;
9776 }
9777
9778 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9779 the pointer *(ARG_QUO) and return the result. The type is taken
9780 from the type of ARG0 and is used for setting the precision of the
9781 calculation and results. */
9782
9783 static tree
9784 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9785 {
9786 tree const type = TREE_TYPE (arg0);
9787 tree result = NULL_TREE;
9788
9789 STRIP_NOPS (arg0);
9790 STRIP_NOPS (arg1);
9791
9792 /* To proceed, MPFR must exactly represent the target floating point
9793 format, which only happens when the target base equals two. */
9794 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9795 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9796 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9797 {
9798 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9799 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9800
9801 if (real_isfinite (ra0) && real_isfinite (ra1))
9802 {
9803 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9804 const int prec = fmt->p;
9805 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9806 tree result_rem;
9807 long integer_quo;
9808 mpfr_t m0, m1;
9809
9810 mpfr_inits2 (prec, m0, m1, NULL);
9811 mpfr_from_real (m0, ra0, GMP_RNDN);
9812 mpfr_from_real (m1, ra1, GMP_RNDN);
9813 mpfr_clear_flags ();
9814 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9815 /* Remquo is independent of the rounding mode, so pass
9816 inexact=0 to do_mpfr_ckconv(). */
9817 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9818 mpfr_clears (m0, m1, NULL);
9819 if (result_rem)
9820 {
9821 /* MPFR calculates quo in the host's long so it may
9822 return more bits in quo than the target int can hold
9823 if sizeof(host long) > sizeof(target int). This can
9824 happen even for native compilers in LP64 mode. In
9825 these cases, modulo the quo value with the largest
9826 number that the target int can hold while leaving one
9827 bit for the sign. */
9828 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9829 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9830
9831 /* Dereference the quo pointer argument. */
9832 arg_quo = build_fold_indirect_ref (arg_quo);
9833 /* Proceed iff a valid pointer type was passed in. */
9834 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9835 {
9836 /* Set the value. */
9837 tree result_quo
9838 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9839 build_int_cst (TREE_TYPE (arg_quo),
9840 integer_quo));
9841 TREE_SIDE_EFFECTS (result_quo) = 1;
9842 /* Combine the quo assignment with the rem. */
9843 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9844 result_quo, result_rem));
9845 }
9846 }
9847 }
9848 }
9849 return result;
9850 }
9851
9852 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9853 resulting value as a tree with type TYPE. The mpfr precision is
9854 set to the precision of TYPE. We assume that this mpfr function
9855 returns zero if the result could be calculated exactly within the
9856 requested precision. In addition, the integer pointer represented
9857 by ARG_SG will be dereferenced and set to the appropriate signgam
9858 (-1,1) value. */
9859
9860 static tree
9861 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9862 {
9863 tree result = NULL_TREE;
9864
9865 STRIP_NOPS (arg);
9866
9867 /* To proceed, MPFR must exactly represent the target floating point
9868 format, which only happens when the target base equals two. Also
9869 verify ARG is a constant and that ARG_SG is an int pointer. */
9870 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9871 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9872 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9873 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9874 {
9875 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9876
9877 /* In addition to NaN and Inf, the argument cannot be zero or a
9878 negative integer. */
9879 if (real_isfinite (ra)
9880 && ra->cl != rvc_zero
9881 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9882 {
9883 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9884 const int prec = fmt->p;
9885 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9886 int inexact, sg;
9887 mpfr_t m;
9888 tree result_lg;
9889
9890 mpfr_init2 (m, prec);
9891 mpfr_from_real (m, ra, GMP_RNDN);
9892 mpfr_clear_flags ();
9893 inexact = mpfr_lgamma (m, &sg, m, rnd);
9894 result_lg = do_mpfr_ckconv (m, type, inexact);
9895 mpfr_clear (m);
9896 if (result_lg)
9897 {
9898 tree result_sg;
9899
9900 /* Dereference the arg_sg pointer argument. */
9901 arg_sg = build_fold_indirect_ref (arg_sg);
9902 /* Assign the signgam value into *arg_sg. */
9903 result_sg = fold_build2 (MODIFY_EXPR,
9904 TREE_TYPE (arg_sg), arg_sg,
9905 build_int_cst (TREE_TYPE (arg_sg), sg));
9906 TREE_SIDE_EFFECTS (result_sg) = 1;
9907 /* Combine the signgam assignment with the lgamma result. */
9908 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9909 result_sg, result_lg));
9910 }
9911 }
9912 }
9913
9914 return result;
9915 }
9916
9917 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9918 mpc function FUNC on it and return the resulting value as a tree
9919 with type TYPE. The mpfr precision is set to the precision of
9920 TYPE. We assume that function FUNC returns zero if the result
9921 could be calculated exactly within the requested precision. If
9922 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9923 in the arguments and/or results. */
9924
9925 tree
9926 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9927 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9928 {
9929 tree result = NULL_TREE;
9930
9931 STRIP_NOPS (arg0);
9932 STRIP_NOPS (arg1);
9933
9934 /* To proceed, MPFR must exactly represent the target floating point
9935 format, which only happens when the target base equals two. */
9936 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9938 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9940 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9941 {
9942 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9943 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9944 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9945 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9946
9947 if (do_nonfinite
9948 || (real_isfinite (re0) && real_isfinite (im0)
9949 && real_isfinite (re1) && real_isfinite (im1)))
9950 {
9951 const struct real_format *const fmt =
9952 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9953 const int prec = fmt->p;
9954 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9955 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9956 int inexact;
9957 mpc_t m0, m1;
9958
9959 mpc_init2 (m0, prec);
9960 mpc_init2 (m1, prec);
9961 mpfr_from_real (mpc_realref (m0), re0, rnd);
9962 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9963 mpfr_from_real (mpc_realref (m1), re1, rnd);
9964 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9965 mpfr_clear_flags ();
9966 inexact = func (m0, m0, m1, crnd);
9967 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9968 mpc_clear (m0);
9969 mpc_clear (m1);
9970 }
9971 }
9972
9973 return result;
9974 }
9975
9976 /* A wrapper function for builtin folding that prevents warnings for
9977 "statement without effect" and the like, caused by removing the
9978 call node earlier than the warning is generated. */
9979
9980 tree
9981 fold_call_stmt (gcall *stmt, bool ignore)
9982 {
9983 tree ret = NULL_TREE;
9984 tree fndecl = gimple_call_fndecl (stmt);
9985 location_t loc = gimple_location (stmt);
9986 if (fndecl
9987 && TREE_CODE (fndecl) == FUNCTION_DECL
9988 && DECL_BUILT_IN (fndecl)
9989 && !gimple_call_va_arg_pack_p (stmt))
9990 {
9991 int nargs = gimple_call_num_args (stmt);
9992 tree *args = (nargs > 0
9993 ? gimple_call_arg_ptr (stmt, 0)
9994 : &error_mark_node);
9995
9996 if (avoid_folding_inline_builtin (fndecl))
9997 return NULL_TREE;
9998 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9999 {
10000 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10001 }
10002 else
10003 {
10004 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10005 if (ret)
10006 {
10007 /* Propagate location information from original call to
10008 expansion of builtin. Otherwise things like
10009 maybe_emit_chk_warning, that operate on the expansion
10010 of a builtin, will use the wrong location information. */
10011 if (gimple_has_location (stmt))
10012 {
10013 tree realret = ret;
10014 if (TREE_CODE (ret) == NOP_EXPR)
10015 realret = TREE_OPERAND (ret, 0);
10016 if (CAN_HAVE_LOCATION_P (realret)
10017 && !EXPR_HAS_LOCATION (realret))
10018 SET_EXPR_LOCATION (realret, loc);
10019 return realret;
10020 }
10021 return ret;
10022 }
10023 }
10024 }
10025 return NULL_TREE;
10026 }
10027
10028 /* Look up the function in builtin_decl that corresponds to DECL
10029 and set ASMSPEC as its user assembler name. DECL must be a
10030 function decl that declares a builtin. */
10031
10032 void
10033 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10034 {
10035 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10036 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10037 && asmspec != 0);
10038
10039 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10040 set_user_assembler_name (builtin, asmspec);
10041
10042 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10043 && INT_TYPE_SIZE < BITS_PER_WORD)
10044 {
10045 set_user_assembler_libfunc ("ffs", asmspec);
10046 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10047 "ffs");
10048 }
10049 }
10050
10051 /* Return true if DECL is a builtin that expands to a constant or similarly
10052 simple code. */
10053 bool
10054 is_simple_builtin (tree decl)
10055 {
10056 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10057 switch (DECL_FUNCTION_CODE (decl))
10058 {
10059 /* Builtins that expand to constants. */
10060 case BUILT_IN_CONSTANT_P:
10061 case BUILT_IN_EXPECT:
10062 case BUILT_IN_OBJECT_SIZE:
10063 case BUILT_IN_UNREACHABLE:
10064 /* Simple register moves or loads from stack. */
10065 case BUILT_IN_ASSUME_ALIGNED:
10066 case BUILT_IN_RETURN_ADDRESS:
10067 case BUILT_IN_EXTRACT_RETURN_ADDR:
10068 case BUILT_IN_FROB_RETURN_ADDR:
10069 case BUILT_IN_RETURN:
10070 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10071 case BUILT_IN_FRAME_ADDRESS:
10072 case BUILT_IN_VA_END:
10073 case BUILT_IN_STACK_SAVE:
10074 case BUILT_IN_STACK_RESTORE:
10075 /* Exception state returns or moves registers around. */
10076 case BUILT_IN_EH_FILTER:
10077 case BUILT_IN_EH_POINTER:
10078 case BUILT_IN_EH_COPY_VALUES:
10079 return true;
10080
10081 default:
10082 return false;
10083 }
10084
10085 return false;
10086 }
10087
10088 /* Return true if DECL is a builtin that is not expensive, i.e., they are
10089 most probably expanded inline into reasonably simple code. This is a
10090 superset of is_simple_builtin. */
10091 bool
10092 is_inexpensive_builtin (tree decl)
10093 {
10094 if (!decl)
10095 return false;
10096 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10097 return true;
10098 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10099 switch (DECL_FUNCTION_CODE (decl))
10100 {
10101 case BUILT_IN_ABS:
10102 case BUILT_IN_ALLOCA:
10103 case BUILT_IN_ALLOCA_WITH_ALIGN:
10104 case BUILT_IN_BSWAP16:
10105 case BUILT_IN_BSWAP32:
10106 case BUILT_IN_BSWAP64:
10107 case BUILT_IN_CLZ:
10108 case BUILT_IN_CLZIMAX:
10109 case BUILT_IN_CLZL:
10110 case BUILT_IN_CLZLL:
10111 case BUILT_IN_CTZ:
10112 case BUILT_IN_CTZIMAX:
10113 case BUILT_IN_CTZL:
10114 case BUILT_IN_CTZLL:
10115 case BUILT_IN_FFS:
10116 case BUILT_IN_FFSIMAX:
10117 case BUILT_IN_FFSL:
10118 case BUILT_IN_FFSLL:
10119 case BUILT_IN_IMAXABS:
10120 case BUILT_IN_FINITE:
10121 case BUILT_IN_FINITEF:
10122 case BUILT_IN_FINITEL:
10123 case BUILT_IN_FINITED32:
10124 case BUILT_IN_FINITED64:
10125 case BUILT_IN_FINITED128:
10126 case BUILT_IN_FPCLASSIFY:
10127 case BUILT_IN_ISFINITE:
10128 case BUILT_IN_ISINF_SIGN:
10129 case BUILT_IN_ISINF:
10130 case BUILT_IN_ISINFF:
10131 case BUILT_IN_ISINFL:
10132 case BUILT_IN_ISINFD32:
10133 case BUILT_IN_ISINFD64:
10134 case BUILT_IN_ISINFD128:
10135 case BUILT_IN_ISNAN:
10136 case BUILT_IN_ISNANF:
10137 case BUILT_IN_ISNANL:
10138 case BUILT_IN_ISNAND32:
10139 case BUILT_IN_ISNAND64:
10140 case BUILT_IN_ISNAND128:
10141 case BUILT_IN_ISNORMAL:
10142 case BUILT_IN_ISGREATER:
10143 case BUILT_IN_ISGREATEREQUAL:
10144 case BUILT_IN_ISLESS:
10145 case BUILT_IN_ISLESSEQUAL:
10146 case BUILT_IN_ISLESSGREATER:
10147 case BUILT_IN_ISUNORDERED:
10148 case BUILT_IN_VA_ARG_PACK:
10149 case BUILT_IN_VA_ARG_PACK_LEN:
10150 case BUILT_IN_VA_COPY:
10151 case BUILT_IN_TRAP:
10152 case BUILT_IN_SAVEREGS:
10153 case BUILT_IN_POPCOUNTL:
10154 case BUILT_IN_POPCOUNTLL:
10155 case BUILT_IN_POPCOUNTIMAX:
10156 case BUILT_IN_POPCOUNT:
10157 case BUILT_IN_PARITYL:
10158 case BUILT_IN_PARITYLL:
10159 case BUILT_IN_PARITYIMAX:
10160 case BUILT_IN_PARITY:
10161 case BUILT_IN_LABS:
10162 case BUILT_IN_LLABS:
10163 case BUILT_IN_PREFETCH:
10164 case BUILT_IN_ACC_ON_DEVICE:
10165 return true;
10166
10167 default:
10168 return is_simple_builtin (decl);
10169 }
10170
10171 return false;
10172 }