1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
70 #include "intl.h"
71
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
76
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
83 {
84 #include "builtins.def"
85 };
86
87 /* Set up an array of builtin_info_type; make sure each element's decl is
88 initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
90
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
93
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memchr (tree, rtx);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
124 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
125 static rtx expand_builtin_memmove (tree, rtx);
126 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
127 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
128 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
129 machine_mode, int, tree);
130 static rtx expand_builtin_strcat (tree, rtx);
131 static rtx expand_builtin_strcpy (tree, rtx);
132 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
133 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
134 static rtx expand_builtin_stpncpy (tree, rtx);
135 static rtx expand_builtin_strncat (tree, rtx);
136 static rtx expand_builtin_strncpy (tree, rtx);
137 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138 static rtx expand_builtin_memset (tree, rtx, machine_mode);
139 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
140 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
141 static rtx expand_builtin_bzero (tree);
142 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
143 static rtx expand_builtin_alloca (tree);
144 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
145 static rtx expand_builtin_frame_address (tree, tree);
146 static tree stabilize_va_list_loc (location_t, tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (location_t, tree, tree);
151 static tree fold_builtin_inf (location_t, tree, int);
152 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
153 static bool validate_arg (const_tree, enum tree_code code);
154 static rtx expand_builtin_fabs (tree, rtx, rtx);
155 static rtx expand_builtin_signbit (tree, rtx);
156 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
157 static tree fold_builtin_isascii (location_t, tree);
158 static tree fold_builtin_toascii (location_t, tree);
159 static tree fold_builtin_isdigit (location_t, tree);
160 static tree fold_builtin_fabs (location_t, tree, tree);
161 static tree fold_builtin_abs (location_t, tree, tree);
162 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
163 enum tree_code);
164 static tree fold_builtin_0 (location_t, tree);
165 static tree fold_builtin_1 (location_t, tree, tree);
166 static tree fold_builtin_2 (location_t, tree, tree, tree);
167 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
168
169 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
170 static tree fold_builtin_strspn (location_t, tree, tree);
171 static tree fold_builtin_strcspn (location_t, tree, tree);
172
173 static rtx expand_builtin_object_size (tree);
174 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
175 enum built_in_function);
176 static void maybe_emit_chk_warning (tree, enum built_in_function);
177 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_free_warning (tree);
179 static tree fold_builtin_object_size (tree, tree);
180
181 unsigned HOST_WIDE_INT target_newline;
182 unsigned HOST_WIDE_INT target_percent;
183 static unsigned HOST_WIDE_INT target_c;
184 static unsigned HOST_WIDE_INT target_s;
185 char target_percent_c[3];
186 char target_percent_s[3];
187 char target_percent_s_newline[4];
188 static tree do_mpfr_remquo (tree, tree, tree);
189 static tree do_mpfr_lgamma_r (tree, tree, tree);
190 static void expand_builtin_sync_synchronize (void);
191
192 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_ (or names a Cilk Plus runtime function when Cilk Plus is enabled). */
193
194 static bool
195 is_builtin_name (const char *name)
196 {
197 if (strncmp (name, "__builtin_", 10) == 0)
198 return true;
199 if (strncmp (name, "__sync_", 7) == 0)
200 return true;
201 if (strncmp (name, "__atomic_", 9) == 0)
202 return true;
203 if (flag_cilkplus
204 && (!strcmp (name, "__cilkrts_detach")
205 || !strcmp (name, "__cilkrts_pop_frame")))
206 return true;
207 return false;
208 }
209
210
211 /* Return true if DECL is a function symbol representing a built-in. */
212
213 bool
214 is_builtin_fn (tree decl)
215 {
216 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
217 }
218
219 /* Return true if NODE should be considered for inline expansion regardless
220 of the optimization level. That is the case whenever a function is invoked
221 by its "internal" name, which normally contains the prefix "__builtin". */
222
223 bool
224 called_as_built_in (tree node)
225 {
226 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
227 we want the name used to call the function, not the name it
228 will have. */
229 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
230 return is_builtin_name (name);
231 }
232
233 /* Compute values M and N such that M divides (address of EXP - N) and such
234 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
235 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
236 *ALIGNP and any bit-offset to *BITPOSP.
237
238 Note that the address (and thus the alignment) computed here is based
239 on the address to which a symbol resolves, whereas DECL_ALIGN is based
240 on the address at which an object is actually located. These two
241 addresses are not always the same. For example, on ARM targets,
242 the address &foo of a Thumb function foo() has the lowest bit set,
243 whereas foo() itself starts on an even address.
244
245 If ADDR_P is true we are taking the address of the memory reference EXP
246 and thus cannot rely on the access taking place. */
247
248 static bool
249 get_object_alignment_2 (tree exp, unsigned int *alignp,
250 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
251 {
252 HOST_WIDE_INT bitsize, bitpos;
253 tree offset;
254 machine_mode mode;
255 int unsignedp, reversep, volatilep;
256 unsigned int align = BITS_PER_UNIT;
257 bool known_alignment = false;
258
259 /* Get the innermost object and the constant (bitpos) and possibly
260 variable (offset) offset of the access. */
261 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
262 &unsignedp, &reversep, &volatilep);
263
264 /* Extract alignment information from the innermost object and
265 possibly adjust bitpos and offset. */
266 if (TREE_CODE (exp) == FUNCTION_DECL)
267 {
268 /* Function addresses can encode extra information besides their
269 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
270 allows the low bit to be used as a virtual bit, we know
271 that the address itself must be at least 2-byte aligned. */
272 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
273 align = 2 * BITS_PER_UNIT;
274 }
275 else if (TREE_CODE (exp) == LABEL_DECL)
276 ;
277 else if (TREE_CODE (exp) == CONST_DECL)
278 {
279 /* The alignment of a CONST_DECL is determined by its initializer. */
280 exp = DECL_INITIAL (exp);
281 align = TYPE_ALIGN (TREE_TYPE (exp));
282 if (CONSTANT_CLASS_P (exp))
283 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
284
285 known_alignment = true;
286 }
287 else if (DECL_P (exp))
288 {
289 align = DECL_ALIGN (exp);
290 known_alignment = true;
291 }
292 else if (TREE_CODE (exp) == INDIRECT_REF
293 || TREE_CODE (exp) == MEM_REF
294 || TREE_CODE (exp) == TARGET_MEM_REF)
295 {
296 tree addr = TREE_OPERAND (exp, 0);
297 unsigned ptr_align;
298 unsigned HOST_WIDE_INT ptr_bitpos;
299 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
300
301 /* If the address is explicitly aligned, handle that. */
302 if (TREE_CODE (addr) == BIT_AND_EXPR
303 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
304 {
305 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
306 ptr_bitmask *= BITS_PER_UNIT;
307 align = least_bit_hwi (ptr_bitmask);
308 addr = TREE_OPERAND (addr, 0);
309 }
310
311 known_alignment
312 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
313 align = MAX (ptr_align, align);
314
315 /* Re-apply explicit alignment to the bitpos. */
316 ptr_bitpos &= ptr_bitmask;
317
318 /* The alignment of the pointer operand in a TARGET_MEM_REF
319 has to take the variable offset parts into account. */
320 if (TREE_CODE (exp) == TARGET_MEM_REF)
321 {
322 if (TMR_INDEX (exp))
323 {
324 unsigned HOST_WIDE_INT step = 1;
325 if (TMR_STEP (exp))
326 step = TREE_INT_CST_LOW (TMR_STEP (exp));
327 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
328 }
329 if (TMR_INDEX2 (exp))
330 align = BITS_PER_UNIT;
331 known_alignment = false;
332 }
333
334 /* When EXP is an actual memory reference then we can use
335 TYPE_ALIGN of a pointer indirection to derive alignment.
336 Do so only if get_pointer_alignment_1 did not reveal absolute
337 alignment knowledge and if using that alignment would
338 improve the situation. */
339 unsigned int talign;
340 if (!addr_p && !known_alignment
341 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
342 && talign > align)
343 align = talign;
344 else
345 {
346 /* Else adjust bitpos accordingly. */
347 bitpos += ptr_bitpos;
348 if (TREE_CODE (exp) == MEM_REF
349 || TREE_CODE (exp) == TARGET_MEM_REF)
350 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
351 }
352 }
353 else if (TREE_CODE (exp) == STRING_CST)
354 {
355 /* STRING_CSTs are the only constant objects we allow not to be
356 wrapped inside a CONST_DECL. */
357 align = TYPE_ALIGN (TREE_TYPE (exp));
358 if (CONSTANT_CLASS_P (exp))
359 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
360
361 known_alignment = true;
362 }
363
364 /* If there is a non-constant offset part extract the maximum
365 alignment that can prevail. */
366 if (offset)
367 {
368 unsigned int trailing_zeros = tree_ctz (offset);
369 if (trailing_zeros < HOST_BITS_PER_INT)
370 {
371 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
372 if (inner)
373 align = MIN (align, inner);
374 }
375 }
376
377 *alignp = align;
378 *bitposp = bitpos & (*alignp - 1);
379 return known_alignment;
380 }
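/* For illustration of the contract above: if this function sets *ALIGNP
   to 128 and *BITPOSP to 48, the address of EXP is known to lie 48 bits
   (6 bytes) past a 128-bit (16-byte) aligned boundary.  */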
381
382 /* For a memory reference expression EXP compute values M and N such that M
383 divides (&EXP - N) and such that N < M. If these numbers can be determined,
384 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
385 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
386
387 bool
388 get_object_alignment_1 (tree exp, unsigned int *alignp,
389 unsigned HOST_WIDE_INT *bitposp)
390 {
391 return get_object_alignment_2 (exp, alignp, bitposp, false);
392 }
393
394 /* Return the alignment in bits of EXP, an object. */
395
396 unsigned int
397 get_object_alignment (tree exp)
398 {
399 unsigned HOST_WIDE_INT bitpos = 0;
400 unsigned int align;
401
402 get_object_alignment_1 (exp, &align, &bitpos);
403
404 /* align and bitpos now specify known low bits of the pointer.
405 ptr & (align - 1) == bitpos. */
406
407 if (bitpos != 0)
408 align = least_bit_hwi (bitpos);
409 return align;
410 }
411
412 /* For a pointer valued expression EXP compute values M and N such that M
413 divides (EXP - N) and such that N < M. If these numbers can be determined,
414 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
415 the results are just a conservative approximation.
416
417 If EXP is not a pointer, false is returned too. */
418
419 bool
420 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
421 unsigned HOST_WIDE_INT *bitposp)
422 {
423 STRIP_NOPS (exp);
424
425 if (TREE_CODE (exp) == ADDR_EXPR)
426 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
427 alignp, bitposp, true);
428 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
429 {
430 unsigned int align;
431 unsigned HOST_WIDE_INT bitpos;
432 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
433 &align, &bitpos);
434 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
435 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
436 else
437 {
438 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
439 if (trailing_zeros < HOST_BITS_PER_INT)
440 {
441 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
442 if (inner)
443 align = MIN (align, inner);
444 }
445 }
446 *alignp = align;
447 *bitposp = bitpos & (align - 1);
448 return res;
449 }
450 else if (TREE_CODE (exp) == SSA_NAME
451 && POINTER_TYPE_P (TREE_TYPE (exp)))
452 {
453 unsigned int ptr_align, ptr_misalign;
454 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
455
456 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
457 {
458 *bitposp = ptr_misalign * BITS_PER_UNIT;
459 *alignp = ptr_align * BITS_PER_UNIT;
460 /* Make sure to return a sensible alignment when the multiplication
461 by BITS_PER_UNIT overflowed. */
462 if (*alignp == 0)
463 *alignp = 1u << (HOST_BITS_PER_INT - 1);
464 /* We cannot really tell whether this result is an approximation. */
465 return false;
466 }
467 else
468 {
469 *bitposp = 0;
470 *alignp = BITS_PER_UNIT;
471 return false;
472 }
473 }
474 else if (TREE_CODE (exp) == INTEGER_CST)
475 {
476 *alignp = BIGGEST_ALIGNMENT;
477 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
478 & (BIGGEST_ALIGNMENT - 1));
479 return true;
480 }
481
482 *bitposp = 0;
483 *alignp = BITS_PER_UNIT;
484 return false;
485 }
486
487 /* Return the alignment in bits of EXP, a pointer valued expression.
488 The alignment returned is, by default, the alignment of the thing that
489 EXP points to. If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.
490
491 Otherwise, look at the expression to see if we can do better, i.e., if the
492 expression is actually pointing at an object whose alignment is tighter. */
493
494 unsigned int
495 get_pointer_alignment (tree exp)
496 {
497 unsigned HOST_WIDE_INT bitpos = 0;
498 unsigned int align;
499
500 get_pointer_alignment_1 (exp, &align, &bitpos);
501
502 /* align and bitpos now specify known low bits of the pointer.
503 ptr & (align - 1) == bitpos. */
504
505 if (bitpos != 0)
506 align = least_bit_hwi (bitpos);
507
508 return align;
509 }
510
511 /* Return the number of leading non-zero elements in the sequence
512 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
513 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
514
515 static unsigned
516 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
517 {
518 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
519
520 unsigned n;
521
522 if (eltsize == 1)
523 {
524 /* Optimize the common case of plain char. */
525 for (n = 0; n < maxelts; n++)
526 {
527 const char *elt = (const char*) ptr + n;
528 if (!*elt)
529 break;
530 }
531 }
532 else
533 {
534 for (n = 0; n < maxelts; n++)
535 {
536 const char *elt = (const char*) ptr + n * eltsize;
537 if (!memcmp (elt, "\0\0\0\0", eltsize))
538 break;
539 }
540 }
541 return n;
542 }
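/* For illustration: with ELTSIZE == 4, MAXELTS == 3 and PTR pointing at
   the twelve bytes 'a',0,0,0,'b',0,0,0,0,0,0,0 (a little-endian wide
   string L"ab"), the first all-zero element is at index 2, so the
   function returns 2.  */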
543
544 /* Compute the length of a null-terminated character string or wide
545 character string handling character sizes of 1, 2, and 4 bytes.
546 TREE_STRING_LENGTH is not the right way because it evaluates to
547 the size of the character array in bytes (as opposed to characters)
548 and because it can contain a zero byte in the middle.
549
550 ONLY_VALUE should be nonzero if the result is not going to be emitted
551 into the instruction stream and zero if it is going to be expanded.
552 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
553 is returned, otherwise NULL, since
554 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
555 evaluate the side-effects.
556
557 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
558 accesses. Note that this implies the result is not going to be emitted
559 into the instruction stream.
560
561 The value returned is of type `ssizetype'.
562
563 Unfortunately, string_constant can't access the values of const char
564 arrays with initializers, so neither can we do so here. */
565
566 tree
567 c_strlen (tree src, int only_value)
568 {
569 STRIP_NOPS (src);
570 if (TREE_CODE (src) == COND_EXPR
571 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
572 {
573 tree len1, len2;
574
575 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
576 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
577 if (tree_int_cst_equal (len1, len2))
578 return len1;
579 }
580
581 if (TREE_CODE (src) == COMPOUND_EXPR
582 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
583 return c_strlen (TREE_OPERAND (src, 1), only_value);
584
585 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
586
587 /* Offset from the beginning of the string in bytes. */
588 tree byteoff;
589 src = string_constant (src, &byteoff);
590 if (src == 0)
591 return NULL_TREE;
592
593 /* Determine the size of the string element. */
594 unsigned eltsize
595 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
596
597 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
598 length of SRC. */
599 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
600
601 /* PTR can point to the byte representation of any string type, including
602 char* and wchar_t*. */
603 const char *ptr = TREE_STRING_POINTER (src);
604
605 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
606 {
607 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
608 compute the offset to the following null if we don't know where to
609 start searching for it. */
610 if (string_length (ptr, eltsize, maxelts) < maxelts)
611 {
612 /* Return when an embedded null character is found. */
613 return NULL_TREE;
614 }
615
616 /* We don't know the starting offset, but we do know that the string
617 has no internal zero bytes. We can assume that the offset falls
618 within the bounds of the string; otherwise, the programmer deserves
619 what he gets. Subtract the offset from the length of the string,
620 and return that. This would perhaps not be valid if we were dealing
621 with named arrays in addition to literal string constants. */
622
623 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
624 }
625
626 /* Offset from the beginning of the string in elements. */
627 HOST_WIDE_INT eltoff;
628
629 /* We have a known offset into the string. Start searching there for
630 a null character if we can represent it as a single HOST_WIDE_INT. */
631 if (byteoff == 0)
632 eltoff = 0;
633 else if (! tree_fits_shwi_p (byteoff))
634 eltoff = -1;
635 else
636 eltoff = tree_to_shwi (byteoff) / eltsize;
637
638 /* If the offset is known to be out of bounds, warn, and call strlen at
639 runtime. */
640 if (eltoff < 0 || eltoff > maxelts)
641 {
642 /* Suppress multiple warnings for propagated constant strings. */
643 if (only_value != 2
644 && !TREE_NO_WARNING (src))
645 {
646 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
647 eltoff);
648 TREE_NO_WARNING (src) = 1;
649 }
650 return NULL_TREE;
651 }
652
653 /* Use strlen to search for the first zero byte. Since any strings
654 constructed with build_string will have nulls appended, we win even
655 if we get handed something like (char[4])"abcd".
656
657 Since ELTOFF is our starting index into the string, no further
658 calculation is needed. */
659 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
660 maxelts - eltoff);
661
662 return ssize_int (len);
663 }
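/* For illustration: applied to the string constant "hello" with a known
   byte offset of 2, c_strlen returns ssize_int (3); applied to
   "foo\0bar" with a non-constant offset it returns NULL_TREE because of
   the embedded null byte.  */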
664
665 /* Return a constant integer corresponding to target reading
666 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
667
668 static rtx
669 c_readstr (const char *str, machine_mode mode)
670 {
671 HOST_WIDE_INT ch;
672 unsigned int i, j;
673 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
674
675 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
676 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
677 / HOST_BITS_PER_WIDE_INT;
678
679 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
680 for (i = 0; i < len; i++)
681 tmp[i] = 0;
682
683 ch = 1;
684 for (i = 0; i < GET_MODE_SIZE (mode); i++)
685 {
686 j = i;
687 if (WORDS_BIG_ENDIAN)
688 j = GET_MODE_SIZE (mode) - i - 1;
689 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
690 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
691 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
692 j *= BITS_PER_UNIT;
693
694 if (ch)
695 ch = (unsigned char) str[i];
696 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
697 }
698
699 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
700 return immed_wide_int_const (c, mode);
701 }
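/* For illustration: c_readstr ("abcd", SImode) yields the constant
   0x64636261 on a typical little-endian target and 0x61626364 on a
   big-endian one; once a terminating NUL is seen in STR, the remaining
   bytes are read as zero.  */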
702
703 /* Cast a target constant CST to target CHAR and if that value fits into
704 host char type, return zero and put that value into variable pointed to by
705 P. */
706
707 static int
708 target_char_cast (tree cst, char *p)
709 {
710 unsigned HOST_WIDE_INT val, hostval;
711
712 if (TREE_CODE (cst) != INTEGER_CST
713 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
714 return 1;
715
716 /* Do not care if it fits or not right here. */
717 val = TREE_INT_CST_LOW (cst);
718
719 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
720 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
721
722 hostval = val;
723 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
724 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
725
726 if (val != hostval)
727 return 1;
728
729 *p = hostval;
730 return 0;
731 }
732
733 /* Similar to save_expr, but assumes that arbitrary code is not executed
734 in between the multiple evaluations. In particular, we assume that a
735 non-addressable local variable will not be modified. */
736
737 static tree
738 builtin_save_expr (tree exp)
739 {
740 if (TREE_CODE (exp) == SSA_NAME
741 || (TREE_ADDRESSABLE (exp) == 0
742 && (TREE_CODE (exp) == PARM_DECL
743 || (VAR_P (exp) && !TREE_STATIC (exp)))))
744 return exp;
745
746 return save_expr (exp);
747 }
748
749 /* Starting from a pointer to the current stack frame, follow the dynamic chain COUNT
750 times to get the address of either a higher stack frame, or a return
751 address located within it (depending on FNDECL_CODE). */
752
753 static rtx
754 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
755 {
756 int i;
757 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
758 if (tem == NULL_RTX)
759 {
760 /* For a zero count with __builtin_return_address, we don't care what
761 frame address we return, because target-specific definitions will
762 override us. Therefore frame pointer elimination is OK, and using
763 the soft frame pointer is OK.
764
765 For a nonzero count, or a zero count with __builtin_frame_address,
766 we require a stable offset from the current frame pointer to the
767 previous one, so we must use the hard frame pointer, and
768 we must disable frame pointer elimination. */
769 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
770 tem = frame_pointer_rtx;
771 else
772 {
773 tem = hard_frame_pointer_rtx;
774
775 /* Tell reload not to eliminate the frame pointer. */
776 crtl->accesses_prior_frames = 1;
777 }
778 }
779
780 if (count > 0)
781 SETUP_FRAME_ADDRESSES ();
782
783 /* On the SPARC, the return address is not in the frame, it is in a
784 register. There is no way to access it off of the current frame
785 pointer, but it can be accessed off the previous frame pointer by
786 reading the value from the register window save area. */
787 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
788 count--;
789
790 /* Scan back COUNT frames to the specified frame. */
791 for (i = 0; i < count; i++)
792 {
793 /* Assume the dynamic chain pointer is in the word that the
794 frame address points to, unless otherwise specified. */
795 tem = DYNAMIC_CHAIN_ADDRESS (tem);
796 tem = memory_address (Pmode, tem);
797 tem = gen_frame_mem (Pmode, tem);
798 tem = copy_to_reg (tem);
799 }
800
801 /* For __builtin_frame_address, return what we've got. But, on
802 the SPARC for example, we may have to add a bias. */
803 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
804 return FRAME_ADDR_RTX (tem);
805
806 /* For __builtin_return_address, get the return address from that frame. */
807 #ifdef RETURN_ADDR_RTX
808 tem = RETURN_ADDR_RTX (count, tem);
809 #else
810 tem = memory_address (Pmode,
811 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
812 tem = gen_frame_mem (Pmode, tem);
813 #endif
814 return tem;
815 }
816
817 /* Alias set used for setjmp buffer. */
818 static alias_set_type setjmp_alias_set = -1;
819
820 /* Construct the leading half of a __builtin_setjmp call. Control will
821 return to RECEIVER_LABEL. This is also called directly by the SJLJ
822 exception handling code. */
823
824 void
825 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
826 {
827 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
828 rtx stack_save;
829 rtx mem;
830
831 if (setjmp_alias_set == -1)
832 setjmp_alias_set = new_alias_set ();
833
834 buf_addr = convert_memory_address (Pmode, buf_addr);
835
836 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
837
838 /* We store the frame pointer and the address of receiver_label in
839 the buffer and use the rest of it for the stack save area, which
840 is machine-dependent. */
841
842 mem = gen_rtx_MEM (Pmode, buf_addr);
843 set_mem_alias_set (mem, setjmp_alias_set);
844 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
845
846 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
847 GET_MODE_SIZE (Pmode)));
848 set_mem_alias_set (mem, setjmp_alias_set);
849
850 emit_move_insn (validize_mem (mem),
851 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
852
853 stack_save = gen_rtx_MEM (sa_mode,
854 plus_constant (Pmode, buf_addr,
855 2 * GET_MODE_SIZE (Pmode)));
856 set_mem_alias_set (stack_save, setjmp_alias_set);
857 emit_stack_save (SAVE_NONLOCAL, &stack_save);
858
859 /* If there is further processing to do, do it. */
860 if (targetm.have_builtin_setjmp_setup ())
861 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
862
863 /* We have a nonlocal label. */
864 cfun->has_nonlocal_label = 1;
865 }
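/* For reference, the buffer layout established above is: word 0 holds
   the value from targetm.builtin_setjmp_frame_value (normally the frame
   pointer), word 1 holds the address of RECEIVER_LABEL, and the area
   starting at word 2 holds the nonlocal stack save; the comment for
   __builtin_longjmp below documents the overall five-word size.  */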
866
867 /* Construct the trailing part of a __builtin_setjmp call. This is
868 also called directly by the SJLJ exception handling code.
869 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
870
871 void
872 expand_builtin_setjmp_receiver (rtx receiver_label)
873 {
874 rtx chain;
875
876 /* Mark the FP as used when we get here, so we have to make sure it's
877 marked as used by this function. */
878 emit_use (hard_frame_pointer_rtx);
879
880 /* Mark the static chain as clobbered here so life information
881 doesn't get messed up for it. */
882 chain = targetm.calls.static_chain (current_function_decl, true);
883 if (chain && REG_P (chain))
884 emit_clobber (chain);
885
886 /* Now put in the code to restore the frame pointer, and argument
887 pointer, if needed. */
888 if (! targetm.have_nonlocal_goto ())
889 {
890 /* First adjust our frame pointer to its actual value. It was
891 previously set to the start of the virtual area corresponding to
892 the stacked variables when we branched here and now needs to be
893 adjusted to the actual hardware fp value.
894
895 Assignments to virtual registers are converted by
896 instantiate_virtual_regs into the corresponding assignment
897 to the underlying register (fp in this case) that makes
898 the original assignment true.
899 So the following insn will actually be decrementing fp by
900 STARTING_FRAME_OFFSET. */
901 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
902
903 /* Restoring the frame pointer also modifies the hard frame pointer.
904 Mark it used (so that the previous assignment remains live once
905 the frame pointer is eliminated) and clobbered (to represent the
906 implicit update from the assignment). */
907 emit_use (hard_frame_pointer_rtx);
908 emit_clobber (hard_frame_pointer_rtx);
909 }
910
911 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
912 {
913 /* If the argument pointer can be eliminated in favor of the
914 frame pointer, we don't need to restore it. We assume here
915 that if such an elimination is present, it can always be used.
916 This is the case on all known machines; if we don't make this
917 assumption, we do unnecessary saving on many machines. */
918 size_t i;
919 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
920
921 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
922 if (elim_regs[i].from == ARG_POINTER_REGNUM
923 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
924 break;
925
926 if (i == ARRAY_SIZE (elim_regs))
927 {
928 /* Now restore our arg pointer from the address at which it
929 was saved in our stack frame. */
930 emit_move_insn (crtl->args.internal_arg_pointer,
931 copy_to_reg (get_arg_pointer_save_area ()));
932 }
933 }
934
935 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
936 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
937 else if (targetm.have_nonlocal_goto_receiver ())
938 emit_insn (targetm.gen_nonlocal_goto_receiver ());
939 else
940 { /* Nothing */ }
941
942 /* We must not allow the code we just generated to be reordered by
943 scheduling. Specifically, the update of the frame pointer must
944 happen immediately, not later. */
945 emit_insn (gen_blockage ());
946 }
947
948 /* __builtin_longjmp is passed a pointer to an array of five words (not
949 all will be used on all machines). It operates similarly to the C
950 library function of the same name, but is more efficient. Much of
951 the code below is copied from the handling of non-local gotos. */
952
953 static void
954 expand_builtin_longjmp (rtx buf_addr, rtx value)
955 {
956 rtx fp, lab, stack;
957 rtx_insn *insn, *last;
958 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
959
960 /* DRAP is needed for stack realignment if longjmp is expanded to the
961 current function. */
962 if (SUPPORTS_STACK_ALIGNMENT)
963 crtl->need_drap = true;
964
965 if (setjmp_alias_set == -1)
966 setjmp_alias_set = new_alias_set ();
967
968 buf_addr = convert_memory_address (Pmode, buf_addr);
969
970 buf_addr = force_reg (Pmode, buf_addr);
971
972 /* We require that the user pass a second argument of 1, because
973 that is what builtin_setjmp will return. */
974 gcc_assert (value == const1_rtx);
975
976 last = get_last_insn ();
977 if (targetm.have_builtin_longjmp ())
978 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
979 else
980 {
981 fp = gen_rtx_MEM (Pmode, buf_addr);
982 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
983 GET_MODE_SIZE (Pmode)));
984
985 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
986 2 * GET_MODE_SIZE (Pmode)));
987 set_mem_alias_set (fp, setjmp_alias_set);
988 set_mem_alias_set (lab, setjmp_alias_set);
989 set_mem_alias_set (stack, setjmp_alias_set);
990
991 /* Pick up FP, label, and SP from the block and jump. This code is
992 from expand_goto in stmt.c; see there for detailed comments. */
993 if (targetm.have_nonlocal_goto ())
994 /* We have to pass a value to the nonlocal_goto pattern that will
995 get copied into the static_chain pointer, but it does not matter
996 what that value is, because builtin_setjmp does not use it. */
997 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
998 else
999 {
1000 lab = copy_to_reg (lab);
1001
1002 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1003 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1004
1005 emit_move_insn (hard_frame_pointer_rtx, fp);
1006 emit_stack_restore (SAVE_NONLOCAL, stack);
1007
1008 emit_use (hard_frame_pointer_rtx);
1009 emit_use (stack_pointer_rtx);
1010 emit_indirect_jump (lab);
1011 }
1012 }
1013
1014 /* Search backwards and mark the jump insn as a non-local goto.
1015 Note that this precludes the use of __builtin_longjmp to a
1016 __builtin_setjmp target in the same function. However, we've
1017 already cautioned the user that these functions are for
1018 internal exception handling use only. */
1019 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1020 {
1021 gcc_assert (insn != last);
1022
1023 if (JUMP_P (insn))
1024 {
1025 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1026 break;
1027 }
1028 else if (CALL_P (insn))
1029 break;
1030 }
1031 }
1032
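/* Return true if the const call argument iterator *ITER has more
   arguments to visit.  */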
1033 static inline bool
1034 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1035 {
1036 return (iter->i < iter->n);
1037 }
1038
1039 /* This function validates the types of a function call argument list
1040 against a specified list of tree_codes. If the last specifier is a 0,
1041 that represents an ellipsis, otherwise the last specifier must be a
1042 VOID_TYPE. */
1043
1044 static bool
1045 validate_arglist (const_tree callexpr, ...)
1046 {
1047 enum tree_code code;
1048 bool res = 0;
1049 va_list ap;
1050 const_call_expr_arg_iterator iter;
1051 const_tree arg;
1052
1053 va_start (ap, callexpr);
1054 init_const_call_expr_arg_iterator (callexpr, &iter);
1055
1056 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1057 tree fn = CALL_EXPR_FN (callexpr);
1058 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1059
1060 for (unsigned argno = 1; ; ++argno)
1061 {
1062 code = (enum tree_code) va_arg (ap, int);
1063
1064 switch (code)
1065 {
1066 case 0:
1067 /* This signifies an ellipsis; any further arguments are all ok. */
1068 res = true;
1069 goto end;
1070 case VOID_TYPE:
1071 /* This signifies an endlink, if no arguments remain, return
1072 true, otherwise return false. */
1073 res = !more_const_call_expr_args_p (&iter);
1074 goto end;
1075 case POINTER_TYPE:
1076 /* The actual argument must be nonnull when either the whole
1077 called function has been declared nonnull, or when the formal
1078 argument corresponding to the actual argument has been. */
1079 if (argmap
1080 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1081 {
1082 arg = next_const_call_expr_arg (&iter);
1083 if (!validate_arg (arg, code) || integer_zerop (arg))
1084 goto end;
1085 break;
1086 }
1087 /* FALLTHRU */
1088 default:
1089 /* If no parameters remain or the parameter's code does not
1090 match the specified code, return false. Otherwise continue
1091 checking any remaining arguments. */
1092 arg = next_const_call_expr_arg (&iter);
1093 if (!validate_arg (arg, code))
1094 goto end;
1095 break;
1096 }
1097 }
1098
1099 /* We need gotos here since we can only have one VA_CLOSE in a
1100 function. */
1101 end: ;
1102 va_end (ap);
1103
1104 BITMAP_FREE (argmap);
1105
1106 return res;
1107 }
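/* For illustration: validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly one pointer argument followed by one
   integer argument, while a trailing 0 specifier (as in POINTER_TYPE, 0)
   accepts any further arguments after the pointer.  */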
1108
1109 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1110 and the address of the save area. */
1111
1112 static rtx
1113 expand_builtin_nonlocal_goto (tree exp)
1114 {
1115 tree t_label, t_save_area;
1116 rtx r_label, r_save_area, r_fp, r_sp;
1117 rtx_insn *insn;
1118
1119 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1120 return NULL_RTX;
1121
1122 t_label = CALL_EXPR_ARG (exp, 0);
1123 t_save_area = CALL_EXPR_ARG (exp, 1);
1124
1125 r_label = expand_normal (t_label);
1126 r_label = convert_memory_address (Pmode, r_label);
1127 r_save_area = expand_normal (t_save_area);
1128 r_save_area = convert_memory_address (Pmode, r_save_area);
1129 /* Copy the address of the save location to a register just in case it was
1130 based on the frame pointer. */
1131 r_save_area = copy_to_reg (r_save_area);
1132 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1133 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1134 plus_constant (Pmode, r_save_area,
1135 GET_MODE_SIZE (Pmode)));
1136
1137 crtl->has_nonlocal_goto = 1;
1138
1139 /* ??? We no longer need to pass the static chain value, afaik. */
1140 if (targetm.have_nonlocal_goto ())
1141 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1142 else
1143 {
1144 r_label = copy_to_reg (r_label);
1145
1146 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1147 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1148
1149 /* Restore frame pointer for containing function. */
1150 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1151 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1152
1153 /* USE of hard_frame_pointer_rtx added for consistency;
1154 not clear if really needed. */
1155 emit_use (hard_frame_pointer_rtx);
1156 emit_use (stack_pointer_rtx);
1157
1158 /* If the architecture is using a GP register, we must
1159 conservatively assume that the target function makes use of it.
1160 The prologue of functions with nonlocal gotos must therefore
1161 initialize the GP register to the appropriate value, and we
1162 must then make sure that this value is live at the point
1163 of the jump. (Note that this doesn't necessarily apply
1164 to targets with a nonlocal_goto pattern; they are free
1165 to implement it in their own way. Note also that this is
1166 a no-op if the GP register is a global invariant.) */
1167 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1168 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1169 emit_use (pic_offset_table_rtx);
1170
1171 emit_indirect_jump (r_label);
1172 }
1173
1174 /* Search backwards to the jump insn and mark it as a
1175 non-local goto. */
1176 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1177 {
1178 if (JUMP_P (insn))
1179 {
1180 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1181 break;
1182 }
1183 else if (CALL_P (insn))
1184 break;
1185 }
1186
1187 return const0_rtx;
1188 }
1189
1190 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1191 (not all will be used on all machines) that was passed to __builtin_setjmp.
1192 It updates the stack pointer in that block to the current value. This is
1193 also called directly by the SJLJ exception handling code. */
1194
1195 void
1196 expand_builtin_update_setjmp_buf (rtx buf_addr)
1197 {
1198 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1199 rtx stack_save
1200 = gen_rtx_MEM (sa_mode,
1201 memory_address
1202 (sa_mode,
1203 plus_constant (Pmode, buf_addr,
1204 2 * GET_MODE_SIZE (Pmode))));
1205
1206 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1207 }
1208
1209 /* Expand a call to __builtin_prefetch. For a target that does not support
1210 data prefetch, evaluate the memory address argument in case it has side
1211 effects. */
1212
1213 static void
1214 expand_builtin_prefetch (tree exp)
1215 {
1216 tree arg0, arg1, arg2;
1217 int nargs;
1218 rtx op0, op1, op2;
1219
1220 if (!validate_arglist (exp, POINTER_TYPE, 0))
1221 return;
1222
1223 arg0 = CALL_EXPR_ARG (exp, 0);
1224
1225 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1226 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1227 locality). */
1228 nargs = call_expr_nargs (exp);
1229 if (nargs > 1)
1230 arg1 = CALL_EXPR_ARG (exp, 1);
1231 else
1232 arg1 = integer_zero_node;
1233 if (nargs > 2)
1234 arg2 = CALL_EXPR_ARG (exp, 2);
1235 else
1236 arg2 = integer_three_node;
1237
1238 /* Argument 0 is an address. */
1239 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1240
1241 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1242 if (TREE_CODE (arg1) != INTEGER_CST)
1243 {
1244 error ("second argument to %<__builtin_prefetch%> must be a constant");
1245 arg1 = integer_zero_node;
1246 }
1247 op1 = expand_normal (arg1);
1248 /* Argument 1 must be either zero or one. */
1249 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1250 {
1251 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1252 " using zero");
1253 op1 = const0_rtx;
1254 }
1255
1256 /* Argument 2 (locality) must be a compile-time constant int. */
1257 if (TREE_CODE (arg2) != INTEGER_CST)
1258 {
1259 error ("third argument to %<__builtin_prefetch%> must be a constant");
1260 arg2 = integer_zero_node;
1261 }
1262 op2 = expand_normal (arg2);
1263 /* Argument 2 must be 0, 1, 2, or 3. */
1264 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1265 {
1266 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1267 op2 = const0_rtx;
1268 }
1269
1270 if (targetm.have_prefetch ())
1271 {
1272 struct expand_operand ops[3];
1273
1274 create_address_operand (&ops[0], op0);
1275 create_integer_operand (&ops[1], INTVAL (op1));
1276 create_integer_operand (&ops[2], INTVAL (op2));
1277 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1278 return;
1279 }
1280
1281 /* Don't do anything with direct references to volatile memory, but
1282 generate code to handle other side effects. */
1283 if (!MEM_P (op0) && side_effects_p (op0))
1284 emit_insn (op0);
1285 }
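/* For illustration: a source-level call such as
   __builtin_prefetch (&a[i + 8], 0, 3) reaches the code above with the
   address as argument 0, 0 (read) as the read/write flag and 3 as the
   locality hint; omitted arguments default to 0 and 3 respectively, as
   noted above.  */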
1286
1287 /* Get a MEM rtx for expression EXP which is the address of an operand
1288 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1289 the maximum length of the block of memory that might be accessed or
1290 NULL if unknown. */
1291
1292 static rtx
1293 get_memory_rtx (tree exp, tree len)
1294 {
1295 tree orig_exp = exp;
1296 rtx addr, mem;
1297
1298 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1299 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1300 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1301 exp = TREE_OPERAND (exp, 0);
1302
1303 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1304 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1305
1306 /* Get an expression we can use to find the attributes to assign to MEM.
1307 First remove any nops. */
1308 while (CONVERT_EXPR_P (exp)
1309 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1310 exp = TREE_OPERAND (exp, 0);
1311
1312 /* Build a MEM_REF representing the whole accessed area as a byte blob
1313 (as builtin stringops may alias with anything). */
1314 exp = fold_build2 (MEM_REF,
1315 build_array_type (char_type_node,
1316 build_range_type (sizetype,
1317 size_one_node, len)),
1318 exp, build_int_cst (ptr_type_node, 0));
1319
1320 /* If the MEM_REF has no acceptable address, try to get the base object
1321 from the original address we got, and build an all-aliasing
1322 unknown-sized access to that one. */
1323 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1324 set_mem_attributes (mem, exp, 0);
1325 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1326 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1327 0))))
1328 {
1329 exp = build_fold_addr_expr (exp);
1330 exp = fold_build2 (MEM_REF,
1331 build_array_type (char_type_node,
1332 build_range_type (sizetype,
1333 size_zero_node,
1334 NULL)),
1335 exp, build_int_cst (ptr_type_node, 0));
1336 set_mem_attributes (mem, exp, 0);
1337 }
1338 set_mem_alias_set (mem, 0);
1339 return mem;
1340 }
1341 \f
1342 /* Built-in functions to perform an untyped call and return. */
1343
1344 #define apply_args_mode \
1345 (this_target_builtins->x_apply_args_mode)
1346 #define apply_result_mode \
1347 (this_target_builtins->x_apply_result_mode)
1348
1349 /* Return the size required for the block returned by __builtin_apply_args,
1350 and initialize apply_args_mode. */
1351
1352 static int
1353 apply_args_size (void)
1354 {
1355 static int size = -1;
1356 int align;
1357 unsigned int regno;
1358 machine_mode mode;
1359
1360 /* The values computed by this function never change. */
1361 if (size < 0)
1362 {
1363 /* The first value is the incoming arg-pointer. */
1364 size = GET_MODE_SIZE (Pmode);
1365
1366 /* The second value is the structure value address unless this is
1367 passed as an "invisible" first argument. */
1368 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1369 size += GET_MODE_SIZE (Pmode);
1370
1371 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1372 if (FUNCTION_ARG_REGNO_P (regno))
1373 {
1374 mode = targetm.calls.get_raw_arg_mode (regno);
1375
1376 gcc_assert (mode != VOIDmode);
1377
1378 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1379 if (size % align != 0)
1380 size = CEIL (size, align) * align;
1381 size += GET_MODE_SIZE (mode);
1382 apply_args_mode[regno] = mode;
1383 }
1384 else
1385 {
1386 apply_args_mode[regno] = VOIDmode;
1387 }
1388 }
1389 return size;
1390 }
1391
1392 /* Return the size required for the block returned by __builtin_apply,
1393 and initialize apply_result_mode. */
1394
1395 static int
1396 apply_result_size (void)
1397 {
1398 static int size = -1;
1399 int align, regno;
1400 machine_mode mode;
1401
1402 /* The values computed by this function never change. */
1403 if (size < 0)
1404 {
1405 size = 0;
1406
1407 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1408 if (targetm.calls.function_value_regno_p (regno))
1409 {
1410 mode = targetm.calls.get_raw_result_mode (regno);
1411
1412 gcc_assert (mode != VOIDmode);
1413
1414 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1415 if (size % align != 0)
1416 size = CEIL (size, align) * align;
1417 size += GET_MODE_SIZE (mode);
1418 apply_result_mode[regno] = mode;
1419 }
1420 else
1421 apply_result_mode[regno] = VOIDmode;
1422
1423 /* Allow targets that use untyped_call and untyped_return to override
1424 the size so that machine-specific information can be stored here. */
1425 #ifdef APPLY_RESULT_SIZE
1426 size = APPLY_RESULT_SIZE;
1427 #endif
1428 }
1429 return size;
1430 }
1431
1432 /* Create a vector describing the result block RESULT. If SAVEP is true,
1433 the result block is used to save the values; otherwise it is used to
1434 restore the values. */
1435
1436 static rtx
1437 result_vector (int savep, rtx result)
1438 {
1439 int regno, size, align, nelts;
1440 machine_mode mode;
1441 rtx reg, mem;
1442 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1443
1444 size = nelts = 0;
1445 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1446 if ((mode = apply_result_mode[regno]) != VOIDmode)
1447 {
1448 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1449 if (size % align != 0)
1450 size = CEIL (size, align) * align;
1451 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1452 mem = adjust_address (result, mode, size);
1453 savevec[nelts++] = (savep
1454 ? gen_rtx_SET (mem, reg)
1455 : gen_rtx_SET (reg, mem));
1456 size += GET_MODE_SIZE (mode);
1457 }
1458 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1459 }
1460
1461 /* Save the state required to perform an untyped call with the same
1462 arguments as were passed to the current function. */
1463
1464 static rtx
1465 expand_builtin_apply_args_1 (void)
1466 {
1467 rtx registers, tem;
1468 int size, align, regno;
1469 machine_mode mode;
1470 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1471
1472 /* Create a block where the arg-pointer, structure value address,
1473 and argument registers can be saved. */
1474 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1475
1476 /* Walk past the arg-pointer and structure value address. */
1477 size = GET_MODE_SIZE (Pmode);
1478 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1479 size += GET_MODE_SIZE (Pmode);
1480
1481 /* Save each register used in calling a function to the block. */
1482 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1483 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 {
1485 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1486 if (size % align != 0)
1487 size = CEIL (size, align) * align;
1488
1489 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1490
1491 emit_move_insn (adjust_address (registers, mode, size), tem);
1492 size += GET_MODE_SIZE (mode);
1493 }
1494
1495 /* Save the arg pointer to the block. */
1496 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1497 /* We need the pointer to the arguments as the caller actually passed them, not
1498 as we might have pretended they were passed. Make sure it's a valid
1499 operand, as emit_move_insn isn't expected to handle a PLUS. */
1500 if (STACK_GROWS_DOWNWARD)
1501 tem
1502 = force_operand (plus_constant (Pmode, tem,
1503 crtl->args.pretend_args_size),
1504 NULL_RTX);
1505 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1506
1507 size = GET_MODE_SIZE (Pmode);
1508
1509 /* Save the structure value address unless this is passed as an
1510 "invisible" first argument. */
1511 if (struct_incoming_value)
1512 {
1513 emit_move_insn (adjust_address (registers, Pmode, size),
1514 copy_to_reg (struct_incoming_value));
1515 size += GET_MODE_SIZE (Pmode);
1516 }
1517
1518 /* Return the address of the block. */
1519 return copy_addr_to_reg (XEXP (registers, 0));
1520 }
1521
1522 /* __builtin_apply_args returns block of memory allocated on
1523 the stack into which is stored the arg pointer, structure
1524 value address, static chain, and all the registers that might
1525 possibly be used in performing a function call. The code is
1526 moved to the start of the function so the incoming values are
1527 saved. */
1528
1529 static rtx
1530 expand_builtin_apply_args (void)
1531 {
1532 /* Don't do __builtin_apply_args more than once in a function.
1533 Save the result of the first call and reuse it. */
1534 if (apply_args_value != 0)
1535 return apply_args_value;
1536 {
1537 /* When this function is called, it means that registers must be
1538 saved on entry to this function. So we migrate the
1539 call to the first insn of this function. */
1540 rtx temp;
1541
1542 start_sequence ();
1543 temp = expand_builtin_apply_args_1 ();
1544 rtx_insn *seq = get_insns ();
1545 end_sequence ();
1546
1547 apply_args_value = temp;
1548
1549 /* Put the insns after the NOTE that starts the function.
1550 If this is inside a start_sequence, make the outer-level insn
1551 chain current, so the code is placed at the start of the
1552 function. If internal_arg_pointer is a non-virtual pseudo,
1553 it needs to be placed after the function that initializes
1554 that pseudo. */
1555 push_topmost_sequence ();
1556 if (REG_P (crtl->args.internal_arg_pointer)
1557 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1558 emit_insn_before (seq, parm_birth_insn);
1559 else
1560 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1561 pop_topmost_sequence ();
1562 return temp;
1563 }
1564 }
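/* For illustration (hypothetical user code, following the documented
   semantics of these GCC extensions): a call-forwarding function might
   be written as

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target, args, 64);
     __builtin_return (result);

   where "target" is a placeholder for the real callee and 64 is an
   upper bound on the number of bytes of arguments to copy.  */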
1565
1566 /* Perform an untyped call and save the state required to perform an
1567 untyped return of whatever value was returned by the given function. */
1568
1569 static rtx
1570 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1571 {
1572 int size, align, regno;
1573 machine_mode mode;
1574 rtx incoming_args, result, reg, dest, src;
1575 rtx_call_insn *call_insn;
1576 rtx old_stack_level = 0;
1577 rtx call_fusage = 0;
1578 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1579
1580 arguments = convert_memory_address (Pmode, arguments);
1581
1582 /* Create a block where the return registers can be saved. */
1583 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1584
1585 /* Fetch the arg pointer from the ARGUMENTS block. */
1586 incoming_args = gen_reg_rtx (Pmode);
1587 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1588 if (!STACK_GROWS_DOWNWARD)
1589 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1590 incoming_args, 0, OPTAB_LIB_WIDEN);
1591
1592 /* Push a new argument block and copy the arguments. Do not allow
1593 the (potential) memcpy call below to interfere with our stack
1594 manipulations. */
1595 do_pending_stack_adjust ();
1596 NO_DEFER_POP;
1597
1598 /* Save the stack with nonlocal if available. */
1599 if (targetm.have_save_stack_nonlocal ())
1600 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1601 else
1602 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1603
1604 /* Allocate a block of memory onto the stack and copy the memory
1605 arguments to the outgoing arguments address. We can pass TRUE
1606 as the 4th argument because we just saved the stack pointer
1607 and will restore it right after the call. */
1608 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1609
1610 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1611 may have already set current_function_calls_alloca to true.
1612 current_function_calls_alloca won't be set if argsize is zero,
1613 so we have to guarantee need_drap is true here. */
1614 if (SUPPORTS_STACK_ALIGNMENT)
1615 crtl->need_drap = true;
1616
1617 dest = virtual_outgoing_args_rtx;
1618 if (!STACK_GROWS_DOWNWARD)
1619 {
1620 if (CONST_INT_P (argsize))
1621 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1622 else
1623 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1624 }
1625 dest = gen_rtx_MEM (BLKmode, dest);
1626 set_mem_align (dest, PARM_BOUNDARY);
1627 src = gen_rtx_MEM (BLKmode, incoming_args);
1628 set_mem_align (src, PARM_BOUNDARY);
1629 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1630
1631 /* Refer to the argument block. */
1632 apply_args_size ();
1633 arguments = gen_rtx_MEM (BLKmode, arguments);
1634 set_mem_align (arguments, PARM_BOUNDARY);
1635
1636 /* Walk past the arg-pointer and structure value address. */
1637 size = GET_MODE_SIZE (Pmode);
1638 if (struct_value)
1639 size += GET_MODE_SIZE (Pmode);
1640
1641 /* Restore each of the registers previously saved. Make USE insns
1642 for each of these registers for use in making the call. */
1643 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1644 if ((mode = apply_args_mode[regno]) != VOIDmode)
1645 {
1646 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1647 if (size % align != 0)
1648 size = CEIL (size, align) * align;
1649 reg = gen_rtx_REG (mode, regno);
1650 emit_move_insn (reg, adjust_address (arguments, mode, size));
1651 use_reg (&call_fusage, reg);
1652 size += GET_MODE_SIZE (mode);
1653 }
1654
1655 /* Restore the structure value address unless this is passed as an
1656 "invisible" first argument. */
1657 size = GET_MODE_SIZE (Pmode);
1658 if (struct_value)
1659 {
1660 rtx value = gen_reg_rtx (Pmode);
1661 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1662 emit_move_insn (struct_value, value);
1663 if (REG_P (struct_value))
1664 use_reg (&call_fusage, struct_value);
1665 size += GET_MODE_SIZE (Pmode);
1666 }
1667
1668 /* All arguments and registers used for the call are set up by now! */
1669 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1670
1671 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
1672 no need, and we don't want to load it into a register as an optimization,
1673 because prepare_call_address already did that if it should be done. */
1674 if (GET_CODE (function) != SYMBOL_REF)
1675 function = memory_address (FUNCTION_MODE, function);
1676
1677 /* Generate the actual call instruction and save the return value. */
1678 if (targetm.have_untyped_call ())
1679 {
1680 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1681 emit_call_insn (targetm.gen_untyped_call (mem, result,
1682 result_vector (1, result)));
1683 }
1684 else if (targetm.have_call_value ())
1685 {
1686 rtx valreg = 0;
1687
1688 /* Locate the unique return register. It is not possible to
1689 express a call that sets more than one return register using
1690 call_value; use untyped_call for that. In fact, untyped_call
1691 only needs to save the return registers in the given block. */
1692 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1693 if ((mode = apply_result_mode[regno]) != VOIDmode)
1694 {
1695 gcc_assert (!valreg); /* have_untyped_call required. */
1696
1697 valreg = gen_rtx_REG (mode, regno);
1698 }
1699
1700 emit_insn (targetm.gen_call_value (valreg,
1701 gen_rtx_MEM (FUNCTION_MODE, function),
1702 const0_rtx, NULL_RTX, const0_rtx));
1703
1704 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1705 }
1706 else
1707 gcc_unreachable ();
1708
1709 /* Find the CALL insn we just emitted, and attach the register usage
1710 information. */
1711 call_insn = last_call_insn ();
1712 add_function_usage_to (call_insn, call_fusage);
1713
1714 /* Restore the stack. */
1715 if (targetm.have_save_stack_nonlocal ())
1716 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1717 else
1718 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1719 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1720
1721 OK_DEFER_POP;
1722
1723 /* Return the address of the result block. */
1724 result = copy_addr_to_reg (XEXP (result, 0));
1725 return convert_memory_address (ptr_mode, result);
1726 }
1727
1728 /* Perform an untyped return. */
1729
1730 static void
1731 expand_builtin_return (rtx result)
1732 {
1733 int size, align, regno;
1734 machine_mode mode;
1735 rtx reg;
1736 rtx_insn *call_fusage = 0;
1737
1738 result = convert_memory_address (Pmode, result);
1739
1740 apply_result_size ();
1741 result = gen_rtx_MEM (BLKmode, result);
1742
1743 if (targetm.have_untyped_return ())
1744 {
1745 rtx vector = result_vector (0, result);
1746 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1747 emit_barrier ();
1748 return;
1749 }
1750
1751 /* Restore the return value and note that each value is used. */
1752 size = 0;
1753 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1754 if ((mode = apply_result_mode[regno]) != VOIDmode)
1755 {
1756 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1757 if (size % align != 0)
1758 size = CEIL (size, align) * align;
1759 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1760 emit_move_insn (reg, adjust_address (result, mode, size));
1761
1762 push_to_sequence (call_fusage);
1763 emit_use (reg);
1764 call_fusage = get_insns ();
1765 end_sequence ();
1766 size += GET_MODE_SIZE (mode);
1767 }
1768
1769 /* Put the USE insns before the return. */
1770 emit_insn (call_fusage);
1771
1772 /* Return whatever value was restored by jumping directly to the end
1773 of the function. */
1774 expand_naked_return ();
1775 }
1776
1777 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1778
1779 static enum type_class
1780 type_to_class (tree type)
1781 {
1782 switch (TREE_CODE (type))
1783 {
1784 case VOID_TYPE: return void_type_class;
1785 case INTEGER_TYPE: return integer_type_class;
1786 case ENUMERAL_TYPE: return enumeral_type_class;
1787 case BOOLEAN_TYPE: return boolean_type_class;
1788 case POINTER_TYPE: return pointer_type_class;
1789 case REFERENCE_TYPE: return reference_type_class;
1790 case OFFSET_TYPE: return offset_type_class;
1791 case REAL_TYPE: return real_type_class;
1792 case COMPLEX_TYPE: return complex_type_class;
1793 case FUNCTION_TYPE: return function_type_class;
1794 case METHOD_TYPE: return method_type_class;
1795 case RECORD_TYPE: return record_type_class;
1796 case UNION_TYPE:
1797 case QUAL_UNION_TYPE: return union_type_class;
1798 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1799 ? string_type_class : array_type_class);
1800 case LANG_TYPE: return lang_type_class;
1801 default: return no_type_class;
1802 }
1803 }
1804
1805 /* Expand a call EXP to __builtin_classify_type. */
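/* For instance (purely illustrative), in user code
   __builtin_classify_type (3.14) evaluates to the value of real_type_class
   and __builtin_classify_type (42) to integer_type_class, following the
   mapping in type_to_class above. */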
1806
1807 static rtx
1808 expand_builtin_classify_type (tree exp)
1809 {
1810 if (call_expr_nargs (exp))
1811 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1812 return GEN_INT (no_type_class);
1813 }
1814
1815 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1816 determines which among a set of three builtin math functions is
1817 appropriate for a given type mode. The `F' and `L' cases are
1818 automatically generated from the `double' case. */
1819 #define CASE_MATHFN(MATHFN) \
1820 CASE_CFN_##MATHFN: \
1821 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1822 fcodel = BUILT_IN_##MATHFN##L ; break;
1823 /* Similar to above, but appends _R after any F/L suffix. */
1824 #define CASE_MATHFN_REENT(MATHFN) \
1825 case CFN_BUILT_IN_##MATHFN##_R: \
1826 case CFN_BUILT_IN_##MATHFN##F_R: \
1827 case CFN_BUILT_IN_##MATHFN##L_R: \
1828 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1829 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1830
1831 /* Return a function equivalent to FN but operating on floating-point
1832 values of type TYPE, or END_BUILTINS if no such function exists.
1833 This is purely an operation on function codes; it does not guarantee
1834 that the target actually has an implementation of the function. */
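/* An illustrative sketch (assuming the usual type nodes, as used elsewhere
   in this file): mathfn_built_in_2 (float_type_node, CFN_SQRT) would return
   BUILT_IN_SQRTF, while a TYPE whose main variant is none of float, double
   or long double yields END_BUILTINS. */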
1835
1836 static built_in_function
1837 mathfn_built_in_2 (tree type, combined_fn fn)
1838 {
1839 built_in_function fcode, fcodef, fcodel;
1840
1841 switch (fn)
1842 {
1843 CASE_MATHFN (ACOS)
1844 CASE_MATHFN (ACOSH)
1845 CASE_MATHFN (ASIN)
1846 CASE_MATHFN (ASINH)
1847 CASE_MATHFN (ATAN)
1848 CASE_MATHFN (ATAN2)
1849 CASE_MATHFN (ATANH)
1850 CASE_MATHFN (CBRT)
1851 CASE_MATHFN (CEIL)
1852 CASE_MATHFN (CEXPI)
1853 CASE_MATHFN (COPYSIGN)
1854 CASE_MATHFN (COS)
1855 CASE_MATHFN (COSH)
1856 CASE_MATHFN (DREM)
1857 CASE_MATHFN (ERF)
1858 CASE_MATHFN (ERFC)
1859 CASE_MATHFN (EXP)
1860 CASE_MATHFN (EXP10)
1861 CASE_MATHFN (EXP2)
1862 CASE_MATHFN (EXPM1)
1863 CASE_MATHFN (FABS)
1864 CASE_MATHFN (FDIM)
1865 CASE_MATHFN (FLOOR)
1866 CASE_MATHFN (FMA)
1867 CASE_MATHFN (FMAX)
1868 CASE_MATHFN (FMIN)
1869 CASE_MATHFN (FMOD)
1870 CASE_MATHFN (FREXP)
1871 CASE_MATHFN (GAMMA)
1872 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1873 CASE_MATHFN (HUGE_VAL)
1874 CASE_MATHFN (HYPOT)
1875 CASE_MATHFN (ILOGB)
1876 CASE_MATHFN (ICEIL)
1877 CASE_MATHFN (IFLOOR)
1878 CASE_MATHFN (INF)
1879 CASE_MATHFN (IRINT)
1880 CASE_MATHFN (IROUND)
1881 CASE_MATHFN (ISINF)
1882 CASE_MATHFN (J0)
1883 CASE_MATHFN (J1)
1884 CASE_MATHFN (JN)
1885 CASE_MATHFN (LCEIL)
1886 CASE_MATHFN (LDEXP)
1887 CASE_MATHFN (LFLOOR)
1888 CASE_MATHFN (LGAMMA)
1889 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1890 CASE_MATHFN (LLCEIL)
1891 CASE_MATHFN (LLFLOOR)
1892 CASE_MATHFN (LLRINT)
1893 CASE_MATHFN (LLROUND)
1894 CASE_MATHFN (LOG)
1895 CASE_MATHFN (LOG10)
1896 CASE_MATHFN (LOG1P)
1897 CASE_MATHFN (LOG2)
1898 CASE_MATHFN (LOGB)
1899 CASE_MATHFN (LRINT)
1900 CASE_MATHFN (LROUND)
1901 CASE_MATHFN (MODF)
1902 CASE_MATHFN (NAN)
1903 CASE_MATHFN (NANS)
1904 CASE_MATHFN (NEARBYINT)
1905 CASE_MATHFN (NEXTAFTER)
1906 CASE_MATHFN (NEXTTOWARD)
1907 CASE_MATHFN (POW)
1908 CASE_MATHFN (POWI)
1909 CASE_MATHFN (POW10)
1910 CASE_MATHFN (REMAINDER)
1911 CASE_MATHFN (REMQUO)
1912 CASE_MATHFN (RINT)
1913 CASE_MATHFN (ROUND)
1914 CASE_MATHFN (SCALB)
1915 CASE_MATHFN (SCALBLN)
1916 CASE_MATHFN (SCALBN)
1917 CASE_MATHFN (SIGNBIT)
1918 CASE_MATHFN (SIGNIFICAND)
1919 CASE_MATHFN (SIN)
1920 CASE_MATHFN (SINCOS)
1921 CASE_MATHFN (SINH)
1922 CASE_MATHFN (SQRT)
1923 CASE_MATHFN (TAN)
1924 CASE_MATHFN (TANH)
1925 CASE_MATHFN (TGAMMA)
1926 CASE_MATHFN (TRUNC)
1927 CASE_MATHFN (Y0)
1928 CASE_MATHFN (Y1)
1929 CASE_MATHFN (YN)
1930
1931 default:
1932 return END_BUILTINS;
1933 }
1934
1935 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1936 return fcode;
1937 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1938 return fcodef;
1939 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1940 return fcodel;
1941 else
1942 return END_BUILTINS;
1943 }
1944
1945 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1946 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1947 otherwise use the explicit declaration. If we can't do the conversion,
1948 return null. */
1949
1950 static tree
1951 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1952 {
1953 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1954 if (fcode2 == END_BUILTINS)
1955 return NULL_TREE;
1956
1957 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1958 return NULL_TREE;
1959
1960 return builtin_decl_explicit (fcode2);
1961 }
1962
1963 /* Like mathfn_built_in_1, but always use the implicit array. */
1964
1965 tree
1966 mathfn_built_in (tree type, combined_fn fn)
1967 {
1968 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1969 }
1970
1971 /* Like mathfn_built_in_1, but take a built_in_function and
1972 always use the implicit array. */
1973
1974 tree
1975 mathfn_built_in (tree type, enum built_in_function fn)
1976 {
1977 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1978 }
1979
1980 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1981 return its code, otherwise return IFN_LAST. Note that this function
1982 only tests whether the function is defined in internals.def, not whether
1983 it is actually available on the target. */
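/* For example (illustrative; SQRT and EXP10 are defined in internal-fn.def
   in this tree): the decl for __builtin_sqrtf maps to IFN_SQRT, the decl
   for __builtin_pow10 maps to IFN_EXP10 (see below), and a builtin with no
   internal counterpart yields IFN_LAST. */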
1984
1985 internal_fn
1986 associated_internal_fn (tree fndecl)
1987 {
1988 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1989 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1990 switch (DECL_FUNCTION_CODE (fndecl))
1991 {
1992 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1993 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1994 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1995 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1996 #include "internal-fn.def"
1997
1998 CASE_FLT_FN (BUILT_IN_POW10):
1999 return IFN_EXP10;
2000
2001 CASE_FLT_FN (BUILT_IN_DREM):
2002 return IFN_REMAINDER;
2003
2004 CASE_FLT_FN (BUILT_IN_SCALBN):
2005 CASE_FLT_FN (BUILT_IN_SCALBLN):
2006 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2007 return IFN_LDEXP;
2008 return IFN_LAST;
2009
2010 default:
2011 return IFN_LAST;
2012 }
2013 }
2014
2015 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2016 on the current target by a call to an internal function, return the
2017 code of that internal function, otherwise return IFN_LAST. The caller
2018 is responsible for ensuring that any side-effects of the built-in
2019 call are dealt with correctly. E.g. if CALL sets errno, the caller
2020 must decide that the errno result isn't needed or make it available
2021 in some other way. */
2022
2023 internal_fn
2024 replacement_internal_fn (gcall *call)
2025 {
2026 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2027 {
2028 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2029 if (ifn != IFN_LAST)
2030 {
2031 tree_pair types = direct_internal_fn_types (ifn, call);
2032 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2033 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2034 return ifn;
2035 }
2036 }
2037 return IFN_LAST;
2038 }
2039
2040 /* Expand a call to the builtin ternary math functions (fma).
2041 Return NULL_RTX if a normal call should be emitted rather than expanding the
2042 function in-line. EXP is the expression that is a call to the builtin
2043 function; if convenient, the result should be placed in TARGET.
2044 SUBTARGET may be used as the target for computing one of EXP's
2045 operands. */
2046
2047 static rtx
2048 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2049 {
2050 optab builtin_optab;
2051 rtx op0, op1, op2, result;
2052 rtx_insn *insns;
2053 tree fndecl = get_callee_fndecl (exp);
2054 tree arg0, arg1, arg2;
2055 machine_mode mode;
2056
2057 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2058 return NULL_RTX;
2059
2060 arg0 = CALL_EXPR_ARG (exp, 0);
2061 arg1 = CALL_EXPR_ARG (exp, 1);
2062 arg2 = CALL_EXPR_ARG (exp, 2);
2063
2064 switch (DECL_FUNCTION_CODE (fndecl))
2065 {
2066 CASE_FLT_FN (BUILT_IN_FMA):
2067 builtin_optab = fma_optab; break;
2068 default:
2069 gcc_unreachable ();
2070 }
2071
2072 /* Make a suitable register to place result in. */
2073 mode = TYPE_MODE (TREE_TYPE (exp));
2074
2075 /* Before working hard, check whether the instruction is available. */
2076 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2077 return NULL_RTX;
2078
2079 result = gen_reg_rtx (mode);
2080
2081 /* Always stabilize the argument list. */
2082 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2083 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2084 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2085
2086 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2087 op1 = expand_normal (arg1);
2088 op2 = expand_normal (arg2);
2089
2090 start_sequence ();
2091
2092 /* Compute into RESULT.
2093 Set RESULT to wherever the result comes back. */
2094 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2095 result, 0);
2096
2097 /* If we were unable to expand via the builtin, stop the sequence
2098 (without outputting the insns) and call the library function
2099 with the stabilized argument list. */
2100 if (result == 0)
2101 {
2102 end_sequence ();
2103 return expand_call (exp, target, target == const0_rtx);
2104 }
2105
2106 /* Output the entire sequence. */
2107 insns = get_insns ();
2108 end_sequence ();
2109 emit_insn (insns);
2110
2111 return result;
2112 }
2113
2114 /* Expand a call to the builtin sin and cos math functions.
2115 Return NULL_RTX if a normal call should be emitted rather than expanding the
2116 function in-line. EXP is the expression that is a call to the builtin
2117 function; if convenient, the result should be placed in TARGET.
2118 SUBTARGET may be used as the target for computing one of EXP's
2119 operands. */
2120
2121 static rtx
2122 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2123 {
2124 optab builtin_optab;
2125 rtx op0;
2126 rtx_insn *insns;
2127 tree fndecl = get_callee_fndecl (exp);
2128 machine_mode mode;
2129 tree arg;
2130
2131 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2132 return NULL_RTX;
2133
2134 arg = CALL_EXPR_ARG (exp, 0);
2135
2136 switch (DECL_FUNCTION_CODE (fndecl))
2137 {
2138 CASE_FLT_FN (BUILT_IN_SIN):
2139 CASE_FLT_FN (BUILT_IN_COS):
2140 builtin_optab = sincos_optab; break;
2141 default:
2142 gcc_unreachable ();
2143 }
2144
2145 /* Make a suitable register to place result in. */
2146 mode = TYPE_MODE (TREE_TYPE (exp));
2147
2148 /* Check if the sincos insn is available; otherwise fall back
2149 to the sin or cos insn. */
2150 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2151 switch (DECL_FUNCTION_CODE (fndecl))
2152 {
2153 CASE_FLT_FN (BUILT_IN_SIN):
2154 builtin_optab = sin_optab; break;
2155 CASE_FLT_FN (BUILT_IN_COS):
2156 builtin_optab = cos_optab; break;
2157 default:
2158 gcc_unreachable ();
2159 }
2160
2161 /* Before working hard, check whether the instruction is available. */
2162 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2163 {
2164 rtx result = gen_reg_rtx (mode);
2165
2166 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2167 need to expand the argument again. This way, we will not perform
2168 side-effects more than once. */
2169 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2170
2171 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2172
2173 start_sequence ();
2174
2175 /* Compute into RESULT.
2176 Set RESULT to wherever the result comes back. */
2177 if (builtin_optab == sincos_optab)
2178 {
2179 int ok;
2180
2181 switch (DECL_FUNCTION_CODE (fndecl))
2182 {
2183 CASE_FLT_FN (BUILT_IN_SIN):
2184 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2185 break;
2186 CASE_FLT_FN (BUILT_IN_COS):
2187 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2188 break;
2189 default:
2190 gcc_unreachable ();
2191 }
2192 gcc_assert (ok);
2193 }
2194 else
2195 result = expand_unop (mode, builtin_optab, op0, result, 0);
2196
2197 if (result != 0)
2198 {
2199 /* Output the entire sequence. */
2200 insns = get_insns ();
2201 end_sequence ();
2202 emit_insn (insns);
2203 return result;
2204 }
2205
2206 /* If we were unable to expand via the builtin, stop the sequence
2207 (without outputting the insns) and call the library function
2208 with the stabilized argument list. */
2209 end_sequence ();
2210 }
2211
2212 return expand_call (exp, target, target == const0_rtx);
2213 }
2214
2215 /* Given an interclass math builtin decl FNDECL and its argument ARG
2216 return an RTL instruction code that implements the functionality.
2217 If that isn't possible or available return CODE_FOR_nothing. */
2218
2219 static enum insn_code
2220 interclass_mathfn_icode (tree arg, tree fndecl)
2221 {
2222 bool errno_set = false;
2223 optab builtin_optab = unknown_optab;
2224 machine_mode mode;
2225
2226 switch (DECL_FUNCTION_CODE (fndecl))
2227 {
2228 CASE_FLT_FN (BUILT_IN_ILOGB):
2229 errno_set = true;
2230 builtin_optab = ilogb_optab;
2231 break;
2232 default:
2233 gcc_unreachable ();
2234 }
2235
2236 /* There's no easy way to detect the case we need to set EDOM. */
2237 if (flag_errno_math && errno_set)
2238 return CODE_FOR_nothing;
2239
2240 /* Optab mode depends on the mode of the input argument. */
2241 mode = TYPE_MODE (TREE_TYPE (arg));
2242
2243 if (builtin_optab)
2244 return optab_handler (builtin_optab, mode);
2245 return CODE_FOR_nothing;
2246 }
2247
2248 /* Expand a call to one of the builtin math functions that operate on
2249 a floating-point argument and output an integer result (ilogb, etc.).
2250 Return 0 if a normal call should be emitted rather than expanding the
2251 function in-line. EXP is the expression that is a call to the builtin
2252 function; if convenient, the result should be placed in TARGET. */
2253
2254 static rtx
2255 expand_builtin_interclass_mathfn (tree exp, rtx target)
2256 {
2257 enum insn_code icode = CODE_FOR_nothing;
2258 rtx op0;
2259 tree fndecl = get_callee_fndecl (exp);
2260 machine_mode mode;
2261 tree arg;
2262
2263 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2264 return NULL_RTX;
2265
2266 arg = CALL_EXPR_ARG (exp, 0);
2267 icode = interclass_mathfn_icode (arg, fndecl);
2268 mode = TYPE_MODE (TREE_TYPE (arg));
2269
2270 if (icode != CODE_FOR_nothing)
2271 {
2272 struct expand_operand ops[1];
2273 rtx_insn *last = get_last_insn ();
2274 tree orig_arg = arg;
2275
2276 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2277 need to expand the argument again. This way, we will not perform
2278 side-effects more than once. */
2279 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2280
2281 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2282
2283 if (mode != GET_MODE (op0))
2284 op0 = convert_to_mode (mode, op0, 0);
2285
2286 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2287 if (maybe_legitimize_operands (icode, 0, 1, ops)
2288 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2289 return ops[0].value;
2290
2291 delete_insns_since (last);
2292 CALL_EXPR_ARG (exp, 0) = orig_arg;
2293 }
2294
2295 return NULL_RTX;
2296 }
2297
2298 /* Expand a call to the builtin sincos math function.
2299 Return NULL_RTX if a normal call should be emitted rather than expanding the
2300 function in-line. EXP is the expression that is a call to the builtin
2301 function. */
2302
2303 static rtx
2304 expand_builtin_sincos (tree exp)
2305 {
2306 rtx op0, op1, op2, target1, target2;
2307 machine_mode mode;
2308 tree arg, sinp, cosp;
2309 int result;
2310 location_t loc = EXPR_LOCATION (exp);
2311 tree alias_type, alias_off;
2312
2313 if (!validate_arglist (exp, REAL_TYPE,
2314 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2316
2317 arg = CALL_EXPR_ARG (exp, 0);
2318 sinp = CALL_EXPR_ARG (exp, 1);
2319 cosp = CALL_EXPR_ARG (exp, 2);
2320
2321 /* Make a suitable register to place result in. */
2322 mode = TYPE_MODE (TREE_TYPE (arg));
2323
2324 /* Check if sincos insn is available, otherwise emit the call. */
2325 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2326 return NULL_RTX;
2327
2328 target1 = gen_reg_rtx (mode);
2329 target2 = gen_reg_rtx (mode);
2330
2331 op0 = expand_normal (arg);
2332 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2333 alias_off = build_int_cst (alias_type, 0);
2334 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2335 sinp, alias_off));
2336 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2337 cosp, alias_off));
2338
2339 /* Compute into target1 and target2.
2340 Set TARGET to wherever the result comes back. */
2341 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2342 gcc_assert (result);
2343
2344 /* Move target1 and target2 to the memory locations indicated
2345 by op1 and op2. */
2346 emit_move_insn (op1, target1);
2347 emit_move_insn (op2, target2);
2348
2349 return const0_rtx;
2350 }
2351
2352 /* Expand a call to the internal cexpi builtin via the sincos math function.
2353 EXP is the expression that is a call to the builtin function; if convenient,
2354 the result should be placed in TARGET. */
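/* Roughly, __builtin_cexpi (x) computes cos (x) + i*sin (x). When a sincos
   optab or libc sincos is available the expansion below is essentially
   (illustrative only)
     sincos (x, &s, &c);  result = c + s*I;
   and otherwise it falls back to a call to cexp (0.0 + x*I). */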
2355
2356 static rtx
2357 expand_builtin_cexpi (tree exp, rtx target)
2358 {
2359 tree fndecl = get_callee_fndecl (exp);
2360 tree arg, type;
2361 machine_mode mode;
2362 rtx op0, op1, op2;
2363 location_t loc = EXPR_LOCATION (exp);
2364
2365 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2366 return NULL_RTX;
2367
2368 arg = CALL_EXPR_ARG (exp, 0);
2369 type = TREE_TYPE (arg);
2370 mode = TYPE_MODE (TREE_TYPE (arg));
2371
2372 /* Try expanding via a sincos optab, fall back to emitting a libcall
2373 to sincos or cexp. We are sure one of them is available because cexpi
2374 is only generated from sincos or cexp, or when either of them is available. */
2375 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2376 {
2377 op1 = gen_reg_rtx (mode);
2378 op2 = gen_reg_rtx (mode);
2379
2380 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2381
2382 /* Compute into op1 and op2. */
2383 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2384 }
2385 else if (targetm.libc_has_function (function_sincos))
2386 {
2387 tree call, fn = NULL_TREE;
2388 tree top1, top2;
2389 rtx op1a, op2a;
2390
2391 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2392 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2393 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2394 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2395 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2396 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2397 else
2398 gcc_unreachable ();
2399
2400 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2401 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2402 op1a = copy_addr_to_reg (XEXP (op1, 0));
2403 op2a = copy_addr_to_reg (XEXP (op2, 0));
2404 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2405 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2406
2407 /* Make sure not to fold the sincos call again. */
2408 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2409 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2410 call, 3, arg, top1, top2));
2411 }
2412 else
2413 {
2414 tree call, fn = NULL_TREE, narg;
2415 tree ctype = build_complex_type (type);
2416
2417 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2418 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2419 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2420 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2421 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2422 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2423 else
2424 gcc_unreachable ();
2425
2426 /* If we don't have a decl for cexp create one. This is the
2427 friendliest fallback if the user calls __builtin_cexpi
2428 without full target C99 function support. */
2429 if (fn == NULL_TREE)
2430 {
2431 tree fntype;
2432 const char *name = NULL;
2433
2434 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2435 name = "cexpf";
2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2437 name = "cexp";
2438 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2439 name = "cexpl";
2440
2441 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2442 fn = build_fn_decl (name, fntype);
2443 }
2444
2445 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2446 build_real (type, dconst0), arg);
2447
2448 /* Make sure not to fold the cexp call again. */
2449 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2450 return expand_expr (build_call_nary (ctype, call, 1, narg),
2451 target, VOIDmode, EXPAND_NORMAL);
2452 }
2453
2454 /* Now build the proper return type. */
2455 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2456 make_tree (TREE_TYPE (arg), op2),
2457 make_tree (TREE_TYPE (arg), op1)),
2458 target, VOIDmode, EXPAND_NORMAL);
2459 }
2460
2461 /* Conveniently construct a function call expression. FNDECL names the
2462 function to be called, N is the number of arguments, and the "..."
2463 parameters are the argument expressions. Unlike build_call_expr
2464 this doesn't fold the call, hence it will always return a CALL_EXPR. */
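/* A typical (illustrative) use, mirroring the rounding fallbacks later in
   this file:
     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);
   which yields an unfolded CALL_EXPR calling FALLBACK_FNDECL on ARG. */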
2465
2466 static tree
2467 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2468 {
2469 va_list ap;
2470 tree fntype = TREE_TYPE (fndecl);
2471 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2472
2473 va_start (ap, n);
2474 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2475 va_end (ap);
2476 SET_EXPR_LOCATION (fn, loc);
2477 return fn;
2478 }
2479
2480 /* Expand a call to one of the builtin rounding functions gcc defines
2481 as an extension (lfloor and lceil). As these are gcc extensions we
2482 do not need to worry about setting errno to EDOM.
2483 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2484 EXP is the expression that is a call to the builtin function;
2485 if convenient, the result should be placed in TARGET. */
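/* Concretely (an illustrative sketch of the fallback described above): for
   __builtin_lfloor (x) on a target without an lfloor optab, the code below
   emits the equivalent of
     (long) floor (x)
   using the floor fallback decl and expand_fix. */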
2486
2487 static rtx
2488 expand_builtin_int_roundingfn (tree exp, rtx target)
2489 {
2490 convert_optab builtin_optab;
2491 rtx op0, tmp;
2492 rtx_insn *insns;
2493 tree fndecl = get_callee_fndecl (exp);
2494 enum built_in_function fallback_fn;
2495 tree fallback_fndecl;
2496 machine_mode mode;
2497 tree arg;
2498
2499 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2500 gcc_unreachable ();
2501
2502 arg = CALL_EXPR_ARG (exp, 0);
2503
2504 switch (DECL_FUNCTION_CODE (fndecl))
2505 {
2506 CASE_FLT_FN (BUILT_IN_ICEIL):
2507 CASE_FLT_FN (BUILT_IN_LCEIL):
2508 CASE_FLT_FN (BUILT_IN_LLCEIL):
2509 builtin_optab = lceil_optab;
2510 fallback_fn = BUILT_IN_CEIL;
2511 break;
2512
2513 CASE_FLT_FN (BUILT_IN_IFLOOR):
2514 CASE_FLT_FN (BUILT_IN_LFLOOR):
2515 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2516 builtin_optab = lfloor_optab;
2517 fallback_fn = BUILT_IN_FLOOR;
2518 break;
2519
2520 default:
2521 gcc_unreachable ();
2522 }
2523
2524 /* Make a suitable register to place result in. */
2525 mode = TYPE_MODE (TREE_TYPE (exp));
2526
2527 target = gen_reg_rtx (mode);
2528
2529 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2530 need to expand the argument again. This way, we will not perform
2531 side-effects more than once. */
2532 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2533
2534 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2535
2536 start_sequence ();
2537
2538 /* Compute into TARGET. */
2539 if (expand_sfix_optab (target, op0, builtin_optab))
2540 {
2541 /* Output the entire sequence. */
2542 insns = get_insns ();
2543 end_sequence ();
2544 emit_insn (insns);
2545 return target;
2546 }
2547
2548 /* If we were unable to expand via the builtin, stop the sequence
2549 (without outputting the insns). */
2550 end_sequence ();
2551
2552 /* Fall back to floating point rounding optab. */
2553 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2554
2555 /* For non-C99 targets we may end up without a fallback fndecl here
2556 if the user called __builtin_lfloor directly. In this case emit
2557 a call to the floor/ceil variants nevertheless. This should result
2558 in the best user experience for targets without full C99 support. */
2559 if (fallback_fndecl == NULL_TREE)
2560 {
2561 tree fntype;
2562 const char *name = NULL;
2563
2564 switch (DECL_FUNCTION_CODE (fndecl))
2565 {
2566 case BUILT_IN_ICEIL:
2567 case BUILT_IN_LCEIL:
2568 case BUILT_IN_LLCEIL:
2569 name = "ceil";
2570 break;
2571 case BUILT_IN_ICEILF:
2572 case BUILT_IN_LCEILF:
2573 case BUILT_IN_LLCEILF:
2574 name = "ceilf";
2575 break;
2576 case BUILT_IN_ICEILL:
2577 case BUILT_IN_LCEILL:
2578 case BUILT_IN_LLCEILL:
2579 name = "ceill";
2580 break;
2581 case BUILT_IN_IFLOOR:
2582 case BUILT_IN_LFLOOR:
2583 case BUILT_IN_LLFLOOR:
2584 name = "floor";
2585 break;
2586 case BUILT_IN_IFLOORF:
2587 case BUILT_IN_LFLOORF:
2588 case BUILT_IN_LLFLOORF:
2589 name = "floorf";
2590 break;
2591 case BUILT_IN_IFLOORL:
2592 case BUILT_IN_LFLOORL:
2593 case BUILT_IN_LLFLOORL:
2594 name = "floorl";
2595 break;
2596 default:
2597 gcc_unreachable ();
2598 }
2599
2600 fntype = build_function_type_list (TREE_TYPE (arg),
2601 TREE_TYPE (arg), NULL_TREE);
2602 fallback_fndecl = build_fn_decl (name, fntype);
2603 }
2604
2605 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2606
2607 tmp = expand_normal (exp);
2608 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2609
2610 /* Truncate the result of floating point optab to integer
2611 via expand_fix (). */
2612 target = gen_reg_rtx (mode);
2613 expand_fix (target, tmp, 0);
2614
2615 return target;
2616 }
2617
2618 /* Expand a call to one of the builtin math functions doing integer
2619 conversion (lrint).
2620 Return 0 if a normal call should be emitted rather than expanding the
2621 function in-line. EXP is the expression that is a call to the builtin
2622 function; if convenient, the result should be placed in TARGET. */
2623
2624 static rtx
2625 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2626 {
2627 convert_optab builtin_optab;
2628 rtx op0;
2629 rtx_insn *insns;
2630 tree fndecl = get_callee_fndecl (exp);
2631 tree arg;
2632 machine_mode mode;
2633 enum built_in_function fallback_fn = BUILT_IN_NONE;
2634
2635 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2636 gcc_unreachable ();
2637
2638 arg = CALL_EXPR_ARG (exp, 0);
2639
2640 switch (DECL_FUNCTION_CODE (fndecl))
2641 {
2642 CASE_FLT_FN (BUILT_IN_IRINT):
2643 fallback_fn = BUILT_IN_LRINT;
2644 gcc_fallthrough ();
2645 CASE_FLT_FN (BUILT_IN_LRINT):
2646 CASE_FLT_FN (BUILT_IN_LLRINT):
2647 builtin_optab = lrint_optab;
2648 break;
2649
2650 CASE_FLT_FN (BUILT_IN_IROUND):
2651 fallback_fn = BUILT_IN_LROUND;
2652 gcc_fallthrough ();
2653 CASE_FLT_FN (BUILT_IN_LROUND):
2654 CASE_FLT_FN (BUILT_IN_LLROUND):
2655 builtin_optab = lround_optab;
2656 break;
2657
2658 default:
2659 gcc_unreachable ();
2660 }
2661
2662 /* There's no easy way to detect the case we need to set EDOM. */
2663 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2664 return NULL_RTX;
2665
2666 /* Make a suitable register to place result in. */
2667 mode = TYPE_MODE (TREE_TYPE (exp));
2668
2669 /* There's no easy way to detect the case we need to set EDOM. */
2670 if (!flag_errno_math)
2671 {
2672 rtx result = gen_reg_rtx (mode);
2673
2674 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2675 need to expand the argument again. This way, we will not perform
2676 side-effects more than once. */
2677 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2678
2679 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2680
2681 start_sequence ();
2682
2683 if (expand_sfix_optab (result, op0, builtin_optab))
2684 {
2685 /* Output the entire sequence. */
2686 insns = get_insns ();
2687 end_sequence ();
2688 emit_insn (insns);
2689 return result;
2690 }
2691
2692 /* If we were unable to expand via the builtin, stop the sequence
2693 (without outputting the insns) and call the library function
2694 with the stabilized argument list. */
2695 end_sequence ();
2696 }
2697
2698 if (fallback_fn != BUILT_IN_NONE)
2699 {
2700 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2701 targets, (int) round (x) should never be transformed into
2702 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2703 a call to lround in the hope that the target provides at least some
2704 C99 functions. This should result in the best user experience for
2705 targets without full C99 support. */
2706 tree fallback_fndecl = mathfn_built_in_1
2707 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2708
2709 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2710 fallback_fndecl, 1, arg);
2711
2712 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2713 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2714 return convert_to_mode (mode, target, 0);
2715 }
2716
2717 return expand_call (exp, target, target == const0_rtx);
2718 }
2719
2720 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2721 a normal call should be emitted rather than expanding the function
2722 in-line. EXP is the expression that is a call to the builtin
2723 function; if convenient, the result should be placed in TARGET. */
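/* For instance (illustrative), __builtin_powi (x, n) with double X on a
   typical target becomes a libgcc libcall such as __powidf2 (x, n), found
   through optab_libfunc (powi_optab, DFmode) below. */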
2724
2725 static rtx
2726 expand_builtin_powi (tree exp, rtx target)
2727 {
2728 tree arg0, arg1;
2729 rtx op0, op1;
2730 machine_mode mode;
2731 machine_mode mode2;
2732
2733 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2734 return NULL_RTX;
2735
2736 arg0 = CALL_EXPR_ARG (exp, 0);
2737 arg1 = CALL_EXPR_ARG (exp, 1);
2738 mode = TYPE_MODE (TREE_TYPE (exp));
2739
2740 /* Emit a libcall to libgcc. */
2741
2742 /* Mode of the 2nd argument must match that of an int. */
2743 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2744
2745 if (target == NULL_RTX)
2746 target = gen_reg_rtx (mode);
2747
2748 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2749 if (GET_MODE (op0) != mode)
2750 op0 = convert_to_mode (mode, op0, 0);
2751 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2752 if (GET_MODE (op1) != mode2)
2753 op1 = convert_to_mode (mode2, op1, 0);
2754
2755 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2756 target, LCT_CONST, mode, 2,
2757 op0, mode, op1, mode2);
2758
2759 return target;
2760 }
2761
2762 /* Expand expression EXP which is a call to the strlen builtin. Return
2763 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
2764 try to get the result in TARGET, if convenient. */
2765
2766 static rtx
2767 expand_builtin_strlen (tree exp, rtx target,
2768 machine_mode target_mode)
2769 {
2770 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2771 return NULL_RTX;
2772 else
2773 {
2774 struct expand_operand ops[4];
2775 rtx pat;
2776 tree len;
2777 tree src = CALL_EXPR_ARG (exp, 0);
2778 rtx src_reg;
2779 rtx_insn *before_strlen;
2780 machine_mode insn_mode = target_mode;
2781 enum insn_code icode = CODE_FOR_nothing;
2782 unsigned int align;
2783
2784 /* If the length can be computed at compile-time, return it. */
2785 len = c_strlen (src, 0);
2786 if (len)
2787 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2788
2789 /* If the length can be computed at compile-time and is constant
2790 integer, but there are side-effects in src, evaluate
2791 src for side-effects, then return len.
2792 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2793 can be optimized into: i++; x = 3; */
2794 len = c_strlen (src, 1);
2795 if (len && TREE_CODE (len) == INTEGER_CST)
2796 {
2797 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2798 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2799 }
2800
2801 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2802
2803 /* If SRC is not a pointer type, don't do this operation inline. */
2804 if (align == 0)
2805 return NULL_RTX;
2806
2807 /* Bail out if we can't compute strlen in the right mode. */
2808 while (insn_mode != VOIDmode)
2809 {
2810 icode = optab_handler (strlen_optab, insn_mode);
2811 if (icode != CODE_FOR_nothing)
2812 break;
2813
2814 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2815 }
2816 if (insn_mode == VOIDmode)
2817 return NULL_RTX;
2818
2819 /* Make a place to hold the source address. We will not expand
2820 the actual source until we are sure that the expansion will
2821 not fail -- there are trees that cannot be expanded twice. */
2822 src_reg = gen_reg_rtx (Pmode);
2823
2824 /* Mark the beginning of the strlen sequence so we can emit the
2825 source operand later. */
2826 before_strlen = get_last_insn ();
2827
2828 create_output_operand (&ops[0], target, insn_mode);
2829 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2830 create_integer_operand (&ops[2], 0);
2831 create_integer_operand (&ops[3], align);
2832 if (!maybe_expand_insn (icode, 4, ops))
2833 return NULL_RTX;
2834
2835 /* Now that we are assured of success, expand the source. */
2836 start_sequence ();
2837 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2838 if (pat != src_reg)
2839 {
2840 #ifdef POINTERS_EXTEND_UNSIGNED
2841 if (GET_MODE (pat) != Pmode)
2842 pat = convert_to_mode (Pmode, pat,
2843 POINTERS_EXTEND_UNSIGNED);
2844 #endif
2845 emit_move_insn (src_reg, pat);
2846 }
2847 pat = get_insns ();
2848 end_sequence ();
2849
2850 if (before_strlen)
2851 emit_insn_after (pat, before_strlen);
2852 else
2853 emit_insn_before (pat, get_insns ());
2854
2855 /* Return the value in the proper mode for this function. */
2856 if (GET_MODE (ops[0].value) == target_mode)
2857 target = ops[0].value;
2858 else if (target != 0)
2859 convert_move (target, ops[0].value, 0);
2860 else
2861 target = convert_to_mode (target_mode, ops[0].value, 0);
2862
2863 return target;
2864 }
2865 }
2866
2867 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2868 bytes from constant string DATA + OFFSET and return it as a target
2869 constant. */
2870
2871 static rtx
2872 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2873 machine_mode mode)
2874 {
2875 const char *str = (const char *) data;
2876
2877 gcc_assert (offset >= 0
2878 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2879 <= strlen (str) + 1));
2880
2881 return c_readstr (str + offset, mode);
2882 }
2883
2884 /* LEN specifies the length of the block for the memcpy/memset operation.
2885 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2886 In some cases we can make a very likely guess at the maximum size,
2887 which we then put into PROBABLE_MAX_SIZE. */
2888
2889 static void
2890 determine_block_size (tree len, rtx len_rtx,
2891 unsigned HOST_WIDE_INT *min_size,
2892 unsigned HOST_WIDE_INT *max_size,
2893 unsigned HOST_WIDE_INT *probable_max_size)
2894 {
2895 if (CONST_INT_P (len_rtx))
2896 {
2897 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2898 return;
2899 }
2900 else
2901 {
2902 wide_int min, max;
2903 enum value_range_type range_type = VR_UNDEFINED;
2904
2905 /* Determine bounds from the type. */
2906 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2907 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2908 else
2909 *min_size = 0;
2910 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2911 *probable_max_size = *max_size
2912 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2913 else
2914 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2915
2916 if (TREE_CODE (len) == SSA_NAME)
2917 range_type = get_range_info (len, &min, &max);
2918 if (range_type == VR_RANGE)
2919 {
2920 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2921 *min_size = min.to_uhwi ();
2922 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2923 *probable_max_size = *max_size = max.to_uhwi ();
2924 }
2925 else if (range_type == VR_ANTI_RANGE)
2926 {
2927 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2928 if (min == 0)
2929 {
2930 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2931 *min_size = max.to_uhwi () + 1;
2932 }
2933 /* Code like
2934
2935 int n;
2936 if (n < 100)
2937 memcpy (a, b, n)
2938
2939 produces an anti-range allowing negative values of N. We can still
2940 use that information to guess that N is not negative.
2941 */
2942 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2943 *probable_max_size = min.to_uhwi () - 1;
2944 }
2945 }
2946 gcc_checking_assert (*max_size <=
2947 (unsigned HOST_WIDE_INT)
2948 GET_MODE_MASK (GET_MODE (len_rtx)));
2949 }
2950
2951 /* Helper function to do the actual work for expand_builtin_memcpy. */
2952
2953 static rtx
2954 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2955 {
2956 const char *src_str;
2957 unsigned int src_align = get_pointer_alignment (src);
2958 unsigned int dest_align = get_pointer_alignment (dest);
2959 rtx dest_mem, src_mem, dest_addr, len_rtx;
2960 HOST_WIDE_INT expected_size = -1;
2961 unsigned int expected_align = 0;
2962 unsigned HOST_WIDE_INT min_size;
2963 unsigned HOST_WIDE_INT max_size;
2964 unsigned HOST_WIDE_INT probable_max_size;
2965
2966 /* If DEST is not a pointer type, call the normal function. */
2967 if (dest_align == 0)
2968 return NULL_RTX;
2969
2970 /* If SRC is not a pointer type, don't do this
2971 operation in-line. */
2972 if (src_align == 0)
2973 return NULL_RTX;
2974
2975 if (currently_expanding_gimple_stmt)
2976 stringop_block_profile (currently_expanding_gimple_stmt,
2977 &expected_align, &expected_size);
2978
2979 if (expected_align < dest_align)
2980 expected_align = dest_align;
2981 dest_mem = get_memory_rtx (dest, len);
2982 set_mem_align (dest_mem, dest_align);
2983 len_rtx = expand_normal (len);
2984 determine_block_size (len, len_rtx, &min_size, &max_size,
2985 &probable_max_size);
2986 src_str = c_getstr (src);
2987
2988 /* If SRC is a string constant and block move would be done
2989 by pieces, we can avoid loading the string from memory
2990 and only store the computed constants. */
2991 if (src_str
2992 && CONST_INT_P (len_rtx)
2993 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2994 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2995 CONST_CAST (char *, src_str),
2996 dest_align, false))
2997 {
2998 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2999 builtin_memcpy_read_str,
3000 CONST_CAST (char *, src_str),
3001 dest_align, false, 0);
3002 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3003 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3004 return dest_mem;
3005 }
3006
3007 src_mem = get_memory_rtx (src, len);
3008 set_mem_align (src_mem, src_align);
3009
3010 /* Copy word part most expediently. */
3011 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3012 CALL_EXPR_TAILCALL (exp)
3013 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3014 expected_align, expected_size,
3015 min_size, max_size, probable_max_size);
3016
3017 if (dest_addr == 0)
3018 {
3019 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3020 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3021 }
3022
3023 return dest_addr;
3024 }
3025
3026 /* Try to verify that the sizes and lengths of the arguments to a string
3027 manipulation function given by EXP are within valid bounds and that
3028 the operation does not lead to buffer overflow. Arguments other than
3029 EXP may be null. When non-null, the arguments have the following
3030 meaning:
3031 SIZE is the user-supplied size argument to the function (such as in
3032 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3033 number of bytes to write.
3034 MAXLEN is the user-supplied bound on the length of the source sequence
3035 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3036 of bytes to write.
3037 SRC is the source string (such as in strcpy(d, s)) when the expression
3038 EXP is a string function call (as opposed to a memory call like memcpy).
3039 As an exception, SRC can also be an integer denoting the precomputed
3040 size of the source string or object (for functions like memcpy).
3041 OBJSIZE is the size of the destination object specified by the last
3042 argument to the _chk builtins, typically resulting from the expansion
3043 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3044 OBJSIZE)).
3045
3046 When SIZE is null LEN is checked to verify that it doesn't exceed
3047 SIZE_MAX.
3048
3049 If the call is successfully verified as safe from buffer overflow
3050 the function returns true, otherwise false. */
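/* An illustrative case (hypothetical user code): for
     char d[3];
     strcpy (d, "abcd");
   the computed source length range makes RANGE[0] (5 bytes including the
   terminating nul) exceed the destination size 3, so a -Wstringop-overflow
   warning is issued and false is returned. */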
3051
3052 static bool
3053 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
3054 {
3055 /* The size of the largest object is half the address space, or
3056 SSIZE_MAX. (This is way too permissive.) */
3057 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3058
3059 tree slen = NULL_TREE;
3060
3061 tree range[2] = { NULL_TREE, NULL_TREE };
3062
3063 /* Set to true when the exact number of bytes written by a string
3064 function like strcpy is not known and the only thing that is
3065 known is that it must be at least one (for the terminating nul). */
3066 bool at_least_one = false;
3067 if (src)
3068 {
3069 /* SRC is normally a pointer to string but as a special case
3070 it can be an integer denoting the length of a string. */
3071 if (POINTER_TYPE_P (TREE_TYPE (src)))
3072 {
3073 /* Try to determine the range of lengths the source string
3074 refers to. If it can be determined and is less than
3075 the upper bound given by MAXLEN add one to it for
3076 the terminating nul. Otherwise, set it to one for
3077 the same reason, or to MAXLEN as appropriate. */
3078 get_range_strlen (src, range);
3079 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3080 {
3081 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3082 range[0] = range[1] = maxlen;
3083 else
3084 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3085 range[0], size_one_node);
3086
3087 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3088 range[1] = maxlen;
3089 else if (!integer_all_onesp (range[1]))
3090 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3091 range[1], size_one_node);
3092
3093 slen = range[0];
3094 }
3095 else
3096 {
3097 at_least_one = true;
3098 slen = size_one_node;
3099 }
3100 }
3101 else
3102 slen = src;
3103 }
3104
3105 if (!size && !maxlen)
3106 {
3107 /* When the only available piece of data is the object size
3108 there is nothing to do. */
3109 if (!slen)
3110 return true;
3111
3112 /* Otherwise, when the length of the source sequence is known
3113 (as with strlen), set SIZE to it. */
3114 if (!range[0])
3115 size = slen;
3116 }
3117
3118 if (!objsize)
3119 objsize = maxobjsize;
3120
3121 /* The SIZE is exact if it's non-null, constant, and in range of
3122 unsigned HOST_WIDE_INT. */
3123 bool exactsize = size && tree_fits_uhwi_p (size);
3124
3125 if (size)
3126 get_size_range (size, range);
3127
3128 /* First check the number of bytes to be written against the maximum
3129 object size. */
3130 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3131 {
3132 location_t loc = tree_nonartificial_location (exp);
3133 loc = expansion_point_location_if_in_system_header (loc);
3134
3135 if (range[0] == range[1])
3136 warning_at (loc, opt,
3137 "%K%qD specified size %E "
3138 "exceeds maximum object size %E",
3139 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3140 else
3141 warning_at (loc, opt,
3142 "%K%qD specified size between %E and %E "
3143 "exceeds maximum object size %E",
3144 exp, get_callee_fndecl (exp),
3145 range[0], range[1], maxobjsize);
3146 return false;
3147 }
3148
3149 /* Next check the number of bytes to be written against the destination
3150 object size. */
3151 if (range[0] || !exactsize || integer_all_onesp (size))
3152 {
3153 if (range[0]
3154 && ((tree_fits_uhwi_p (objsize)
3155 && tree_int_cst_lt (objsize, range[0]))
3156 || (tree_fits_uhwi_p (size)
3157 && tree_int_cst_lt (size, range[0]))))
3158 {
3159 location_t loc = tree_nonartificial_location (exp);
3160 loc = expansion_point_location_if_in_system_header (loc);
3161
3162 if (size == slen && at_least_one)
3163 {
3164 /* This is a call to strcpy with a destination of 0 size
3165 and a source of unknown length. The call will write
3166 at least one byte past the end of the destination. */
3167 warning_at (loc, opt,
3168 "%K%qD writing %E or more bytes into a region "
3169 "of size %E overflows the destination",
3170 exp, get_callee_fndecl (exp), range[0], objsize);
3171 }
3172 else if (tree_int_cst_equal (range[0], range[1]))
3173 warning_at (loc, opt,
3174 (integer_onep (range[0])
3175 ? G_("%K%qD writing %E byte into a region "
3176 "of size %E overflows the destination")
3177 : G_("%K%qD writing %E bytes into a region "
3178 "of size %E overflows the destination")),
3179 exp, get_callee_fndecl (exp), range[0], objsize);
3180 else if (tree_int_cst_sign_bit (range[1]))
3181 {
3182 /* Avoid printing the upper bound if it's invalid. */
3183 warning_at (loc, opt,
3184 "%K%qD writing %E or more bytes into a region "
3185 "of size %E overflows the destination",
3186 exp, get_callee_fndecl (exp), range[0], objsize);
3187 }
3188 else
3189 warning_at (loc, opt,
3190 "%K%qD writing between %E and %E bytes into "
3191 "a region of size %E overflows the destination",
3192 exp, get_callee_fndecl (exp), range[0], range[1],
3193 objsize);
3194
3195 /* Return error when an overflow has been detected. */
3196 return false;
3197 }
3198 }
3199
3200 /* Check the maximum length of the source sequence against the size
3201 of the destination object if known, or against the maximum size
3202 of an object. */
3203 if (maxlen)
3204 {
3205 get_size_range (maxlen, range);
3206
3207 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3208 {
3209 location_t loc = tree_nonartificial_location (exp);
3210 loc = expansion_point_location_if_in_system_header (loc);
3211
3212 if (tree_int_cst_lt (maxobjsize, range[0]))
3213 {
3214 /* Warn about crazy big sizes first since that's more
3215 likely to be meaningful than saying that the bound
3216 is greater than the object size if both are big. */
3217 if (range[0] == range[1])
3218 warning_at (loc, opt,
3219 "%K%qD specified bound %E "
3220 "exceeds maximum object size %E",
3221 exp, get_callee_fndecl (exp),
3222 range[0], maxobjsize);
3223 else
3224 warning_at (loc, opt,
3225 "%K%qD specified bound between %E and %E "
3226 "exceeds maximum object size %E",
3227 exp, get_callee_fndecl (exp),
3228 range[0], range[1], maxobjsize);
3229
3230 return false;
3231 }
3232
3233 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3234 {
3235 if (tree_int_cst_equal (range[0], range[1]))
3236 warning_at (loc, opt,
3237 "%K%qD specified bound %E "
3238 "exceeds destination size %E",
3239 exp, get_callee_fndecl (exp),
3240 range[0], objsize);
3241 else
3242 warning_at (loc, opt,
3243 "%K%qD specified bound between %E and %E "
3244 "exceeds destination size %E",
3245 exp, get_callee_fndecl (exp),
3246 range[0], range[1], objsize);
3247 return false;
3248 }
3249 }
3250 }
3251
3252 if (slen
3253 && slen == src
3254 && size && range[0]
3255 && tree_int_cst_lt (slen, range[0]))
3256 {
3257 location_t loc = tree_nonartificial_location (exp);
3258
3259 if (tree_int_cst_equal (range[0], range[1]))
3260 warning_at (loc, opt,
3261 (tree_int_cst_equal (range[0], integer_one_node)
3262 ? G_("%K%qD reading %E byte from a region of size %E")
3263 : G_("%K%qD reading %E bytes from a region of size %E")),
3264 exp, get_callee_fndecl (exp), range[0], slen);
3265 else if (tree_int_cst_sign_bit (range[1]))
3266 {
3267 /* Avoid printing the upper bound if it's invalid. */
3268 warning_at (loc, opt,
3269 "%K%qD reading %E or more bytes from a region "
3270 "of size %E",
3271 exp, get_callee_fndecl (exp), range[0], slen);
3272 }
3273 else
3274 warning_at (loc, opt,
3275 "%K%qD reading between %E and %E bytes from a region "
3276 "of size %E",
3277 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3278 return false;
3279 }
3280
3281 return true;
3282 }
3283
3284 /* Helper to compute the size of the object referenced by the DEST
3285 expression, which must be of pointer type, using Object Size type
3286 OSTYPE (only the least significant 2 bits are used). Return
3287 the size of the object if successful or NULL when the size cannot
3288 be determined. */
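/* For example (illustrative): given char buf[8], compute_objsize (&buf[2], 0)
   would return the sizetype constant 6, matching what
   __builtin_object_size (&buf[2], 0) reports when it can be evaluated. */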
3289
3290 static inline tree
3291 compute_objsize (tree dest, int ostype)
3292 {
3293 unsigned HOST_WIDE_INT size;
3294 if (compute_builtin_object_size (dest, ostype & 3, &size))
3295 return build_int_cst (sizetype, size);
3296
3297 return NULL_TREE;
3298 }
3299
3300 /* Helper to determine and check the sizes of the source and the destination
3301 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3302 call expression, DEST is the destination argument, SRC is the source
3303 argument or null, and LEN is the number of bytes. Use Object Size type-0
3304 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3305 (no overflow or invalid sizes), false otherwise. */
3306
3307 static bool
3308 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3309 {
3310 if (!warn_stringop_overflow)
3311 return true;
3312
3313 /* For functions like memset and memcpy that operate on raw memory
3314 try to determine the size of the largest source and destination
3315 object using type-0 Object Size regardless of the object size
3316 type specified by the option. */
3317 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3318 tree dstsize = compute_objsize (dest, 0);
3319
3320 return check_sizes (OPT_Wstringop_overflow_, exp,
3321 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3322 }
3323
3324 /* Validate memchr arguments without performing any expansion.
3325 Return NULL_RTX. */
3326
3327 static rtx
3328 expand_builtin_memchr (tree exp, rtx)
3329 {
3330 if (!validate_arglist (exp,
3331 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3332 return NULL_RTX;
3333
3334 tree arg1 = CALL_EXPR_ARG (exp, 0);
3335 tree len = CALL_EXPR_ARG (exp, 2);
3336
3337 /* Diagnose calls where the specified length exceeds the size
3338 of the object. */
3339 if (warn_stringop_overflow)
3340 {
3341 tree size = compute_objsize (arg1, 0);
3342 check_sizes (OPT_Wstringop_overflow_,
3343 exp, len, /*maxlen=*/NULL_TREE,
3344 size, /*objsize=*/NULL_TREE);
3345 }
3346
3347 return NULL_RTX;
3348 }
3349
3350 /* Expand a call EXP to the memcpy builtin.
3351 Return NULL_RTX if we failed, the caller should emit a normal call,
3352 otherwise try to get the result in TARGET, if convenient (and in
3353 mode MODE if that's convenient). */
3354
3355 static rtx
3356 expand_builtin_memcpy (tree exp, rtx target)
3357 {
3358 if (!validate_arglist (exp,
3359 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3360 return NULL_RTX;
3361
3362 tree dest = CALL_EXPR_ARG (exp, 0);
3363 tree src = CALL_EXPR_ARG (exp, 1);
3364 tree len = CALL_EXPR_ARG (exp, 2);
3365
3366 check_memop_sizes (exp, dest, src, len);
3367
3368 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3369 }
3370
3371 /* Check a call EXP to the memmove built-in for validity.
3372 Return NULL_RTX on both success and failure. */
3373
3374 static rtx
3375 expand_builtin_memmove (tree exp, rtx)
3376 {
3377 if (!validate_arglist (exp,
3378 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3379 return NULL_RTX;
3380
3381 tree dest = CALL_EXPR_ARG (exp, 0);
3382 tree src = CALL_EXPR_ARG (exp, 1);
3383 tree len = CALL_EXPR_ARG (exp, 2);
3384
3385 check_memop_sizes (exp, dest, src, len);
3386
3387 return NULL_RTX;
3388 }
3389
3390 /* Expand an instrumented call EXP to the memcpy builtin.
3391 Return NULL_RTX if we failed, the caller should emit a normal call,
3392 otherwise try to get the result in TARGET, if convenient (and in
3393 mode MODE if that's convenient). */
3394
3395 static rtx
3396 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3397 {
3398 if (!validate_arglist (exp,
3399 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3400 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3401 INTEGER_TYPE, VOID_TYPE))
3402 return NULL_RTX;
3403 else
3404 {
3405 tree dest = CALL_EXPR_ARG (exp, 0);
3406 tree src = CALL_EXPR_ARG (exp, 2);
3407 tree len = CALL_EXPR_ARG (exp, 4);
3408 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3409
3410 /* Return src bounds with the result. */
3411 if (res)
3412 {
3413 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3414 expand_normal (CALL_EXPR_ARG (exp, 1)));
3415 res = chkp_join_splitted_slot (res, bnd);
3416 }
3417 return res;
3418 }
3419 }
3420
3421 /* Expand a call EXP to the mempcpy builtin.
3422 Return NULL_RTX if we failed; the caller should emit a normal call,
3423 otherwise try to get the result in TARGET, if convenient (and in
3424 mode MODE if that's convenient). If ENDP is 0 return the
3425 destination pointer, if ENDP is 1 return the end pointer ala
3426 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3427 stpcpy. */
3428
3429 static rtx
3430 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3431 {
3432 if (!validate_arglist (exp,
3433 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3434 return NULL_RTX;
3435
3436 tree dest = CALL_EXPR_ARG (exp, 0);
3437 tree src = CALL_EXPR_ARG (exp, 1);
3438 tree len = CALL_EXPR_ARG (exp, 2);
3439
3440 /* Avoid expanding mempcpy into memcpy when the call is determined
3441 to overflow the buffer. This also prevents the same overflow
3442 from being diagnosed again when expanding memcpy. */
3443 if (!check_memop_sizes (exp, dest, src, len))
3444 return NULL_RTX;
3445
3446 return expand_builtin_mempcpy_args (dest, src, len,
3447 target, mode, /*endp=*/ 1,
3448 exp);
3449 }
3450
3451 /* Expand an instrumented call EXP to the mempcpy builtin.
3452 Return NULL_RTX if we failed, the caller should emit a normal call,
3453 otherwise try to get the result in TARGET, if convenient (and in
3454 mode MODE if that's convenient). */
3455
3456 static rtx
3457 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3458 {
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3461 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3462 INTEGER_TYPE, VOID_TYPE))
3463 return NULL_RTX;
3464 else
3465 {
3466 tree dest = CALL_EXPR_ARG (exp, 0);
3467 tree src = CALL_EXPR_ARG (exp, 2);
3468 tree len = CALL_EXPR_ARG (exp, 4);
3469 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3470 mode, 1, exp);
3471
3472 /* Return src bounds with the result. */
3473 if (res)
3474 {
3475 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3476 expand_normal (CALL_EXPR_ARG (exp, 1)));
3477 res = chkp_join_splitted_slot (res, bnd);
3478 }
3479 return res;
3480 }
3481 }
3482
3483 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3484 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3485 so that this can also be called without constructing an actual CALL_EXPR.
3486 The other arguments and return value are the same as for
3487 expand_builtin_mempcpy. */
3488
3489 static rtx
3490 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3491 rtx target, machine_mode mode, int endp,
3492 tree orig_exp)
3493 {
3494 tree fndecl = get_callee_fndecl (orig_exp);
3495
3496 /* If return value is ignored, transform mempcpy into memcpy. */
3497 if (target == const0_rtx
3498 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3499 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3500 {
3501 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3502 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3503 dest, src, len);
3504 return expand_expr (result, target, mode, EXPAND_NORMAL);
3505 }
3506 else if (target == const0_rtx
3507 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3508 {
3509 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3510 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3511 dest, src, len);
3512 return expand_expr (result, target, mode, EXPAND_NORMAL);
3513 }
3514 else
3515 {
3516 const char *src_str;
3517 unsigned int src_align = get_pointer_alignment (src);
3518 unsigned int dest_align = get_pointer_alignment (dest);
3519 rtx dest_mem, src_mem, len_rtx;
3520
3521 /* If either SRC or DEST is not a pointer type, don't do this
3522 operation in-line. */
3523 if (dest_align == 0 || src_align == 0)
3524 return NULL_RTX;
3525
3526 /* If LEN is not constant, call the normal function. */
3527 if (! tree_fits_uhwi_p (len))
3528 return NULL_RTX;
3529
3530 len_rtx = expand_normal (len);
3531 src_str = c_getstr (src);
3532
3533 /* If SRC is a string constant and block move would be done
3534 by pieces, we can avoid loading the string from memory
3535 and only store the computed constants. */
3536 if (src_str
3537 && CONST_INT_P (len_rtx)
3538 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3539 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3540 CONST_CAST (char *, src_str),
3541 dest_align, false))
3542 {
3543 dest_mem = get_memory_rtx (dest, len);
3544 set_mem_align (dest_mem, dest_align);
3545 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3546 builtin_memcpy_read_str,
3547 CONST_CAST (char *, src_str),
3548 dest_align, false, endp);
3549 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3550 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3551 return dest_mem;
3552 }
3553
3554 if (CONST_INT_P (len_rtx)
3555 && can_move_by_pieces (INTVAL (len_rtx),
3556 MIN (dest_align, src_align)))
3557 {
3558 dest_mem = get_memory_rtx (dest, len);
3559 set_mem_align (dest_mem, dest_align);
3560 src_mem = get_memory_rtx (src, len);
3561 set_mem_align (src_mem, src_align);
3562 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3563 MIN (dest_align, src_align), endp);
3564 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3565 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3566 return dest_mem;
3567 }
3568
3569 return NULL_RTX;
3570 }
3571 }
3572
3573 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3574 we failed, the caller should emit a normal call, otherwise try to
3575 get the result in TARGET, if convenient. If ENDP is 0 return the
3576 destination pointer, if ENDP is 1 return the end pointer ala
3577 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3578 stpcpy. */
3579
3580 static rtx
3581 expand_movstr (tree dest, tree src, rtx target, int endp)
3582 {
3583 struct expand_operand ops[3];
3584 rtx dest_mem;
3585 rtx src_mem;
3586
3587 if (!targetm.have_movstr ())
3588 return NULL_RTX;
3589
3590 dest_mem = get_memory_rtx (dest, NULL);
3591 src_mem = get_memory_rtx (src, NULL);
3592 if (!endp)
3593 {
3594 target = force_reg (Pmode, XEXP (dest_mem, 0));
3595 dest_mem = replace_equiv_address (dest_mem, target);
3596 }
3597
3598 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3599 create_fixed_operand (&ops[1], dest_mem);
3600 create_fixed_operand (&ops[2], src_mem);
3601 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3602 return NULL_RTX;
3603
3604 if (endp && target != const0_rtx)
3605 {
3606 target = ops[0].value;
3607 /* movstr is supposed to set end to the address of the NUL
3608 terminator. If the caller requested a mempcpy-like return value,
3609 adjust it. */
3610 if (endp == 1)
3611 {
3612 rtx tem = plus_constant (GET_MODE (target),
3613 gen_lowpart (GET_MODE (target), target), 1);
3614 emit_move_insn (target, force_operand (tem, NULL_RTX));
3615 }
3616 }
3617 return target;
3618 }
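
/* Editor's note, a sketch rather than original text, on the ENDP convention
   used above and by the mempcpy/stpcpy expanders: when the string "ab"
   (three bytes including the NUL) is copied into D, ENDP == 0 yields D
   (strcpy), ENDP == 2 yields D + 2, the address of the NUL terminator
   (stpcpy), and ENDP == 1 yields D + 3, one past the last byte written
   (mempcpy); hence the plus_constant adjustment above when a mempcpy-style
   value is requested from a movstr pattern.  */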
3619
3620 /* Do some very basic size validation of a call to the strcat builtin
3621 given by EXP. Return NULL_RTX to have the built-in expand to a call
3622 to the library function. */
3623
3624 static rtx
3625 expand_builtin_strcat (tree exp, rtx)
3626 {
3627 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3628 || !warn_stringop_overflow)
3629 return NULL_RTX;
3630
3631 tree dest = CALL_EXPR_ARG (exp, 0);
3632 tree src = CALL_EXPR_ARG (exp, 1);
3633
3634 /* There is no way here to determine the length of the string in
3635 the destination to which the SRC string is being appended, so
3636 just diagnose cases when the source string is longer than
3637 the destination object. */
3638
3639 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3640
3641 check_sizes (OPT_Wstringop_overflow_,
3642 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3643
3644 return NULL_RTX;
3645 }
3646
3647 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3648 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3649 try to get the result in TARGET, if convenient (and in mode MODE if that's
3650 convenient). */
3651
3652 static rtx
3653 expand_builtin_strcpy (tree exp, rtx target)
3654 {
3655 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3656 return NULL_RTX;
3657
3658 tree dest = CALL_EXPR_ARG (exp, 0);
3659 tree src = CALL_EXPR_ARG (exp, 1);
3660
3661 if (warn_stringop_overflow)
3662 {
3663 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3664 check_sizes (OPT_Wstringop_overflow_,
3665 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3666 }
3667
3668 return expand_builtin_strcpy_args (dest, src, target);
3669 }
3670
3671 /* Helper function to do the actual work for expand_builtin_strcpy. The
3672 arguments to the builtin_strcpy call DEST and SRC are broken out
3673 so that this can also be called without constructing an actual CALL_EXPR.
3674 The other arguments and return value are the same as for
3675 expand_builtin_strcpy. */
3676
3677 static rtx
3678 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3679 {
3680 return expand_movstr (dest, src, target, /*endp=*/0);
3681 }
3682
3683 /* Expand a call EXP to the stpcpy builtin.
3684 Return NULL_RTX if we failed; the caller should emit a normal call,
3685 otherwise try to get the result in TARGET, if convenient (and in
3686 mode MODE if that's convenient). */
3687
3688 static rtx
3689 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3690 {
3691 tree dst, src;
3692 location_t loc = EXPR_LOCATION (exp);
3693
3694 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3695 return NULL_RTX;
3696
3697 dst = CALL_EXPR_ARG (exp, 0);
3698 src = CALL_EXPR_ARG (exp, 1);
3699
3700 if (warn_stringop_overflow)
3701 {
3702 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3703 check_sizes (OPT_Wstringop_overflow_,
3704 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3705 }
3706
3707 /* If return value is ignored, transform stpcpy into strcpy. */
3708 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3709 {
3710 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3711 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3712 return expand_expr (result, target, mode, EXPAND_NORMAL);
3713 }
3714 else
3715 {
3716 tree len, lenp1;
3717 rtx ret;
3718
3719 /* Ensure we get an actual string whose length can be evaluated at
3720 compile-time, not an expression containing a string. This is
3721 because the latter will potentially produce pessimized code
3722 when used to produce the return value. */
3723 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3724 return expand_movstr (dst, src, target, /*endp=*/2);
3725
3726 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3727 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3728 target, mode, /*endp=*/2,
3729 exp);
3730
3731 if (ret)
3732 return ret;
3733
3734 if (TREE_CODE (len) == INTEGER_CST)
3735 {
3736 rtx len_rtx = expand_normal (len);
3737
3738 if (CONST_INT_P (len_rtx))
3739 {
3740 ret = expand_builtin_strcpy_args (dst, src, target);
3741
3742 if (ret)
3743 {
3744 if (! target)
3745 {
3746 if (mode != VOIDmode)
3747 target = gen_reg_rtx (mode);
3748 else
3749 target = gen_reg_rtx (GET_MODE (ret));
3750 }
3751 if (GET_MODE (target) != GET_MODE (ret))
3752 ret = gen_lowpart (GET_MODE (target), ret);
3753
3754 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3755 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3756 gcc_assert (ret);
3757
3758 return target;
3759 }
3760 }
3761 }
3762
3763 return expand_movstr (dst, src, target, /*endp=*/2);
3764 }
3765 }
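
/* Editor's sketch, not part of the original source, of the two
   transformations performed above: with the result unused,

     __builtin_stpcpy (d, s);

   is emitted as a plain strcpy call, while with a constant source such as

     char *e = __builtin_stpcpy (d, "ab");

   the copy is expanded as mempcpy (d, "ab", 3) with ENDP == 2, so E ends up
   pointing at the NUL terminator that was just written.  */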
3766
3767 /* Check a call EXP to the stpncpy built-in for validity.
3768 Return NULL_RTX on both success and failure. */
3769
3770 static rtx
3771 expand_builtin_stpncpy (tree exp, rtx)
3772 {
3773 if (!validate_arglist (exp,
3774 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3775 || !warn_stringop_overflow)
3776 return NULL_RTX;
3777
3778 /* The source and destination of the call. */
3779 tree dest = CALL_EXPR_ARG (exp, 0);
3780 tree src = CALL_EXPR_ARG (exp, 1);
3781
3782 /* The exact number of bytes to write (not the maximum). */
3783 tree len = CALL_EXPR_ARG (exp, 2);
3784
3785 /* The size of the destination object. */
3786 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3787
3788 check_sizes (OPT_Wstringop_overflow_,
3789 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3790
3791 return NULL_RTX;
3792 }
3793
3794 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3795 bytes from constant string DATA + OFFSET and return it as target
3796 constant. */
3797
3798 rtx
3799 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3800 machine_mode mode)
3801 {
3802 const char *str = (const char *) data;
3803
3804 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3805 return const0_rtx;
3806
3807 return c_readstr (str + offset, mode);
3808 }
3809
3810 /* Helper to check the sizes of sequences and the destination of calls
3811 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3812 success (no overflow or invalid sizes), false otherwise. */
3813
3814 static bool
3815 check_strncat_sizes (tree exp, tree objsize)
3816 {
3817 tree dest = CALL_EXPR_ARG (exp, 0);
3818 tree src = CALL_EXPR_ARG (exp, 1);
3819 tree maxlen = CALL_EXPR_ARG (exp, 2);
3820
3821 /* Try to determine the range of lengths that the source expression
3822 refers to. */
3823 tree lenrange[2];
3824 get_range_strlen (src, lenrange);
3825
3826 /* Try to verify that the destination is big enough for the shortest
3827 string. */
3828
3829 if (!objsize && warn_stringop_overflow)
3830 {
3831 /* If it hasn't been provided by __strncat_chk, try to determine
3832 the size of the destination object into which the source is
3833 being copied. */
3834 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3835 }
3836
3837 /* Add one for the terminating nul. */
3838 tree srclen = (lenrange[0]
3839 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3840 size_one_node)
3841 : NULL_TREE);
3842
3843 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3844 nul so the specified upper bound should never be equal to (or greater
3845 than) the size of the destination. */
3846 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3847 && tree_int_cst_equal (objsize, maxlen))
3848 {
3849 location_t loc = tree_nonartificial_location (exp);
3850 loc = expansion_point_location_if_in_system_header (loc);
3851
3852 warning_at (loc, OPT_Wstringop_overflow_,
3853 "%K%qD specified bound %E equals destination size",
3854 exp, get_callee_fndecl (exp), maxlen);
3855
3856 return false;
3857 }
3858
3859 if (!srclen
3860 || (maxlen && tree_fits_uhwi_p (maxlen)
3861 && tree_fits_uhwi_p (srclen)
3862 && tree_int_cst_lt (maxlen, srclen)))
3863 srclen = maxlen;
3864
3865 /* The number of bytes to write is LEN but check_sizes will also
3866 check SRCLEN if LEN's value isn't known. */
3867 return check_sizes (OPT_Wstringop_overflow_,
3868 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3869 }
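
/* Editor's illustration, not from the original source, assuming
   -Wstringop-overflow is enabled: because strncat always appends a NUL,

     char d[8];
     __builtin_strncat (d, s, sizeof d);

   is diagnosed by the helper above as a bound that equals the destination
   size, and false is returned so the checked expansion is not attempted.  */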
3870
3871 /* Similar to expand_builtin_strcat, do some very basic size validation
3872 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3873 the built-in expand to a call to the library function. */
3874
3875 static rtx
3876 expand_builtin_strncat (tree exp, rtx)
3877 {
3878 if (!validate_arglist (exp,
3879 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3880 || !warn_stringop_overflow)
3881 return NULL_RTX;
3882
3883 tree dest = CALL_EXPR_ARG (exp, 0);
3884 tree src = CALL_EXPR_ARG (exp, 1);
3885 /* The upper bound on the number of bytes to write. */
3886 tree maxlen = CALL_EXPR_ARG (exp, 2);
3887 /* The length of the source sequence. */
3888 tree slen = c_strlen (src, 1);
3889
3890 /* Try to determine the range of lengths that the source expression
3891 refers to. */
3892 tree lenrange[2];
3893 if (slen)
3894 lenrange[0] = lenrange[1] = slen;
3895 else
3896 get_range_strlen (src, lenrange);
3897
3898 /* Try to verify that the destination is big enough for the shortest
3899 string. First try to determine the size of the destination object
3900 into which the source is being copied. */
3901 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3902
3903 /* Add one for the terminating nul. */
3904 tree srclen = (lenrange[0]
3905 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3906 size_one_node)
3907 : NULL_TREE);
3908
3909 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3910 nul so the specified upper bound should never be equal to (or greater
3911 than) the size of the destination. */
3912 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3913 && tree_int_cst_equal (destsize, maxlen))
3914 {
3915 location_t loc = tree_nonartificial_location (exp);
3916 loc = expansion_point_location_if_in_system_header (loc);
3917
3918 warning_at (loc, OPT_Wstringop_overflow_,
3919 "%K%qD specified bound %E equals destination size",
3920 exp, get_callee_fndecl (exp), maxlen);
3921
3922 return NULL_RTX;
3923 }
3924
3925 if (!srclen
3926 || (maxlen && tree_fits_uhwi_p (maxlen)
3927 && tree_fits_uhwi_p (srclen)
3928 && tree_int_cst_lt (maxlen, srclen)))
3929 srclen = maxlen;
3930
3931 /* The number of bytes to write is LEN but check_sizes will also
3932 check SRCLEN if LEN's value isn't known. */
3933 check_sizes (OPT_Wstringop_overflow_,
3934 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3935
3936 return NULL_RTX;
3937 }
3938
3939 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3940 NULL_RTX if we failed; the caller should emit a normal call. */
3941
3942 static rtx
3943 expand_builtin_strncpy (tree exp, rtx target)
3944 {
3945 location_t loc = EXPR_LOCATION (exp);
3946
3947 if (validate_arglist (exp,
3948 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3949 {
3950 tree dest = CALL_EXPR_ARG (exp, 0);
3951 tree src = CALL_EXPR_ARG (exp, 1);
3952 /* The number of bytes to write (not the maximum). */
3953 tree len = CALL_EXPR_ARG (exp, 2);
3954 /* The length of the source sequence. */
3955 tree slen = c_strlen (src, 1);
3956
3957 if (warn_stringop_overflow)
3958 {
3959 tree destsize = compute_objsize (dest,
3960 warn_stringop_overflow - 1);
3961
3962 /* The number of bytes to write is LEN but check_sizes will also
3963 check SLEN if LEN's value isn't known. */
3964 check_sizes (OPT_Wstringop_overflow_,
3965 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3966 }
3967
3968 /* We must be passed a constant len and src parameter. */
3969 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3970 return NULL_RTX;
3971
3972 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3973
3974 /* We're required to pad with trailing zeros if the requested
3975 len is greater than strlen(s2)+1. In that case try to
3976 use store_by_pieces; if that fails, punt. */
3977 if (tree_int_cst_lt (slen, len))
3978 {
3979 unsigned int dest_align = get_pointer_alignment (dest);
3980 const char *p = c_getstr (src);
3981 rtx dest_mem;
3982
3983 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3984 || !can_store_by_pieces (tree_to_uhwi (len),
3985 builtin_strncpy_read_str,
3986 CONST_CAST (char *, p),
3987 dest_align, false))
3988 return NULL_RTX;
3989
3990 dest_mem = get_memory_rtx (dest, len);
3991 store_by_pieces (dest_mem, tree_to_uhwi (len),
3992 builtin_strncpy_read_str,
3993 CONST_CAST (char *, p), dest_align, false, 0);
3994 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3995 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3996 return dest_mem;
3997 }
3998 }
3999 return NULL_RTX;
4000 }
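
/* Editor's sketch, not part of the original source, of the padding case
   handled above: with a constant source and length, e.g.

     char d[8];
     __builtin_strncpy (d, "ab", sizeof d);

   strlen ("ab") + 1 == 3 is less than 8, so the expansion must store 'a',
   'b' and six NUL bytes; builtin_strncpy_read_str supplies zero constants
   once OFFSET moves past the end of the source string, which is what lets
   store_by_pieces emit the whole store sequence.  */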
4001
4002 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4003 bytes from constant string DATA + OFFSET and return it as target
4004 constant. */
4005
4006 rtx
4007 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4008 machine_mode mode)
4009 {
4010 const char *c = (const char *) data;
4011 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4012
4013 memset (p, *c, GET_MODE_SIZE (mode));
4014
4015 return c_readstr (p, mode);
4016 }
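
/* Editor's example, not from the original source: for a 4-byte MODE and a
   DATA byte of 0x41, the callback above fills a temporary buffer with
   "AAAA" and hands it to c_readstr, yielding the constant 0x41414141.  */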
4017
4018 /* Callback routine for store_by_pieces. Return the RTL of a register
4019 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4020 char value given in the RTL register data. For example, if mode is
4021 4 bytes wide, return the RTL for 0x01010101*data. */
4022
4023 static rtx
4024 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4025 machine_mode mode)
4026 {
4027 rtx target, coeff;
4028 size_t size;
4029 char *p;
4030
4031 size = GET_MODE_SIZE (mode);
4032 if (size == 1)
4033 return (rtx) data;
4034
4035 p = XALLOCAVEC (char, size);
4036 memset (p, 1, size);
4037 coeff = c_readstr (p, mode);
4038
4039 target = convert_to_mode (mode, (rtx) data, 1);
4040 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4041 return force_reg (mode, target);
4042 }
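
/* Editor's example, not from the original source: for a 4-byte MODE the
   routine above effectively computes

     target = (val & 0xff) * 0x01010101

   at run time, so a non-constant memset value is still replicated into
   every byte of each word that store_by_pieces writes.  */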
4043
4044 /* Expand expression EXP, which is a call to the memset builtin. Return
4045 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4046 try to get the result in TARGET, if convenient (and in mode MODE if that's
4047 convenient). */
4048
4049 static rtx
4050 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4051 {
4052 if (!validate_arglist (exp,
4053 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4054 return NULL_RTX;
4055
4056 tree dest = CALL_EXPR_ARG (exp, 0);
4057 tree val = CALL_EXPR_ARG (exp, 1);
4058 tree len = CALL_EXPR_ARG (exp, 2);
4059
4060 check_memop_sizes (exp, dest, NULL_TREE, len);
4061
4062 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4063 }
4064
4065 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4066 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4067 try to get the result in TARGET, if convenient (and in mode MODE if that's
4068 convenient). */
4069
4070 static rtx
4071 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4072 {
4073 if (!validate_arglist (exp,
4074 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4075 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4076 return NULL_RTX;
4077 else
4078 {
4079 tree dest = CALL_EXPR_ARG (exp, 0);
4080 tree val = CALL_EXPR_ARG (exp, 2);
4081 tree len = CALL_EXPR_ARG (exp, 3);
4082 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4083
4084 /* Return src bounds with the result. */
4085 if (res)
4086 {
4087 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4088 expand_normal (CALL_EXPR_ARG (exp, 1)));
4089 res = chkp_join_splitted_slot (res, bnd);
4090 }
4091 return res;
4092 }
4093 }
4094
4095 /* Helper function to do the actual work for expand_builtin_memset. The
4096 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4097 so that this can also be called without constructing an actual CALL_EXPR.
4098 The other arguments and return value are the same as for
4099 expand_builtin_memset. */
4100
4101 static rtx
4102 expand_builtin_memset_args (tree dest, tree val, tree len,
4103 rtx target, machine_mode mode, tree orig_exp)
4104 {
4105 tree fndecl, fn;
4106 enum built_in_function fcode;
4107 machine_mode val_mode;
4108 char c;
4109 unsigned int dest_align;
4110 rtx dest_mem, dest_addr, len_rtx;
4111 HOST_WIDE_INT expected_size = -1;
4112 unsigned int expected_align = 0;
4113 unsigned HOST_WIDE_INT min_size;
4114 unsigned HOST_WIDE_INT max_size;
4115 unsigned HOST_WIDE_INT probable_max_size;
4116
4117 dest_align = get_pointer_alignment (dest);
4118
4119 /* If DEST is not a pointer type, don't do this operation in-line. */
4120 if (dest_align == 0)
4121 return NULL_RTX;
4122
4123 if (currently_expanding_gimple_stmt)
4124 stringop_block_profile (currently_expanding_gimple_stmt,
4125 &expected_align, &expected_size);
4126
4127 if (expected_align < dest_align)
4128 expected_align = dest_align;
4129
4130 /* If the LEN parameter is zero, return DEST. */
4131 if (integer_zerop (len))
4132 {
4133 /* Evaluate and ignore VAL in case it has side-effects. */
4134 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4135 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4136 }
4137
4138 /* Stabilize the arguments in case we fail. */
4139 dest = builtin_save_expr (dest);
4140 val = builtin_save_expr (val);
4141 len = builtin_save_expr (len);
4142
4143 len_rtx = expand_normal (len);
4144 determine_block_size (len, len_rtx, &min_size, &max_size,
4145 &probable_max_size);
4146 dest_mem = get_memory_rtx (dest, len);
4147 val_mode = TYPE_MODE (unsigned_char_type_node);
4148
4149 if (TREE_CODE (val) != INTEGER_CST)
4150 {
4151 rtx val_rtx;
4152
4153 val_rtx = expand_normal (val);
4154 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4155
4156 /* Assume that we can memset by pieces if we can store
4157 the coefficients by pieces (in the required modes).
4158 We can't pass builtin_memset_gen_str as that emits RTL. */
4159 c = 1;
4160 if (tree_fits_uhwi_p (len)
4161 && can_store_by_pieces (tree_to_uhwi (len),
4162 builtin_memset_read_str, &c, dest_align,
4163 true))
4164 {
4165 val_rtx = force_reg (val_mode, val_rtx);
4166 store_by_pieces (dest_mem, tree_to_uhwi (len),
4167 builtin_memset_gen_str, val_rtx, dest_align,
4168 true, 0);
4169 }
4170 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4171 dest_align, expected_align,
4172 expected_size, min_size, max_size,
4173 probable_max_size))
4174 goto do_libcall;
4175
4176 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4177 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4178 return dest_mem;
4179 }
4180
4181 if (target_char_cast (val, &c))
4182 goto do_libcall;
4183
4184 if (c)
4185 {
4186 if (tree_fits_uhwi_p (len)
4187 && can_store_by_pieces (tree_to_uhwi (len),
4188 builtin_memset_read_str, &c, dest_align,
4189 true))
4190 store_by_pieces (dest_mem, tree_to_uhwi (len),
4191 builtin_memset_read_str, &c, dest_align, true, 0);
4192 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4193 gen_int_mode (c, val_mode),
4194 dest_align, expected_align,
4195 expected_size, min_size, max_size,
4196 probable_max_size))
4197 goto do_libcall;
4198
4199 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4200 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4201 return dest_mem;
4202 }
4203
4204 set_mem_align (dest_mem, dest_align);
4205 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4206 CALL_EXPR_TAILCALL (orig_exp)
4207 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4208 expected_align, expected_size,
4209 min_size, max_size,
4210 probable_max_size);
4211
4212 if (dest_addr == 0)
4213 {
4214 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4215 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4216 }
4217
4218 return dest_addr;
4219
4220 do_libcall:
4221 fndecl = get_callee_fndecl (orig_exp);
4222 fcode = DECL_FUNCTION_CODE (fndecl);
4223 if (fcode == BUILT_IN_MEMSET
4224 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4225 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4226 dest, val, len);
4227 else if (fcode == BUILT_IN_BZERO)
4228 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4229 dest, len);
4230 else
4231 gcc_unreachable ();
4232 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4233 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4234 return expand_call (fn, target, target == const0_rtx);
4235 }
4236
4237 /* Expand expression EXP, which is a call to the bzero builtin. Return
4238 NULL_RTX if we failed; the caller should emit a normal call. */
4239
4240 static rtx
4241 expand_builtin_bzero (tree exp)
4242 {
4243 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4244 return NULL_RTX;
4245
4246 tree dest = CALL_EXPR_ARG (exp, 0);
4247 tree size = CALL_EXPR_ARG (exp, 1);
4248
4249 check_memop_sizes (exp, dest, NULL_TREE, size);
4250
4251 /* New argument list transforming bzero(ptr x, int y) to
4252 memset(ptr x, int 0, size_t y). This is done this way
4253 so that if it isn't expanded inline, we fall back to
4254 calling bzero instead of memset. */
4255
4256 location_t loc = EXPR_LOCATION (exp);
4257
4258 return expand_builtin_memset_args (dest, integer_zero_node,
4259 fold_convert_loc (loc,
4260 size_type_node, size),
4261 const0_rtx, VOIDmode, exp);
4262 }
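
/* Editor's sketch, not part of the original source, of the rewrite above:

     __builtin_bzero (p, n);

   is expanded exactly like

     __builtin_memset (p, 0, (size_t) n);

   but, by going through expand_builtin_memset_args directly, any fallback
   library call still targets bzero rather than memset.  */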
4263
4264 /* Try to expand cmpstr operation ICODE with the given operands.
4265 Return the result rtx on success, otherwise return null. */
4266
4267 static rtx
4268 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4269 HOST_WIDE_INT align)
4270 {
4271 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4272
4273 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4274 target = NULL_RTX;
4275
4276 struct expand_operand ops[4];
4277 create_output_operand (&ops[0], target, insn_mode);
4278 create_fixed_operand (&ops[1], arg1_rtx);
4279 create_fixed_operand (&ops[2], arg2_rtx);
4280 create_integer_operand (&ops[3], align);
4281 if (maybe_expand_insn (icode, 4, ops))
4282 return ops[0].value;
4283 return NULL_RTX;
4284 }
4285
4286 /* Expand expression EXP, which is a call to the memcmp built-in function.
4287 Return NULL_RTX if we failed and the caller should emit a normal call,
4288 otherwise try to get the result in TARGET, if convenient.
4289 RESULT_EQ is true if we can relax the returned value to be either zero
4290 or nonzero, without caring about the sign. */
4291
4292 static rtx
4293 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4294 {
4295 if (!validate_arglist (exp,
4296 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4297 return NULL_RTX;
4298
4299 tree arg1 = CALL_EXPR_ARG (exp, 0);
4300 tree arg2 = CALL_EXPR_ARG (exp, 1);
4301 tree len = CALL_EXPR_ARG (exp, 2);
4302
4303 /* Diagnose calls where the specified length exceeds the size of either
4304 object. */
4305 if (warn_stringop_overflow)
4306 {
4307 tree size = compute_objsize (arg1, 0);
4308 if (check_sizes (OPT_Wstringop_overflow_,
4309 exp, len, /*maxlen=*/NULL_TREE,
4310 size, /*objsize=*/NULL_TREE))
4311 {
4312 size = compute_objsize (arg2, 0);
4313 check_sizes (OPT_Wstringop_overflow_,
4314 exp, len, /*maxlen=*/NULL_TREE,
4315 size, /*objsize=*/NULL_TREE);
4316 }
4317 }
4318
4319 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4320 location_t loc = EXPR_LOCATION (exp);
4321
4322 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4323 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4324
4325 /* If we don't have POINTER_TYPE, call the function. */
4326 if (arg1_align == 0 || arg2_align == 0)
4327 return NULL_RTX;
4328
4329 rtx arg1_rtx = get_memory_rtx (arg1, len);
4330 rtx arg2_rtx = get_memory_rtx (arg2, len);
4331 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4332
4333 /* Set MEM_SIZE as appropriate. */
4334 if (CONST_INT_P (len_rtx))
4335 {
4336 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4337 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4338 }
4339
4340 by_pieces_constfn constfn = NULL;
4341
4342 const char *src_str = c_getstr (arg2);
4343 if (result_eq && src_str == NULL)
4344 {
4345 src_str = c_getstr (arg1);
4346 if (src_str != NULL)
4347 std::swap (arg1_rtx, arg2_rtx);
4348 }
4349
4350 /* If SRC is a string constant and block move would be done
4351 by pieces, we can avoid loading the string from memory
4352 and only store the computed constants. */
4353 if (src_str
4354 && CONST_INT_P (len_rtx)
4355 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4356 constfn = builtin_memcpy_read_str;
4357
4358 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4359 TREE_TYPE (len), target,
4360 result_eq, constfn,
4361 CONST_CAST (char *, src_str));
4362
4363 if (result)
4364 {
4365 /* Return the value in the proper mode for this function. */
4366 if (GET_MODE (result) == mode)
4367 return result;
4368
4369 if (target != 0)
4370 {
4371 convert_move (target, result, 0);
4372 return target;
4373 }
4374
4375 return convert_to_mode (mode, result, 0);
4376 }
4377
4378 return NULL_RTX;
4379 }
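
/* Editor's note, a sketch rather than original text, on RESULT_EQ: when the
   result is known to be tested only against zero, as in

     if (__builtin_memcmp (a, b, 16) == 0)
       ...

   RESULT_EQ may be true and emit_block_cmp_hints is then asked merely for a
   zero/nonzero answer instead of a signed three-way result, which typically
   allows a cheaper comparison sequence.  */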
4380
4381 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4382 if we failed; the caller should emit a normal call, otherwise try to get
4383 the result in TARGET, if convenient. */
4384
4385 static rtx
4386 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4387 {
4388 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4389 return NULL_RTX;
4390
4391 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4392 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4393 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4394 {
4395 rtx arg1_rtx, arg2_rtx;
4396 tree fndecl, fn;
4397 tree arg1 = CALL_EXPR_ARG (exp, 0);
4398 tree arg2 = CALL_EXPR_ARG (exp, 1);
4399 rtx result = NULL_RTX;
4400
4401 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4402 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4403
4404 /* If we don't have POINTER_TYPE, call the function. */
4405 if (arg1_align == 0 || arg2_align == 0)
4406 return NULL_RTX;
4407
4408 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4409 arg1 = builtin_save_expr (arg1);
4410 arg2 = builtin_save_expr (arg2);
4411
4412 arg1_rtx = get_memory_rtx (arg1, NULL);
4413 arg2_rtx = get_memory_rtx (arg2, NULL);
4414
4415 /* Try to call cmpstrsi. */
4416 if (cmpstr_icode != CODE_FOR_nothing)
4417 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4418 MIN (arg1_align, arg2_align));
4419
4420 /* Try to determine at least one length and call cmpstrnsi. */
4421 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4422 {
4423 tree len;
4424 rtx arg3_rtx;
4425
4426 tree len1 = c_strlen (arg1, 1);
4427 tree len2 = c_strlen (arg2, 1);
4428
4429 if (len1)
4430 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4431 if (len2)
4432 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4433
4434 /* If we don't have a constant length for the first, use the length
4435 of the second, if we know it. We don't require a constant for
4436 this case; some cost analysis could be done if both are available
4437 but neither is constant. For now, assume they're equally cheap,
4438 unless one has side effects. If both strings have constant lengths,
4439 use the smaller. */
4440
4441 if (!len1)
4442 len = len2;
4443 else if (!len2)
4444 len = len1;
4445 else if (TREE_SIDE_EFFECTS (len1))
4446 len = len2;
4447 else if (TREE_SIDE_EFFECTS (len2))
4448 len = len1;
4449 else if (TREE_CODE (len1) != INTEGER_CST)
4450 len = len2;
4451 else if (TREE_CODE (len2) != INTEGER_CST)
4452 len = len1;
4453 else if (tree_int_cst_lt (len1, len2))
4454 len = len1;
4455 else
4456 len = len2;
4457
4458 /* If both arguments have side effects, we cannot optimize. */
4459 if (len && !TREE_SIDE_EFFECTS (len))
4460 {
4461 arg3_rtx = expand_normal (len);
4462 result = expand_cmpstrn_or_cmpmem
4463 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4464 arg3_rtx, MIN (arg1_align, arg2_align));
4465 }
4466 }
4467
4468 if (result)
4469 {
4470 /* Return the value in the proper mode for this function. */
4471 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4472 if (GET_MODE (result) == mode)
4473 return result;
4474 if (target == 0)
4475 return convert_to_mode (mode, result, 0);
4476 convert_move (target, result, 0);
4477 return target;
4478 }
4479
4480 /* Expand the library call ourselves using a stabilized argument
4481 list to avoid re-evaluating the function's arguments twice. */
4482 fndecl = get_callee_fndecl (exp);
4483 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4484 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4485 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4486 return expand_call (fn, target, target == const0_rtx);
4487 }
4488 return NULL_RTX;
4489 }
4490
4491 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4492 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4493 try to get the result in TARGET, if convenient. */
4494
4495 static rtx
4496 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4497 ATTRIBUTE_UNUSED machine_mode mode)
4498 {
4499 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4500
4501 if (!validate_arglist (exp,
4502 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4503 return NULL_RTX;
4504
4505 /* If c_strlen can determine an expression for one of the string
4506 lengths, and it doesn't have side effects, then emit cmpstrnsi
4507 using length MIN(strlen(string)+1, arg3). */
4508 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4509 if (cmpstrn_icode != CODE_FOR_nothing)
4510 {
4511 tree len, len1, len2, len3;
4512 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4513 rtx result;
4514 tree fndecl, fn;
4515 tree arg1 = CALL_EXPR_ARG (exp, 0);
4516 tree arg2 = CALL_EXPR_ARG (exp, 1);
4517 tree arg3 = CALL_EXPR_ARG (exp, 2);
4518
4519 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4520 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4521
4522 len1 = c_strlen (arg1, 1);
4523 len2 = c_strlen (arg2, 1);
4524
4525 if (len1)
4526 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4527 if (len2)
4528 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4529
4530 len3 = fold_convert_loc (loc, sizetype, arg3);
4531
4532 /* If we don't have a constant length for the first, use the length
4533 of the second, if we know it. If neither string is constant length,
4534 use the given length argument. We don't require a constant for
4535 this case; some cost analysis could be done if both are available
4536 but neither is constant. For now, assume they're equally cheap,
4537 unless one has side effects. If both strings have constant lengths,
4538 use the smaller. */
4539
4540 if (!len1 && !len2)
4541 len = len3;
4542 else if (!len1)
4543 len = len2;
4544 else if (!len2)
4545 len = len1;
4546 else if (TREE_SIDE_EFFECTS (len1))
4547 len = len2;
4548 else if (TREE_SIDE_EFFECTS (len2))
4549 len = len1;
4550 else if (TREE_CODE (len1) != INTEGER_CST)
4551 len = len2;
4552 else if (TREE_CODE (len2) != INTEGER_CST)
4553 len = len1;
4554 else if (tree_int_cst_lt (len1, len2))
4555 len = len1;
4556 else
4557 len = len2;
4558
4559 /* If we are not using the given length, we must incorporate it here.
4560 The actual new length parameter will be MIN(len,arg3) in this case. */
4561 if (len != len3)
4562 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4563 arg1_rtx = get_memory_rtx (arg1, len);
4564 arg2_rtx = get_memory_rtx (arg2, len);
4565 arg3_rtx = expand_normal (len);
4566 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4567 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4568 MIN (arg1_align, arg2_align));
4569 if (result)
4570 {
4571 /* Return the value in the proper mode for this function. */
4572 mode = TYPE_MODE (TREE_TYPE (exp));
4573 if (GET_MODE (result) == mode)
4574 return result;
4575 if (target == 0)
4576 return convert_to_mode (mode, result, 0);
4577 convert_move (target, result, 0);
4578 return target;
4579 }
4580
4581 /* Expand the library call ourselves using a stabilized argument
4582 list to avoid re-evaluating the function's arguments twice. */
4583 fndecl = get_callee_fndecl (exp);
4584 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4585 arg1, arg2, len);
4586 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4587 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4588 return expand_call (fn, target, target == const0_rtx);
4589 }
4590 return NULL_RTX;
4591 }
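
/* Editor's worked example, not from the original source, of the length
   selection above: for

     __builtin_strncmp (s, "abc", n);

   LEN2 is strlen ("abc") + 1 == 4 while LEN1 is unknown, so LEN becomes 4;
   since that differs from the converted third argument, the length passed
   to the cmpstrn pattern is MIN (4, n), matching the comment about
   MIN(strlen(string)+1, arg3).  */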
4592
4593 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4594 if that's convenient. */
4595
4596 rtx
4597 expand_builtin_saveregs (void)
4598 {
4599 rtx val;
4600 rtx_insn *seq;
4601
4602 /* Don't do __builtin_saveregs more than once in a function.
4603 Save the result of the first call and reuse it. */
4604 if (saveregs_value != 0)
4605 return saveregs_value;
4606
4607 /* When this function is called, it means that registers must be
4608 saved on entry to this function. So we migrate the call to the
4609 first insn of this function. */
4610
4611 start_sequence ();
4612
4613 /* Do whatever the machine needs done in this case. */
4614 val = targetm.calls.expand_builtin_saveregs ();
4615
4616 seq = get_insns ();
4617 end_sequence ();
4618
4619 saveregs_value = val;
4620
4621 /* Put the insns after the NOTE that starts the function. If this
4622 is inside a start_sequence, make the outer-level insn chain current, so
4623 the code is placed at the start of the function. */
4624 push_topmost_sequence ();
4625 emit_insn_after (seq, entry_of_function ());
4626 pop_topmost_sequence ();
4627
4628 return val;
4629 }
4630
4631 /* Expand a call to __builtin_next_arg. */
4632
4633 static rtx
4634 expand_builtin_next_arg (void)
4635 {
4636 /* Checking arguments is already done in fold_builtin_next_arg
4637 that must be called before this function. */
4638 return expand_binop (ptr_mode, add_optab,
4639 crtl->args.internal_arg_pointer,
4640 crtl->args.arg_offset_rtx,
4641 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4642 }
4643
4644 /* Make it easier for the backends by protecting the valist argument
4645 from multiple evaluations. */
4646
4647 static tree
4648 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4649 {
4650 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4651
4652 /* The current way of determining the type of valist is completely
4653 bogus. We should have the information on the va builtin instead. */
4654 if (!vatype)
4655 vatype = targetm.fn_abi_va_list (cfun->decl);
4656
4657 if (TREE_CODE (vatype) == ARRAY_TYPE)
4658 {
4659 if (TREE_SIDE_EFFECTS (valist))
4660 valist = save_expr (valist);
4661
4662 /* For this case, the backends will be expecting a pointer to
4663 vatype, but it's possible we've actually been given an array
4664 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4665 So fix it. */
4666 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4667 {
4668 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4669 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4670 }
4671 }
4672 else
4673 {
4674 tree pt = build_pointer_type (vatype);
4675
4676 if (! needs_lvalue)
4677 {
4678 if (! TREE_SIDE_EFFECTS (valist))
4679 return valist;
4680
4681 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4682 TREE_SIDE_EFFECTS (valist) = 1;
4683 }
4684
4685 if (TREE_SIDE_EFFECTS (valist))
4686 valist = save_expr (valist);
4687 valist = fold_build2_loc (loc, MEM_REF,
4688 vatype, valist, build_int_cst (pt, 0));
4689 }
4690
4691 return valist;
4692 }
4693
4694 /* The "standard" definition of va_list is void*. */
4695
4696 tree
4697 std_build_builtin_va_list (void)
4698 {
4699 return ptr_type_node;
4700 }
4701
4702 /* The "standard" abi va_list is va_list_type_node. */
4703
4704 tree
4705 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4706 {
4707 return va_list_type_node;
4708 }
4709
4710 /* The "standard" type of va_list is va_list_type_node. */
4711
4712 tree
4713 std_canonical_va_list_type (tree type)
4714 {
4715 tree wtype, htype;
4716
4717 wtype = va_list_type_node;
4718 htype = type;
4719
4720 if (TREE_CODE (wtype) == ARRAY_TYPE)
4721 {
4722 /* If va_list is an array type, the argument may have decayed
4723 to a pointer type, e.g. by being passed to another function.
4724 In that case, unwrap both types so that we can compare the
4725 underlying records. */
4726 if (TREE_CODE (htype) == ARRAY_TYPE
4727 || POINTER_TYPE_P (htype))
4728 {
4729 wtype = TREE_TYPE (wtype);
4730 htype = TREE_TYPE (htype);
4731 }
4732 }
4733 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4734 return va_list_type_node;
4735
4736 return NULL_TREE;
4737 }
4738
4739 /* The "standard" implementation of va_start: just assign `nextarg' to
4740 the variable. */
4741
4742 void
4743 std_expand_builtin_va_start (tree valist, rtx nextarg)
4744 {
4745 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4746 convert_move (va_r, nextarg, 0);
4747
4748 /* We do not have any valid bounds for the pointer, so
4749 just store zero bounds for it. */
4750 if (chkp_function_instrumented_p (current_function_decl))
4751 chkp_expand_bounds_reset_for_mem (valist,
4752 make_tree (TREE_TYPE (valist),
4753 nextarg));
4754 }
4755
4756 /* Expand EXP, a call to __builtin_va_start. */
4757
4758 static rtx
4759 expand_builtin_va_start (tree exp)
4760 {
4761 rtx nextarg;
4762 tree valist;
4763 location_t loc = EXPR_LOCATION (exp);
4764
4765 if (call_expr_nargs (exp) < 2)
4766 {
4767 error_at (loc, "too few arguments to function %<va_start%>");
4768 return const0_rtx;
4769 }
4770
4771 if (fold_builtin_next_arg (exp, true))
4772 return const0_rtx;
4773
4774 nextarg = expand_builtin_next_arg ();
4775 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4776
4777 if (targetm.expand_builtin_va_start)
4778 targetm.expand_builtin_va_start (valist, nextarg);
4779 else
4780 std_expand_builtin_va_start (valist, nextarg);
4781
4782 return const0_rtx;
4783 }
4784
4785 /* Expand EXP, a call to __builtin_va_end. */
4786
4787 static rtx
4788 expand_builtin_va_end (tree exp)
4789 {
4790 tree valist = CALL_EXPR_ARG (exp, 0);
4791
4792 /* Evaluate for side effects, if needed. I hate macros that don't
4793 do that. */
4794 if (TREE_SIDE_EFFECTS (valist))
4795 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4796
4797 return const0_rtx;
4798 }
4799
4800 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4801 builtin rather than just as an assignment in stdarg.h because of the
4802 nastiness of array-type va_list types. */
4803
4804 static rtx
4805 expand_builtin_va_copy (tree exp)
4806 {
4807 tree dst, src, t;
4808 location_t loc = EXPR_LOCATION (exp);
4809
4810 dst = CALL_EXPR_ARG (exp, 0);
4811 src = CALL_EXPR_ARG (exp, 1);
4812
4813 dst = stabilize_va_list_loc (loc, dst, 1);
4814 src = stabilize_va_list_loc (loc, src, 0);
4815
4816 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4817
4818 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4819 {
4820 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4821 TREE_SIDE_EFFECTS (t) = 1;
4822 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4823 }
4824 else
4825 {
4826 rtx dstb, srcb, size;
4827
4828 /* Evaluate to pointers. */
4829 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4830 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4831 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4832 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4833
4834 dstb = convert_memory_address (Pmode, dstb);
4835 srcb = convert_memory_address (Pmode, srcb);
4836
4837 /* "Dereference" to BLKmode memories. */
4838 dstb = gen_rtx_MEM (BLKmode, dstb);
4839 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4840 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4841 srcb = gen_rtx_MEM (BLKmode, srcb);
4842 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4843 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4844
4845 /* Copy. */
4846 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4847 }
4848
4849 return const0_rtx;
4850 }
4851
4852 /* Expand a call to one of the builtin functions __builtin_frame_address or
4853 __builtin_return_address. */
4854
4855 static rtx
4856 expand_builtin_frame_address (tree fndecl, tree exp)
4857 {
4858 /* The argument must be a nonnegative integer constant.
4859 It counts the number of frames to scan up the stack.
4860 The value is either the frame pointer value or the return
4861 address saved in that frame. */
4862 if (call_expr_nargs (exp) == 0)
4863 /* Warning about missing arg was already issued. */
4864 return const0_rtx;
4865 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4866 {
4867 error ("invalid argument to %qD", fndecl);
4868 return const0_rtx;
4869 }
4870 else
4871 {
4872 /* Number of frames to scan up the stack. */
4873 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4874
4875 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4876
4877 /* Some ports cannot access arbitrary stack frames. */
4878 if (tem == NULL)
4879 {
4880 warning (0, "unsupported argument to %qD", fndecl);
4881 return const0_rtx;
4882 }
4883
4884 if (count)
4885 {
4886 /* Warn since no effort is made to ensure that any frame
4887 beyond the current one exists or can be safely reached. */
4888 warning (OPT_Wframe_address, "calling %qD with "
4889 "a nonzero argument is unsafe", fndecl);
4890 }
4891
4892 /* For __builtin_frame_address, return what we've got. */
4893 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4894 return tem;
4895
4896 if (!REG_P (tem)
4897 && ! CONSTANT_P (tem))
4898 tem = copy_addr_to_reg (tem);
4899 return tem;
4900 }
4901 }
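
/* Editor's illustration, not part of the original source:

     void *fp = __builtin_frame_address (0);    the current frame, no warning
     void *ra = __builtin_return_address (1);   nonzero count, so the
                                                -Wframe-address warning above
                                                is issued

   and a non-constant argument is rejected with the "invalid argument"
   error.  */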
4902
4903 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4904 failed and the caller should emit a normal call. */
4905
4906 static rtx
4907 expand_builtin_alloca (tree exp)
4908 {
4909 rtx op0;
4910 rtx result;
4911 unsigned int align;
4912 tree fndecl = get_callee_fndecl (exp);
4913 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4914 == BUILT_IN_ALLOCA_WITH_ALIGN);
4915 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4916 bool valid_arglist
4917 = (alloca_with_align
4918 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4919 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4920
4921 if (!valid_arglist)
4922 return NULL_RTX;
4923
4924 if ((alloca_with_align && !warn_vla_limit)
4925 || (!alloca_with_align && !warn_alloca_limit))
4926 {
4927 /* -Walloca-larger-than and -Wvla-larger-than settings override
4928 the more general -Walloc-size-larger-than, so unless either of
4929 the former options is specified, check the alloca arguments for
4930 overflow. */
4931 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4932 int idx[] = { 0, -1 };
4933 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4934 }
4935
4936 /* Compute the argument. */
4937 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4938
4939 /* Compute the alignment. */
4940 align = (alloca_with_align
4941 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4942 : BIGGEST_ALIGNMENT);
4943
4944 /* Allocate the desired space. If the allocation stems from the declaration
4945 of a variable-sized object, it cannot accumulate. */
4946 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
4947 result = convert_memory_address (ptr_mode, result);
4948
4949 return result;
4950 }
4951
4952 /* Expand a call to bswap builtin in EXP.
4953 Return NULL_RTX if a normal call should be emitted rather than expanding the
4954 function in-line. If convenient, the result should be placed in TARGET.
4955 SUBTARGET may be used as the target for computing one of EXP's operands. */
4956
4957 static rtx
4958 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4959 rtx subtarget)
4960 {
4961 tree arg;
4962 rtx op0;
4963
4964 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4965 return NULL_RTX;
4966
4967 arg = CALL_EXPR_ARG (exp, 0);
4968 op0 = expand_expr (arg,
4969 subtarget && GET_MODE (subtarget) == target_mode
4970 ? subtarget : NULL_RTX,
4971 target_mode, EXPAND_NORMAL);
4972 if (GET_MODE (op0) != target_mode)
4973 op0 = convert_to_mode (target_mode, op0, 1);
4974
4975 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4976
4977 gcc_assert (target);
4978
4979 return convert_to_mode (target_mode, target, 1);
4980 }
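
/* Editor's example, not from the original source: with a 32-bit TARGET_MODE

     __builtin_bswap32 (0x12345678)

   expands through bswap_optab to the value 0x78563412; when no bswap
   pattern exists, expand_unop is expected to synthesize an equivalent
   sequence, which is what the gcc_assert above relies on.  */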
4981
4982 /* Expand a call to a unary builtin in EXP.
4983 Return NULL_RTX if a normal call should be emitted rather than expanding the
4984 function in-line. If convenient, the result should be placed in TARGET.
4985 SUBTARGET may be used as the target for computing one of EXP's operands. */
4986
4987 static rtx
4988 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4989 rtx subtarget, optab op_optab)
4990 {
4991 rtx op0;
4992
4993 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4994 return NULL_RTX;
4995
4996 /* Compute the argument. */
4997 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4998 (subtarget
4999 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5000 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5001 VOIDmode, EXPAND_NORMAL);
5002 /* Compute op, into TARGET if possible.
5003 Set TARGET to wherever the result comes back. */
5004 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5005 op_optab, op0, target, op_optab != clrsb_optab);
5006 gcc_assert (target);
5007
5008 return convert_to_mode (target_mode, target, 0);
5009 }
5010
5011 /* Expand a call to __builtin_expect. We just return our argument
5012 as the builtin_expect semantics should already have been handled by
5013 the tree branch prediction pass. */
5014
5015 static rtx
5016 expand_builtin_expect (tree exp, rtx target)
5017 {
5018 tree arg;
5019
5020 if (call_expr_nargs (exp) < 2)
5021 return const0_rtx;
5022 arg = CALL_EXPR_ARG (exp, 0);
5023
5024 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5025 /* When guessing was done, the hints should be already stripped away. */
5026 gcc_assert (!flag_guess_branch_prob
5027 || optimize == 0 || seen_error ());
5028 return target;
5029 }
5030
5031 /* Expand a call to __builtin_assume_aligned. We just return our first
5032 argument, as the builtin_assume_aligned semantics should already have
5033 been handled by CCP. */
5034
5035 static rtx
5036 expand_builtin_assume_aligned (tree exp, rtx target)
5037 {
5038 if (call_expr_nargs (exp) < 2)
5039 return const0_rtx;
5040 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5041 EXPAND_NORMAL);
5042 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5043 && (call_expr_nargs (exp) < 3
5044 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5045 return target;
5046 }
5047
5048 void
5049 expand_builtin_trap (void)
5050 {
5051 if (targetm.have_trap ())
5052 {
5053 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5054 /* For trap insns when not accumulating outgoing args force
5055 REG_ARGS_SIZE note to prevent crossjumping of calls with
5056 different args sizes. */
5057 if (!ACCUMULATE_OUTGOING_ARGS)
5058 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5059 }
5060 else
5061 {
5062 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5063 tree call_expr = build_call_expr (fn, 0);
5064 expand_call (call_expr, NULL_RTX, false);
5065 }
5066
5067 emit_barrier ();
5068 }
5069
5070 /* Expand a call to __builtin_unreachable. We do nothing except emit
5071 a barrier saying that control flow will not pass here.
5072
5073 It is the responsibility of the program being compiled to ensure
5074 that control flow never reaches __builtin_unreachable. */
5075 static void
5076 expand_builtin_unreachable (void)
5077 {
5078 emit_barrier ();
5079 }
5080
5081 /* Expand EXP, a call to fabs, fabsf or fabsl.
5082 Return NULL_RTX if a normal call should be emitted rather than expanding
5083 the function inline. If convenient, the result should be placed
5084 in TARGET. SUBTARGET may be used as the target for computing
5085 the operand. */
5086
5087 static rtx
5088 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5089 {
5090 machine_mode mode;
5091 tree arg;
5092 rtx op0;
5093
5094 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5095 return NULL_RTX;
5096
5097 arg = CALL_EXPR_ARG (exp, 0);
5098 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5099 mode = TYPE_MODE (TREE_TYPE (arg));
5100 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5101 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5102 }
5103
5104 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5105 Return NULL_RTX if a normal call should be emitted rather than expanding the
5106 function inline. If convenient, the result should be placed in TARGET.
5107 SUBTARGET may be used as the target for computing the operand. */
5108
5109 static rtx
5110 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5111 {
5112 rtx op0, op1;
5113 tree arg;
5114
5115 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5116 return NULL_RTX;
5117
5118 arg = CALL_EXPR_ARG (exp, 0);
5119 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5120
5121 arg = CALL_EXPR_ARG (exp, 1);
5122 op1 = expand_normal (arg);
5123
5124 return expand_copysign (op0, op1, target);
5125 }
5126
5127 /* Expand a call to __builtin___clear_cache. */
5128
5129 static rtx
5130 expand_builtin___clear_cache (tree exp)
5131 {
5132 if (!targetm.code_for_clear_cache)
5133 {
5134 #ifdef CLEAR_INSN_CACHE
5135 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5136 does something. Just do the default expansion to a call to
5137 __clear_cache(). */
5138 return NULL_RTX;
5139 #else
5140 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5141 does nothing. There is no need to call it. Do nothing. */
5142 return const0_rtx;
5143 #endif /* CLEAR_INSN_CACHE */
5144 }
5145
5146 /* We have a "clear_cache" insn, and it will handle everything. */
5147 tree begin, end;
5148 rtx begin_rtx, end_rtx;
5149
5150 /* We must not expand to a library call. If we did, any
5151 fallback library function in libgcc that might contain a call to
5152 __builtin___clear_cache() would recurse infinitely. */
5153 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5154 {
5155 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5156 return const0_rtx;
5157 }
5158
5159 if (targetm.have_clear_cache ())
5160 {
5161 struct expand_operand ops[2];
5162
5163 begin = CALL_EXPR_ARG (exp, 0);
5164 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5165
5166 end = CALL_EXPR_ARG (exp, 1);
5167 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5168
5169 create_address_operand (&ops[0], begin_rtx);
5170 create_address_operand (&ops[1], end_rtx);
5171 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5172 return const0_rtx;
5173 }
5174 return const0_rtx;
5175 }
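
/* Illustrative usage (not part of GCC itself): code that writes
   instructions into a buffer, e.g. a small JIT, must flush the instruction
   cache before jumping to it:

     memcpy (code_buf, insns, len);
     __builtin___clear_cache (code_buf, code_buf + len);

   Depending on the target this becomes a clear_cache insn, a call to the
   libgcc __clear_cache routine, or nothing at all.  */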
5176
5177 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5178
5179 static rtx
5180 round_trampoline_addr (rtx tramp)
5181 {
5182 rtx temp, addend, mask;
5183
5184 /* If we don't need too much alignment, we'll have been guaranteed
5185 proper alignment by get_trampoline_type. */
5186 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5187 return tramp;
5188
5189 /* Round address up to desired boundary. */
5190 temp = gen_reg_rtx (Pmode);
5191 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5192 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5193
5194 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5195 temp, 0, OPTAB_LIB_WIDEN);
5196 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5197 temp, 0, OPTAB_LIB_WIDEN);
5198
5199 return tramp;
5200 }
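
/* Worked example (illustrative only): with a TRAMPOLINE_ALIGNMENT of 64
   bits, ADDEND is 7 and MASK is -8, so an address of 0x1005 is rounded as

     (0x1005 + 7) & -8  ==  0x1008

   i.e. up to the next 8-byte boundary.  */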
5201
5202 static rtx
5203 expand_builtin_init_trampoline (tree exp, bool onstack)
5204 {
5205 tree t_tramp, t_func, t_chain;
5206 rtx m_tramp, r_tramp, r_chain, tmp;
5207
5208 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5209 POINTER_TYPE, VOID_TYPE))
5210 return NULL_RTX;
5211
5212 t_tramp = CALL_EXPR_ARG (exp, 0);
5213 t_func = CALL_EXPR_ARG (exp, 1);
5214 t_chain = CALL_EXPR_ARG (exp, 2);
5215
5216 r_tramp = expand_normal (t_tramp);
5217 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5218 MEM_NOTRAP_P (m_tramp) = 1;
5219
5220 /* If ONSTACK, the TRAMP argument should be the address of a field
5221 within the local function's FRAME decl. Either way, let's see if
5222 we can fill in the MEM_ATTRs for this memory. */
5223 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5224 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5225
5226 /* Creator of a heap trampoline is responsible for making sure the
5227 address is aligned to at least STACK_BOUNDARY. Normally malloc
5228 will ensure this anyhow. */
5229 tmp = round_trampoline_addr (r_tramp);
5230 if (tmp != r_tramp)
5231 {
5232 m_tramp = change_address (m_tramp, BLKmode, tmp);
5233 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5234 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5235 }
5236
5237 /* The FUNC argument should be the address of the nested function.
5238 Extract the actual function decl to pass to the hook. */
5239 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5240 t_func = TREE_OPERAND (t_func, 0);
5241 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5242
5243 r_chain = expand_normal (t_chain);
5244
5245 /* Generate insns to initialize the trampoline. */
5246 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5247
5248 if (onstack)
5249 {
5250 trampolines_created = 1;
5251
5252 if (targetm.calls.custom_function_descriptors != 0)
5253 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5254 "trampoline generated for nested function %qD", t_func);
5255 }
5256
5257 return const0_rtx;
5258 }
5259
5260 static rtx
5261 expand_builtin_adjust_trampoline (tree exp)
5262 {
5263 rtx tramp;
5264
5265 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5266 return NULL_RTX;
5267
5268 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5269 tramp = round_trampoline_addr (tramp);
5270 if (targetm.calls.trampoline_adjust_address)
5271 tramp = targetm.calls.trampoline_adjust_address (tramp);
5272
5273 return tramp;
5274 }
5275
5276 /* Expand a call to the builtin descriptor initialization routine.
5277 A descriptor is made up of a couple of pointers to the static
5278 chain and the code entry in this order. */
5279
5280 static rtx
5281 expand_builtin_init_descriptor (tree exp)
5282 {
5283 tree t_descr, t_func, t_chain;
5284 rtx m_descr, r_descr, r_func, r_chain;
5285
5286 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5287 VOID_TYPE))
5288 return NULL_RTX;
5289
5290 t_descr = CALL_EXPR_ARG (exp, 0);
5291 t_func = CALL_EXPR_ARG (exp, 1);
5292 t_chain = CALL_EXPR_ARG (exp, 2);
5293
5294 r_descr = expand_normal (t_descr);
5295 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5296 MEM_NOTRAP_P (m_descr) = 1;
5297
5298 r_func = expand_normal (t_func);
5299 r_chain = expand_normal (t_chain);
5300
5301 /* Generate insns to initialize the descriptor. */
5302 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5303 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5304 POINTER_SIZE / BITS_PER_UNIT), r_func);
5305
5306 return const0_rtx;
5307 }
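
/* Illustrative layout (not part of GCC itself): on a target with 64-bit
   pointers the descriptor initialized above occupies two adjacent words,

     offset 0   static chain value
     offset 8   code entry point   (POINTER_SIZE / BITS_PER_UNIT)

   and expand_builtin_adjust_descriptor below biases the descriptor address
   so that an indirect call can distinguish it from a plain function
   pointer.  */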
5308
5309 /* Expand a call to the builtin descriptor adjustment routine. */
5310
5311 static rtx
5312 expand_builtin_adjust_descriptor (tree exp)
5313 {
5314 rtx tramp;
5315
5316 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5317 return NULL_RTX;
5318
5319 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5320
5321 /* Unalign the descriptor to allow runtime identification. */
5322 tramp = plus_constant (ptr_mode, tramp,
5323 targetm.calls.custom_function_descriptors);
5324
5325 return force_operand (tramp, NULL_RTX);
5326 }
5327
5328 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5329 function. The function first checks whether the back end provides
5330 an insn to implement signbit for the respective mode. If not, it
5331 checks whether the floating point format of the value is such that
5332 the sign bit can be extracted; failing that, signbit is computed as ARG < 0.0.
5333 EXP is the expression that is a call to the builtin function; if
5334 convenient, the result should be placed in TARGET. */
5335 static rtx
5336 expand_builtin_signbit (tree exp, rtx target)
5337 {
5338 const struct real_format *fmt;
5339 machine_mode fmode, imode, rmode;
5340 tree arg;
5341 int word, bitpos;
5342 enum insn_code icode;
5343 rtx temp;
5344 location_t loc = EXPR_LOCATION (exp);
5345
5346 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5347 return NULL_RTX;
5348
5349 arg = CALL_EXPR_ARG (exp, 0);
5350 fmode = TYPE_MODE (TREE_TYPE (arg));
5351 rmode = TYPE_MODE (TREE_TYPE (exp));
5352 fmt = REAL_MODE_FORMAT (fmode);
5353
5354 arg = builtin_save_expr (arg);
5355
5356 /* Expand the argument yielding a RTX expression. */
5357 temp = expand_normal (arg);
5358
5359 /* Check if the back end provides an insn that handles signbit for the
5360 argument's mode. */
5361 icode = optab_handler (signbit_optab, fmode);
5362 if (icode != CODE_FOR_nothing)
5363 {
5364 rtx_insn *last = get_last_insn ();
5365 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5366 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5367 return target;
5368 delete_insns_since (last);
5369 }
5370
5371 /* For floating point formats without a sign bit, implement signbit
5372 as "ARG < 0.0". */
5373 bitpos = fmt->signbit_ro;
5374 if (bitpos < 0)
5375 {
5376 /* But we can't do this if the format supports signed zero. */
5377 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5378
5379 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5380 build_real (TREE_TYPE (arg), dconst0));
5381 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5382 }
5383
5384 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5385 {
5386 imode = int_mode_for_mode (fmode);
5387 gcc_assert (imode != BLKmode);
5388 temp = gen_lowpart (imode, temp);
5389 }
5390 else
5391 {
5392 imode = word_mode;
5393 /* Handle targets with different FP word orders. */
5394 if (FLOAT_WORDS_BIG_ENDIAN)
5395 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5396 else
5397 word = bitpos / BITS_PER_WORD;
5398 temp = operand_subword_force (temp, word, fmode);
5399 bitpos = bitpos % BITS_PER_WORD;
5400 }
5401
5402 /* Force the intermediate word_mode (or narrower) result into a
5403 register. This avoids attempting to create paradoxical SUBREGs
5404 of floating point modes below. */
5405 temp = force_reg (imode, temp);
5406
5407 /* If the bitpos is within the "result mode" lowpart, the operation
5408 can be implemented with a single bitwise AND. Otherwise, we need
5409 a right shift and an AND. */
5410
5411 if (bitpos < GET_MODE_BITSIZE (rmode))
5412 {
5413 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5414
5415 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5416 temp = gen_lowpart (rmode, temp);
5417 temp = expand_binop (rmode, and_optab, temp,
5418 immed_wide_int_const (mask, rmode),
5419 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5420 }
5421 else
5422 {
5423 /* Perform a logical right shift to place the signbit in the least
5424 significant bit, then truncate the result to the desired mode
5425 and mask just this bit. */
5426 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5427 temp = gen_lowpart (rmode, temp);
5428 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5429 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5430 }
5431
5432 return temp;
5433 }
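
/* Worked example (illustrative only): for IEEE binary64 on a 64-bit target,
   signbit_ro is 63, IMODE becomes DImode and RMODE (the int result) is
   SImode.  Since 63 >= GET_MODE_BITSIZE (SImode) the second branch applies:
   shift right by 63, truncate to SImode and AND with 1.  For a float
   argument the first branch applies and a single AND with 0x80000000 in
   SImode is enough.  */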
5434
5435 /* Expand fork or exec calls. TARGET is the desired target of the
5436 call. EXP is the call. FN is the
5437 identifier of the actual function. IGNORE is nonzero if the
5438 value is to be ignored. */
5439
5440 static rtx
5441 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5442 {
5443 tree id, decl;
5444 tree call;
5445
5446 /* If we are not profiling, just call the function. */
5447 if (!profile_arc_flag)
5448 return NULL_RTX;
5449
5450 /* Otherwise call the wrapper. This should be equivalent for the rest of
5451 the compiler, so the code does not diverge, and the wrapper may run the
5452 code necessary to keep the profiling sane. */
5453
5454 switch (DECL_FUNCTION_CODE (fn))
5455 {
5456 case BUILT_IN_FORK:
5457 id = get_identifier ("__gcov_fork");
5458 break;
5459
5460 case BUILT_IN_EXECL:
5461 id = get_identifier ("__gcov_execl");
5462 break;
5463
5464 case BUILT_IN_EXECV:
5465 id = get_identifier ("__gcov_execv");
5466 break;
5467
5468 case BUILT_IN_EXECLP:
5469 id = get_identifier ("__gcov_execlp");
5470 break;
5471
5472 case BUILT_IN_EXECLE:
5473 id = get_identifier ("__gcov_execle");
5474 break;
5475
5476 case BUILT_IN_EXECVP:
5477 id = get_identifier ("__gcov_execvp");
5478 break;
5479
5480 case BUILT_IN_EXECVE:
5481 id = get_identifier ("__gcov_execve");
5482 break;
5483
5484 default:
5485 gcc_unreachable ();
5486 }
5487
5488 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5489 FUNCTION_DECL, id, TREE_TYPE (fn));
5490 DECL_EXTERNAL (decl) = 1;
5491 TREE_PUBLIC (decl) = 1;
5492 DECL_ARTIFICIAL (decl) = 1;
5493 TREE_NOTHROW (decl) = 1;
5494 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5495 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5496 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5497 return expand_call (call, target, ignore);
5498 }
5499
5500
5501 \f
5502 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5503 the pointer in these functions is void*, the tree optimizers may remove
5504 casts. The mode computed in expand_builtin isn't reliable either, due
5505 to __sync_bool_compare_and_swap.
5506
5507 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5508 group of builtins. This gives us log2 of the mode size. */
5509
5510 static inline machine_mode
5511 get_builtin_sync_mode (int fcode_diff)
5512 {
5513 /* The size is not negotiable, so ask not to get BLKmode in return
5514 if the target indicates that a smaller size would be better. */
5515 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5516 }
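
/* Illustrative mapping (not part of GCC itself): the mode is selected as
   BITS_PER_UNIT << FCODE_DIFF, so for the usual FOO_1 .. FOO_16 builtins

     0 -> QImode (8 bits)   1 -> HImode (16)   2 -> SImode (32)
     3 -> DImode (64)       4 -> TImode (128)

   on targets where those integer modes exist.  */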
5517
5518 /* Expand the memory expression LOC and return the appropriate memory operand
5519 for the builtin_sync operations. */
5520
5521 static rtx
5522 get_builtin_sync_mem (tree loc, machine_mode mode)
5523 {
5524 rtx addr, mem;
5525
5526 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5527 addr = convert_memory_address (Pmode, addr);
5528
5529 /* Note that we explicitly do not want any alias information for this
5530 memory, so that we kill all other live memories. Otherwise we don't
5531 satisfy the full barrier semantics of the intrinsic. */
5532 mem = validize_mem (gen_rtx_MEM (mode, addr));
5533
5534 /* The alignment needs to be at least that of the mode. */
5535 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5536 get_pointer_alignment (loc)));
5537 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5538 MEM_VOLATILE_P (mem) = 1;
5539
5540 return mem;
5541 }
5542
5543 /* Make sure an argument is in the right mode.
5544 EXP is the tree argument.
5545 MODE is the mode it should be in. */
5546
5547 static rtx
5548 expand_expr_force_mode (tree exp, machine_mode mode)
5549 {
5550 rtx val;
5551 machine_mode old_mode;
5552
5553 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5554 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5555 of CONST_INTs, where we know the old_mode only from the call argument. */
5556
5557 old_mode = GET_MODE (val);
5558 if (old_mode == VOIDmode)
5559 old_mode = TYPE_MODE (TREE_TYPE (exp));
5560 val = convert_modes (mode, old_mode, val, 1);
5561 return val;
5562 }
5563
5564
5565 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5566 EXP is the CALL_EXPR. CODE is the rtx code
5567 that corresponds to the arithmetic or logical operation from the name;
5568 an exception here is that NOT actually means NAND. TARGET is an optional
5569 place for us to store the results; AFTER is true if this is the
5570 xxx_and_fetch form, i.e. the result of the operation is returned. */
5571
5572 static rtx
5573 expand_builtin_sync_operation (machine_mode mode, tree exp,
5574 enum rtx_code code, bool after,
5575 rtx target)
5576 {
5577 rtx val, mem;
5578 location_t loc = EXPR_LOCATION (exp);
5579
5580 if (code == NOT && warn_sync_nand)
5581 {
5582 tree fndecl = get_callee_fndecl (exp);
5583 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5584
5585 static bool warned_f_a_n, warned_n_a_f;
5586
5587 switch (fcode)
5588 {
5589 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5590 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5591 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5592 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5593 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5594 if (warned_f_a_n)
5595 break;
5596
5597 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5598 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5599 warned_f_a_n = true;
5600 break;
5601
5602 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5603 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5604 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5605 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5606 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5607 if (warned_n_a_f)
5608 break;
5609
5610 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5611 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5612 warned_n_a_f = true;
5613 break;
5614
5615 default:
5616 gcc_unreachable ();
5617 }
5618 }
5619
5620 /* Expand the operands. */
5621 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5622 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5623
5624 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5625 after);
5626 }
5627
5628 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5629 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5630 true if this is the boolean form. TARGET is a place for us to store the
5631 results; this is NOT optional if IS_BOOL is true. */
5632
5633 static rtx
5634 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5635 bool is_bool, rtx target)
5636 {
5637 rtx old_val, new_val, mem;
5638 rtx *pbool, *poval;
5639
5640 /* Expand the operands. */
5641 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5642 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5643 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5644
5645 pbool = poval = NULL;
5646 if (target != const0_rtx)
5647 {
5648 if (is_bool)
5649 pbool = &target;
5650 else
5651 poval = &target;
5652 }
5653 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5654 false, MEMMODEL_SYNC_SEQ_CST,
5655 MEMMODEL_SYNC_SEQ_CST))
5656 return NULL_RTX;
5657
5658 return target;
5659 }
5660
5661 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5662 general form is actually an atomic exchange, and some targets only
5663 support a reduced form with the second argument being a constant 1.
5664 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5665 the results. */
5666
5667 static rtx
5668 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5669 rtx target)
5670 {
5671 rtx val, mem;
5672
5673 /* Expand the operands. */
5674 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5675 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5676
5677 return expand_sync_lock_test_and_set (target, mem, val);
5678 }
5679
5680 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5681
5682 static void
5683 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5684 {
5685 rtx mem;
5686
5687 /* Expand the operands. */
5688 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5689
5690 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5691 }
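
/* Illustrative usage (not part of GCC itself): the two builtins above form
   the classic __sync spin-lock idiom,

     while (__sync_lock_test_and_set (&lock, 1))   [acquire]
       ;
     ...critical section...
     __sync_lock_release (&lock);                  [release, stores 0]

   expanded here as an atomic exchange and an atomic store with
   MEMMODEL_SYNC_RELEASE.  */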
5692
5693 /* Given an integer representing an ``enum memmodel'', verify its
5694 correctness and return the memory model enum. */
5695
5696 static enum memmodel
5697 get_memmodel (tree exp)
5698 {
5699 rtx op;
5700 unsigned HOST_WIDE_INT val;
5701 source_location loc
5702 = expansion_point_location_if_in_system_header (input_location);
5703
5704 /* If the parameter is not a constant, it's a run time value so we'll just
5705 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5706 if (TREE_CODE (exp) != INTEGER_CST)
5707 return MEMMODEL_SEQ_CST;
5708
5709 op = expand_normal (exp);
5710
5711 val = INTVAL (op);
5712 if (targetm.memmodel_check)
5713 val = targetm.memmodel_check (val);
5714 else if (val & ~MEMMODEL_MASK)
5715 {
5716 warning_at (loc, OPT_Winvalid_memory_model,
5717 "unknown architecture specifier in memory model to builtin");
5718 return MEMMODEL_SEQ_CST;
5719 }
5720
5721 /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
5722 if (memmodel_base (val) >= MEMMODEL_LAST)
5723 {
5724 warning_at (loc, OPT_Winvalid_memory_model,
5725 "invalid memory model argument to builtin");
5726 return MEMMODEL_SEQ_CST;
5727 }
5728
5729 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5730 be conservative and promote consume to acquire. */
5731 if (val == MEMMODEL_CONSUME)
5732 val = MEMMODEL_ACQUIRE;
5733
5734 return (enum memmodel) val;
5735 }
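
/* Illustrative behavior (not part of GCC itself): a call such as

     __atomic_load_n (p, __ATOMIC_CONSUME)

   reaches the expanders with MEMMODEL_ACQUIRE because of the PR59448
   workaround above, while a memory-model argument that is not a
   compile-time constant is simply treated as MEMMODEL_SEQ_CST.  */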
5736
5737 /* Expand the __atomic_exchange intrinsic:
5738 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5739 EXP is the CALL_EXPR.
5740 TARGET is an optional place for us to store the results. */
5741
5742 static rtx
5743 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5744 {
5745 rtx val, mem;
5746 enum memmodel model;
5747
5748 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5749
5750 if (!flag_inline_atomics)
5751 return NULL_RTX;
5752
5753 /* Expand the operands. */
5754 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5755 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5756
5757 return expand_atomic_exchange (target, mem, val, model);
5758 }
5759
5760 /* Expand the __atomic_compare_exchange intrinsic:
5761 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5762 TYPE desired, BOOL weak,
5763 enum memmodel success,
5764 enum memmodel failure)
5765 EXP is the CALL_EXPR.
5766 TARGET is an optional place for us to store the results. */
5767
5768 static rtx
5769 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5770 rtx target)
5771 {
5772 rtx expect, desired, mem, oldval;
5773 rtx_code_label *label;
5774 enum memmodel success, failure;
5775 tree weak;
5776 bool is_weak;
5777 source_location loc
5778 = expansion_point_location_if_in_system_header (input_location);
5779
5780 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5781 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5782
5783 if (failure > success)
5784 {
5785 warning_at (loc, OPT_Winvalid_memory_model,
5786 "failure memory model cannot be stronger than success "
5787 "memory model for %<__atomic_compare_exchange%>");
5788 success = MEMMODEL_SEQ_CST;
5789 }
5790
5791 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5792 {
5793 warning_at (loc, OPT_Winvalid_memory_model,
5794 "invalid failure memory model for "
5795 "%<__atomic_compare_exchange%>");
5796 failure = MEMMODEL_SEQ_CST;
5797 success = MEMMODEL_SEQ_CST;
5798 }
5799
5800
5801 if (!flag_inline_atomics)
5802 return NULL_RTX;
5803
5804 /* Expand the operands. */
5805 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5806
5807 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5808 expect = convert_memory_address (Pmode, expect);
5809 expect = gen_rtx_MEM (mode, expect);
5810 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5811
5812 weak = CALL_EXPR_ARG (exp, 3);
5813 is_weak = false;
5814 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5815 is_weak = true;
5816
5817 if (target == const0_rtx)
5818 target = NULL;
5819
5820 /* Lest the rtl backend create a race condition with an improper store
5821 to memory, always create a new pseudo for OLDVAL. */
5822 oldval = NULL;
5823
5824 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5825 is_weak, success, failure))
5826 return NULL_RTX;
5827
5828 /* Conditionally store back to EXPECT, lest we create a race condition
5829 with an improper store to memory. */
5830 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5831 the normal case where EXPECT is totally private, i.e. a register. At
5832 which point the store can be unconditional. */
5833 label = gen_label_rtx ();
5834 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5835 GET_MODE (target), 1, label);
5836 emit_move_insn (expect, oldval);
5837 emit_label (label);
5838
5839 return target;
5840 }
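
/* Illustrative usage (not part of GCC itself): the conditional store back
   to EXPECT above is what makes the usual CAS loop work,

     int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   EXPECTED is refreshed from *P only when the exchange fails, so the loop
   retries with the value that was actually seen.  */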
5841
5842 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5843 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5844 call. The weak parameter must be dropped to match the expected parameter
5845 list and the expected argument changed from value to pointer to memory
5846 slot. */
5847
5848 static void
5849 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5850 {
5851 unsigned int z;
5852 vec<tree, va_gc> *vec;
5853
5854 vec_alloc (vec, 5);
5855 vec->quick_push (gimple_call_arg (call, 0));
5856 tree expected = gimple_call_arg (call, 1);
5857 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5858 TREE_TYPE (expected));
5859 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5860 if (expd != x)
5861 emit_move_insn (x, expd);
5862 tree v = make_tree (TREE_TYPE (expected), x);
5863 vec->quick_push (build1 (ADDR_EXPR,
5864 build_pointer_type (TREE_TYPE (expected)), v));
5865 vec->quick_push (gimple_call_arg (call, 2));
5866 /* Skip the boolean weak parameter. */
5867 for (z = 4; z < 6; z++)
5868 vec->quick_push (gimple_call_arg (call, z));
5869 built_in_function fncode
5870 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5871 + exact_log2 (GET_MODE_SIZE (mode)));
5872 tree fndecl = builtin_decl_explicit (fncode);
5873 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5874 fndecl);
5875 tree exp = build_call_vec (boolean_type_node, fn, vec);
5876 tree lhs = gimple_call_lhs (call);
5877 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5878 if (lhs)
5879 {
5880 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5881 if (GET_MODE (boolret) != mode)
5882 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5883 x = force_reg (mode, x);
5884 write_complex_part (target, boolret, true);
5885 write_complex_part (target, x, false);
5886 }
5887 }
5888
5889 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5890
5891 void
5892 expand_ifn_atomic_compare_exchange (gcall *call)
5893 {
5894 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5895 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5896 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5897 rtx expect, desired, mem, oldval, boolret;
5898 enum memmodel success, failure;
5899 tree lhs;
5900 bool is_weak;
5901 source_location loc
5902 = expansion_point_location_if_in_system_header (gimple_location (call));
5903
5904 success = get_memmodel (gimple_call_arg (call, 4));
5905 failure = get_memmodel (gimple_call_arg (call, 5));
5906
5907 if (failure > success)
5908 {
5909 warning_at (loc, OPT_Winvalid_memory_model,
5910 "failure memory model cannot be stronger than success "
5911 "memory model for %<__atomic_compare_exchange%>");
5912 success = MEMMODEL_SEQ_CST;
5913 }
5914
5915 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5916 {
5917 warning_at (loc, OPT_Winvalid_memory_model,
5918 "invalid failure memory model for "
5919 "%<__atomic_compare_exchange%>");
5920 failure = MEMMODEL_SEQ_CST;
5921 success = MEMMODEL_SEQ_CST;
5922 }
5923
5924 if (!flag_inline_atomics)
5925 {
5926 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5927 return;
5928 }
5929
5930 /* Expand the operands. */
5931 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5932
5933 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5934 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5935
5936 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5937
5938 boolret = NULL;
5939 oldval = NULL;
5940
5941 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5942 is_weak, success, failure))
5943 {
5944 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5945 return;
5946 }
5947
5948 lhs = gimple_call_lhs (call);
5949 if (lhs)
5950 {
5951 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5952 if (GET_MODE (boolret) != mode)
5953 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5954 write_complex_part (target, boolret, true);
5955 write_complex_part (target, oldval, false);
5956 }
5957 }
5958
5959 /* Expand the __atomic_load intrinsic:
5960 TYPE __atomic_load (TYPE *object, enum memmodel)
5961 EXP is the CALL_EXPR.
5962 TARGET is an optional place for us to store the results. */
5963
5964 static rtx
5965 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5966 {
5967 rtx mem;
5968 enum memmodel model;
5969
5970 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5971 if (is_mm_release (model) || is_mm_acq_rel (model))
5972 {
5973 source_location loc
5974 = expansion_point_location_if_in_system_header (input_location);
5975 warning_at (loc, OPT_Winvalid_memory_model,
5976 "invalid memory model for %<__atomic_load%>");
5977 model = MEMMODEL_SEQ_CST;
5978 }
5979
5980 if (!flag_inline_atomics)
5981 return NULL_RTX;
5982
5983 /* Expand the operand. */
5984 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5985
5986 return expand_atomic_load (target, mem, model);
5987 }
5988
5989
5990 /* Expand the __atomic_store intrinsic:
5991 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5992 EXP is the CALL_EXPR.
5993 TARGET is an optional place for us to store the results. */
5994
5995 static rtx
5996 expand_builtin_atomic_store (machine_mode mode, tree exp)
5997 {
5998 rtx mem, val;
5999 enum memmodel model;
6000
6001 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6002 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6003 || is_mm_release (model)))
6004 {
6005 source_location loc
6006 = expansion_point_location_if_in_system_header (input_location);
6007 warning_at (loc, OPT_Winvalid_memory_model,
6008 "invalid memory model for %<__atomic_store%>");
6009 model = MEMMODEL_SEQ_CST;
6010 }
6011
6012 if (!flag_inline_atomics)
6013 return NULL_RTX;
6014
6015 /* Expand the operands. */
6016 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6017 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6018
6019 return expand_atomic_store (mem, val, model, false);
6020 }
6021
6022 /* Expand the __atomic_fetch_XXX intrinsic:
6023 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6024 EXP is the CALL_EXPR.
6025 TARGET is an optional place for us to store the results.
6026 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6027 FETCH_AFTER is true if returning the result of the operation.
6028 FETCH_AFTER is false if returning the value before the operation.
6029 IGNORE is true if the result is not used.
6030 EXT_CALL is the correct builtin for an external call if this cannot be
6031 resolved to an instruction sequence. */
6032
6033 static rtx
6034 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6035 enum rtx_code code, bool fetch_after,
6036 bool ignore, enum built_in_function ext_call)
6037 {
6038 rtx val, mem, ret;
6039 enum memmodel model;
6040 tree fndecl;
6041 tree addr;
6042
6043 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6044
6045 /* Expand the operands. */
6046 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6047 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6048
6049 /* Only try generating instructions if inlining is turned on. */
6050 if (flag_inline_atomics)
6051 {
6052 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6053 if (ret)
6054 return ret;
6055 }
6056
6057 /* Return if a different routine isn't needed for the library call. */
6058 if (ext_call == BUILT_IN_NONE)
6059 return NULL_RTX;
6060
6061 /* Change the call to the specified function. */
6062 fndecl = get_callee_fndecl (exp);
6063 addr = CALL_EXPR_FN (exp);
6064 STRIP_NOPS (addr);
6065
6066 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6067 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6068
6069 /* Expand the call here so we can emit trailing code. */
6070 ret = expand_call (exp, target, ignore);
6071
6072 /* Replace the original function just in case it matters. */
6073 TREE_OPERAND (addr, 0) = fndecl;
6074
6075 /* Then issue the arithmetic correction to return the right result. */
6076 if (!ignore)
6077 {
6078 if (code == NOT)
6079 {
6080 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6081 OPTAB_LIB_WIDEN);
6082 ret = expand_simple_unop (mode, NOT, ret, target, true);
6083 }
6084 else
6085 ret = expand_simple_binop (mode, code, ret, val, target, true,
6086 OPTAB_LIB_WIDEN);
6087 }
6088 return ret;
6089 }
6090
6091 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6092
6093 void
6094 expand_ifn_atomic_bit_test_and (gcall *call)
6095 {
6096 tree ptr = gimple_call_arg (call, 0);
6097 tree bit = gimple_call_arg (call, 1);
6098 tree flag = gimple_call_arg (call, 2);
6099 tree lhs = gimple_call_lhs (call);
6100 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6101 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6102 enum rtx_code code;
6103 optab optab;
6104 struct expand_operand ops[5];
6105
6106 gcc_assert (flag_inline_atomics);
6107
6108 if (gimple_call_num_args (call) == 4)
6109 model = get_memmodel (gimple_call_arg (call, 3));
6110
6111 rtx mem = get_builtin_sync_mem (ptr, mode);
6112 rtx val = expand_expr_force_mode (bit, mode);
6113
6114 switch (gimple_call_internal_fn (call))
6115 {
6116 case IFN_ATOMIC_BIT_TEST_AND_SET:
6117 code = IOR;
6118 optab = atomic_bit_test_and_set_optab;
6119 break;
6120 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6121 code = XOR;
6122 optab = atomic_bit_test_and_complement_optab;
6123 break;
6124 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6125 code = AND;
6126 optab = atomic_bit_test_and_reset_optab;
6127 break;
6128 default:
6129 gcc_unreachable ();
6130 }
6131
6132 if (lhs == NULL_TREE)
6133 {
6134 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6135 val, NULL_RTX, true, OPTAB_DIRECT);
6136 if (code == AND)
6137 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6138 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6139 return;
6140 }
6141
6142 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6143 enum insn_code icode = direct_optab_handler (optab, mode);
6144 gcc_assert (icode != CODE_FOR_nothing);
6145 create_output_operand (&ops[0], target, mode);
6146 create_fixed_operand (&ops[1], mem);
6147 create_convert_operand_to (&ops[2], val, mode, true);
6148 create_integer_operand (&ops[3], model);
6149 create_integer_operand (&ops[4], integer_onep (flag));
6150 if (maybe_expand_insn (icode, 5, ops))
6151 return;
6152
6153 rtx bitval = val;
6154 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6155 val, NULL_RTX, true, OPTAB_DIRECT);
6156 rtx maskval = val;
6157 if (code == AND)
6158 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6159 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6160 code, model, false);
6161 if (integer_onep (flag))
6162 {
6163 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6164 NULL_RTX, true, OPTAB_DIRECT);
6165 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6166 true, OPTAB_DIRECT);
6167 }
6168 else
6169 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6170 OPTAB_DIRECT);
6171 if (result != target)
6172 emit_move_insn (target, result);
6173 }
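
/* Illustrative origin of this internal function (not part of GCC itself):
   earlier middle-end passes recognize idioms such as

     mask = 1 << bit;
     old = __atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST);
     return (old & mask) != 0;

   and rewrite them to IFN_ATOMIC_BIT_TEST_AND_SET, so that the expansion
   above can use a single bit-test-and-set instruction when the target
   provides one and fall back to the fetch_op sequence otherwise.  */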
6174
6175 /* Expand an atomic clear operation.
6176 void _atomic_clear (BOOL *obj, enum memmodel)
6177 EXP is the call expression. */
6178
6179 static rtx
6180 expand_builtin_atomic_clear (tree exp)
6181 {
6182 machine_mode mode;
6183 rtx mem, ret;
6184 enum memmodel model;
6185
6186 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6187 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6188 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6189
6190 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6191 {
6192 source_location loc
6193 = expansion_point_location_if_in_system_header (input_location);
6194 warning_at (loc, OPT_Winvalid_memory_model,
6195 "invalid memory model for %<__atomic_store%>");
6196 model = MEMMODEL_SEQ_CST;
6197 }
6198
6199 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6200 Failing that, a store is issued by __atomic_store. The only way this can
6201 fail is if the bool type is larger than a word size. Unlikely, but
6202 handle it anyway for completeness. Assume a single threaded model since
6203 there is no atomic support in this case, and no barriers are required. */
6204 ret = expand_atomic_store (mem, const0_rtx, model, true);
6205 if (!ret)
6206 emit_move_insn (mem, const0_rtx);
6207 return const0_rtx;
6208 }
6209
6210 /* Expand an atomic test_and_set operation.
6211 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6212 EXP is the call expression. */
6213
6214 static rtx
6215 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6216 {
6217 rtx mem;
6218 enum memmodel model;
6219 machine_mode mode;
6220
6221 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6222 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6223 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6224
6225 return expand_atomic_test_and_set (target, mem, model);
6226 }
6227
6228
6229 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6230 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6231
6232 static tree
6233 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6234 {
6235 int size;
6236 machine_mode mode;
6237 unsigned int mode_align, type_align;
6238
6239 if (TREE_CODE (arg0) != INTEGER_CST)
6240 return NULL_TREE;
6241
6242 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6243 mode = mode_for_size (size, MODE_INT, 0);
6244 mode_align = GET_MODE_ALIGNMENT (mode);
6245
6246 if (TREE_CODE (arg1) == INTEGER_CST)
6247 {
6248 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6249
6250 /* Either this argument is null, or it's a fake pointer encoding
6251 the alignment of the object. */
6252 val = least_bit_hwi (val);
6253 val *= BITS_PER_UNIT;
6254
6255 if (val == 0 || mode_align < val)
6256 type_align = mode_align;
6257 else
6258 type_align = val;
6259 }
6260 else
6261 {
6262 tree ttype = TREE_TYPE (arg1);
6263
6264 /* This function is usually invoked and folded immediately by the front
6265 end before anything else has a chance to look at it. The pointer
6266 parameter at this point is usually cast to a void *, so check for that
6267 and look past the cast. */
6268 if (CONVERT_EXPR_P (arg1)
6269 && POINTER_TYPE_P (ttype)
6270 && VOID_TYPE_P (TREE_TYPE (ttype))
6271 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6272 arg1 = TREE_OPERAND (arg1, 0);
6273
6274 ttype = TREE_TYPE (arg1);
6275 gcc_assert (POINTER_TYPE_P (ttype));
6276
6277 /* Get the underlying type of the object. */
6278 ttype = TREE_TYPE (ttype);
6279 type_align = TYPE_ALIGN (ttype);
6280 }
6281
6282 /* If the object has smaller alignment, the lock free routines cannot
6283 be used. */
6284 if (type_align < mode_align)
6285 return boolean_false_node;
6286
6287 /* Check if a compare_and_swap pattern exists for the mode which represents
6288 the required size. The pattern is not allowed to fail, so the existence
6289 of the pattern indicates support is present. Also require that an
6290 atomic load exists for the required size. */
6291 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6292 return boolean_true_node;
6293 else
6294 return boolean_false_node;
6295 }
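
/* Illustrative behavior (not part of GCC itself): front ends typically fold

     __atomic_always_lock_free (sizeof (long long), 0)

   through this routine using the typical alignment of the mode, while

     __atomic_always_lock_free (sizeof (int), (void *) 2)

   encodes an object known to be only 2-byte aligned and therefore yields
   false whenever the lock-free instructions require stricter alignment.  */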
6296
6297 /* Return true if the parameters to call EXP represent an object which will
6298 always generate lock free instructions. The first argument represents the
6299 size of the object, and the second parameter is a pointer to the object
6300 itself. If NULL is passed for the object, then the result is based on
6301 typical alignment for an object of the specified size. Otherwise return
6302 false. */
6303
6304 static rtx
6305 expand_builtin_atomic_always_lock_free (tree exp)
6306 {
6307 tree size;
6308 tree arg0 = CALL_EXPR_ARG (exp, 0);
6309 tree arg1 = CALL_EXPR_ARG (exp, 1);
6310
6311 if (TREE_CODE (arg0) != INTEGER_CST)
6312 {
6313 error ("non-constant argument 1 to __atomic_always_lock_free");
6314 return const0_rtx;
6315 }
6316
6317 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6318 if (size == boolean_true_node)
6319 return const1_rtx;
6320 return const0_rtx;
6321 }
6322
6323 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6324 is lock free on this architecture. */
6325
6326 static tree
6327 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6328 {
6329 if (!flag_inline_atomics)
6330 return NULL_TREE;
6331
6332 /* If it isn't always lock free, don't generate a result. */
6333 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6334 return boolean_true_node;
6335
6336 return NULL_TREE;
6337 }
6338
6339 /* Return true if the parameters to call EXP represent an object which will
6340 always generate lock free instructions. The first argument represents the
6341 size of the object, and the second parameter is a pointer to the object
6342 itself. If NULL is passed for the object, then the result is based on
6343 typical alignment for an object of the specified size. Otherwise return
6344 NULL_RTX. */
6345
6346 static rtx
6347 expand_builtin_atomic_is_lock_free (tree exp)
6348 {
6349 tree size;
6350 tree arg0 = CALL_EXPR_ARG (exp, 0);
6351 tree arg1 = CALL_EXPR_ARG (exp, 1);
6352
6353 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6354 {
6355 error ("non-integer argument 1 to __atomic_is_lock_free");
6356 return NULL_RTX;
6357 }
6358
6359 if (!flag_inline_atomics)
6360 return NULL_RTX;
6361
6362 /* If the value is known at compile time, return the RTX for it. */
6363 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6364 if (size == boolean_true_node)
6365 return const1_rtx;
6366
6367 return NULL_RTX;
6368 }
6369
6370 /* Expand the __atomic_thread_fence intrinsic:
6371 void __atomic_thread_fence (enum memmodel)
6372 EXP is the CALL_EXPR. */
6373
6374 static void
6375 expand_builtin_atomic_thread_fence (tree exp)
6376 {
6377 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6378 expand_mem_thread_fence (model);
6379 }
6380
6381 /* Expand the __atomic_signal_fence intrinsic:
6382 void __atomic_signal_fence (enum memmodel)
6383 EXP is the CALL_EXPR. */
6384
6385 static void
6386 expand_builtin_atomic_signal_fence (tree exp)
6387 {
6388 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6389 expand_mem_signal_fence (model);
6390 }
6391
6392 /* Expand the __sync_synchronize intrinsic. */
6393
6394 static void
6395 expand_builtin_sync_synchronize (void)
6396 {
6397 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6398 }
6399
6400 static rtx
6401 expand_builtin_thread_pointer (tree exp, rtx target)
6402 {
6403 enum insn_code icode;
6404 if (!validate_arglist (exp, VOID_TYPE))
6405 return const0_rtx;
6406 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6407 if (icode != CODE_FOR_nothing)
6408 {
6409 struct expand_operand op;
6410 /* If the target is not suitable, create a new target. */
6411 if (target == NULL_RTX
6412 || !REG_P (target)
6413 || GET_MODE (target) != Pmode)
6414 target = gen_reg_rtx (Pmode);
6415 create_output_operand (&op, target, Pmode);
6416 expand_insn (icode, 1, &op);
6417 return target;
6418 }
6419 error ("__builtin_thread_pointer is not supported on this target");
6420 return const0_rtx;
6421 }
6422
6423 static void
6424 expand_builtin_set_thread_pointer (tree exp)
6425 {
6426 enum insn_code icode;
6427 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6428 return;
6429 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6430 if (icode != CODE_FOR_nothing)
6431 {
6432 struct expand_operand op;
6433 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6434 Pmode, EXPAND_NORMAL);
6435 create_input_operand (&op, val, Pmode);
6436 expand_insn (icode, 1, &op);
6437 return;
6438 }
6439 error ("__builtin_set_thread_pointer is not supported on this target");
6440 }
6441
6442 \f
6443 /* Emit code to restore the current value of stack. */
6444
6445 static void
6446 expand_stack_restore (tree var)
6447 {
6448 rtx_insn *prev;
6449 rtx sa = expand_normal (var);
6450
6451 sa = convert_memory_address (Pmode, sa);
6452
6453 prev = get_last_insn ();
6454 emit_stack_restore (SAVE_BLOCK, sa);
6455
6456 record_new_stack_level ();
6457
6458 fixup_args_size_notes (prev, get_last_insn (), 0);
6459 }
6460
6461 /* Emit code to save the current value of stack. */
6462
6463 static rtx
6464 expand_stack_save (void)
6465 {
6466 rtx ret = NULL_RTX;
6467
6468 emit_stack_save (SAVE_BLOCK, &ret);
6469 return ret;
6470 }
6471
6472
6473 /* Expand an expression EXP that calls a built-in function,
6474 with result going to TARGET if that's convenient
6475 (and in mode MODE if that's convenient).
6476 SUBTARGET may be used as the target for computing one of EXP's operands.
6477 IGNORE is nonzero if the value is to be ignored. */
6478
6479 rtx
6480 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6481 int ignore)
6482 {
6483 tree fndecl = get_callee_fndecl (exp);
6484 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6485 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6486 int flags;
6487
6488 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6489 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6490
6491 /* When ASan is enabled, we don't want to expand some memory/string
6492 builtins and rely on libsanitizer's hooks. This allows us to avoid
6493 redundant checks and be sure that a possible overflow will be detected
6494 by ASan. */
6495
6496 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6497 return expand_call (exp, target, ignore);
6498
6499 /* When not optimizing, generate calls to library functions for a certain
6500 set of builtins. */
6501 if (!optimize
6502 && !called_as_built_in (fndecl)
6503 && fcode != BUILT_IN_FORK
6504 && fcode != BUILT_IN_EXECL
6505 && fcode != BUILT_IN_EXECV
6506 && fcode != BUILT_IN_EXECLP
6507 && fcode != BUILT_IN_EXECLE
6508 && fcode != BUILT_IN_EXECVP
6509 && fcode != BUILT_IN_EXECVE
6510 && fcode != BUILT_IN_ALLOCA
6511 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6512 && fcode != BUILT_IN_FREE
6513 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6514 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6515 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6516 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6517 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6518 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6519 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6520 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6521 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6522 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6523 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6524 && fcode != BUILT_IN_CHKP_BNDRET)
6525 return expand_call (exp, target, ignore);
6526
6527 /* The built-in function expanders test for target == const0_rtx
6528 to determine whether the function's result will be ignored. */
6529 if (ignore)
6530 target = const0_rtx;
6531
6532 /* If the result of a pure or const built-in function is ignored, and
6533 none of its arguments are volatile, we can avoid expanding the
6534 built-in call and just evaluate the arguments for side-effects. */
6535 if (target == const0_rtx
6536 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6537 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6538 {
6539 bool volatilep = false;
6540 tree arg;
6541 call_expr_arg_iterator iter;
6542
6543 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6544 if (TREE_THIS_VOLATILE (arg))
6545 {
6546 volatilep = true;
6547 break;
6548 }
6549
6550 if (! volatilep)
6551 {
6552 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6553 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6554 return const0_rtx;
6555 }
6556 }
6557
6558 /* expand_builtin_with_bounds is supposed to be used for
6559 instrumented builtin calls. */
6560 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6561
6562 switch (fcode)
6563 {
6564 CASE_FLT_FN (BUILT_IN_FABS):
6565 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6566 case BUILT_IN_FABSD32:
6567 case BUILT_IN_FABSD64:
6568 case BUILT_IN_FABSD128:
6569 target = expand_builtin_fabs (exp, target, subtarget);
6570 if (target)
6571 return target;
6572 break;
6573
6574 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6575 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6576 target = expand_builtin_copysign (exp, target, subtarget);
6577 if (target)
6578 return target;
6579 break;
6580
6581 /* Just do a normal library call if we were unable to fold
6582 the values. */
6583 CASE_FLT_FN (BUILT_IN_CABS):
6584 break;
6585
6586 CASE_FLT_FN (BUILT_IN_FMA):
6587 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6588 if (target)
6589 return target;
6590 break;
6591
6592 CASE_FLT_FN (BUILT_IN_ILOGB):
6593 if (! flag_unsafe_math_optimizations)
6594 break;
6595
6596 target = expand_builtin_interclass_mathfn (exp, target);
6597 if (target)
6598 return target;
6599 break;
6600
6601 CASE_FLT_FN (BUILT_IN_ICEIL):
6602 CASE_FLT_FN (BUILT_IN_LCEIL):
6603 CASE_FLT_FN (BUILT_IN_LLCEIL):
6604 CASE_FLT_FN (BUILT_IN_LFLOOR):
6605 CASE_FLT_FN (BUILT_IN_IFLOOR):
6606 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6607 target = expand_builtin_int_roundingfn (exp, target);
6608 if (target)
6609 return target;
6610 break;
6611
6612 CASE_FLT_FN (BUILT_IN_IRINT):
6613 CASE_FLT_FN (BUILT_IN_LRINT):
6614 CASE_FLT_FN (BUILT_IN_LLRINT):
6615 CASE_FLT_FN (BUILT_IN_IROUND):
6616 CASE_FLT_FN (BUILT_IN_LROUND):
6617 CASE_FLT_FN (BUILT_IN_LLROUND):
6618 target = expand_builtin_int_roundingfn_2 (exp, target);
6619 if (target)
6620 return target;
6621 break;
6622
6623 CASE_FLT_FN (BUILT_IN_POWI):
6624 target = expand_builtin_powi (exp, target);
6625 if (target)
6626 return target;
6627 break;
6628
6629 CASE_FLT_FN (BUILT_IN_CEXPI):
6630 target = expand_builtin_cexpi (exp, target);
6631 gcc_assert (target);
6632 return target;
6633
6634 CASE_FLT_FN (BUILT_IN_SIN):
6635 CASE_FLT_FN (BUILT_IN_COS):
6636 if (! flag_unsafe_math_optimizations)
6637 break;
6638 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6639 if (target)
6640 return target;
6641 break;
6642
6643 CASE_FLT_FN (BUILT_IN_SINCOS):
6644 if (! flag_unsafe_math_optimizations)
6645 break;
6646 target = expand_builtin_sincos (exp);
6647 if (target)
6648 return target;
6649 break;
6650
6651 case BUILT_IN_APPLY_ARGS:
6652 return expand_builtin_apply_args ();
6653
6654 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6655 FUNCTION with a copy of the parameters described by
6656 ARGUMENTS, and ARGSIZE. It returns a block of memory
6657 allocated on the stack into which is stored all the registers
6658 that might possibly be used for returning the result of a
6659 function. ARGUMENTS is the value returned by
6660 __builtin_apply_args. ARGSIZE is the number of bytes of
6661 arguments that must be copied. ??? How should this value be
6662 computed? We'll also need a safe worst case value for varargs
6663 functions. */
6664 case BUILT_IN_APPLY:
6665 if (!validate_arglist (exp, POINTER_TYPE,
6666 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6667 && !validate_arglist (exp, REFERENCE_TYPE,
6668 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6669 return const0_rtx;
6670 else
6671 {
6672 rtx ops[3];
6673
6674 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6675 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6676 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6677
6678 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6679 }
6680
6681 /* __builtin_return (RESULT) causes the function to return the
6682 value described by RESULT. RESULT is address of the block of
6683 memory returned by __builtin_apply. */
6684 case BUILT_IN_RETURN:
6685 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6686 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6687 return const0_rtx;
6688
6689 case BUILT_IN_SAVEREGS:
6690 return expand_builtin_saveregs ();
6691
6692 case BUILT_IN_VA_ARG_PACK:
6693 /* All valid uses of __builtin_va_arg_pack () are removed during
6694 inlining. */
6695 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6696 return const0_rtx;
6697
6698 case BUILT_IN_VA_ARG_PACK_LEN:
6699 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6700 inlining. */
6701 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6702 return const0_rtx;
6703
6704 /* Return the address of the first anonymous stack arg. */
6705 case BUILT_IN_NEXT_ARG:
6706 if (fold_builtin_next_arg (exp, false))
6707 return const0_rtx;
6708 return expand_builtin_next_arg ();
6709
6710 case BUILT_IN_CLEAR_CACHE:
6711 target = expand_builtin___clear_cache (exp);
6712 if (target)
6713 return target;
6714 break;
6715
6716 case BUILT_IN_CLASSIFY_TYPE:
6717 return expand_builtin_classify_type (exp);
6718
6719 case BUILT_IN_CONSTANT_P:
6720 return const0_rtx;
6721
6722 case BUILT_IN_FRAME_ADDRESS:
6723 case BUILT_IN_RETURN_ADDRESS:
6724 return expand_builtin_frame_address (fndecl, exp);
6725
6726 /* Returns the address of the area where the structure is returned.
6727 0 otherwise. */
6728 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6729 if (call_expr_nargs (exp) != 0
6730 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6731 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6732 return const0_rtx;
6733 else
6734 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6735
6736 case BUILT_IN_ALLOCA:
6737 case BUILT_IN_ALLOCA_WITH_ALIGN:
6738 target = expand_builtin_alloca (exp);
6739 if (target)
6740 return target;
6741 break;
6742
6743 case BUILT_IN_STACK_SAVE:
6744 return expand_stack_save ();
6745
6746 case BUILT_IN_STACK_RESTORE:
6747 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6748 return const0_rtx;
6749
6750 case BUILT_IN_BSWAP16:
6751 case BUILT_IN_BSWAP32:
6752 case BUILT_IN_BSWAP64:
6753 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6754 if (target)
6755 return target;
6756 break;
6757
6758 CASE_INT_FN (BUILT_IN_FFS):
6759 target = expand_builtin_unop (target_mode, exp, target,
6760 subtarget, ffs_optab);
6761 if (target)
6762 return target;
6763 break;
6764
6765 CASE_INT_FN (BUILT_IN_CLZ):
6766 target = expand_builtin_unop (target_mode, exp, target,
6767 subtarget, clz_optab);
6768 if (target)
6769 return target;
6770 break;
6771
6772 CASE_INT_FN (BUILT_IN_CTZ):
6773 target = expand_builtin_unop (target_mode, exp, target,
6774 subtarget, ctz_optab);
6775 if (target)
6776 return target;
6777 break;
6778
6779 CASE_INT_FN (BUILT_IN_CLRSB):
6780 target = expand_builtin_unop (target_mode, exp, target,
6781 subtarget, clrsb_optab);
6782 if (target)
6783 return target;
6784 break;
6785
6786 CASE_INT_FN (BUILT_IN_POPCOUNT):
6787 target = expand_builtin_unop (target_mode, exp, target,
6788 subtarget, popcount_optab);
6789 if (target)
6790 return target;
6791 break;
6792
6793 CASE_INT_FN (BUILT_IN_PARITY):
6794 target = expand_builtin_unop (target_mode, exp, target,
6795 subtarget, parity_optab);
6796 if (target)
6797 return target;
6798 break;
6799
6800 case BUILT_IN_STRLEN:
6801 target = expand_builtin_strlen (exp, target, target_mode);
6802 if (target)
6803 return target;
6804 break;
6805
6806 case BUILT_IN_STRCAT:
6807 target = expand_builtin_strcat (exp, target);
6808 if (target)
6809 return target;
6810 break;
6811
6812 case BUILT_IN_STRCPY:
6813 target = expand_builtin_strcpy (exp, target);
6814 if (target)
6815 return target;
6816 break;
6817
6818 case BUILT_IN_STRNCAT:
6819 target = expand_builtin_strncat (exp, target);
6820 if (target)
6821 return target;
6822 break;
6823
6824 case BUILT_IN_STRNCPY:
6825 target = expand_builtin_strncpy (exp, target);
6826 if (target)
6827 return target;
6828 break;
6829
6830 case BUILT_IN_STPCPY:
6831 target = expand_builtin_stpcpy (exp, target, mode);
6832 if (target)
6833 return target;
6834 break;
6835
6836 case BUILT_IN_STPNCPY:
6837 target = expand_builtin_stpncpy (exp, target);
6838 if (target)
6839 return target;
6840 break;
6841
6842 case BUILT_IN_MEMCHR:
6843 target = expand_builtin_memchr (exp, target);
6844 if (target)
6845 return target;
6846 break;
6847
6848 case BUILT_IN_MEMCPY:
6849 target = expand_builtin_memcpy (exp, target);
6850 if (target)
6851 return target;
6852 break;
6853
6854 case BUILT_IN_MEMMOVE:
6855 target = expand_builtin_memmove (exp, target);
6856 if (target)
6857 return target;
6858 break;
6859
6860 case BUILT_IN_MEMPCPY:
6861 target = expand_builtin_mempcpy (exp, target, mode);
6862 if (target)
6863 return target;
6864 break;
6865
6866 case BUILT_IN_MEMSET:
6867 target = expand_builtin_memset (exp, target, mode);
6868 if (target)
6869 return target;
6870 break;
6871
6872 case BUILT_IN_BZERO:
6873 target = expand_builtin_bzero (exp);
6874 if (target)
6875 return target;
6876 break;
6877
6878 case BUILT_IN_STRCMP:
6879 target = expand_builtin_strcmp (exp, target);
6880 if (target)
6881 return target;
6882 break;
6883
6884 case BUILT_IN_STRNCMP:
6885 target = expand_builtin_strncmp (exp, target, mode);
6886 if (target)
6887 return target;
6888 break;
6889
6890 case BUILT_IN_BCMP:
6891 case BUILT_IN_MEMCMP:
6892 case BUILT_IN_MEMCMP_EQ:
6893 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6894 if (target)
6895 return target;
6896 if (fcode == BUILT_IN_MEMCMP_EQ)
6897 {
6898 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6899 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6900 }
6901 break;
6902
6903 CASE_FLT_FN (BUILT_IN_ISINF):
6904 case BUILT_IN_ISNAND32:
6905 case BUILT_IN_ISNAND64:
6906 case BUILT_IN_ISNAND128:
6907 case BUILT_IN_ISNAN:
6908 case BUILT_IN_ISINFD32:
6909 case BUILT_IN_ISINFD64:
6910 case BUILT_IN_ISINFD128:
6911 case BUILT_IN_ISNORMAL:
6912 case BUILT_IN_ISZERO:
6913 case BUILT_IN_ISSUBNORMAL:
6914 case BUILT_IN_FPCLASSIFY:
6915 case BUILT_IN_SETJMP:
6916 CASE_FLT_FN (BUILT_IN_FINITE):
6917 case BUILT_IN_FINITED32:
6918 case BUILT_IN_FINITED64:
6919 case BUILT_IN_FINITED128:
6920 case BUILT_IN_ISFINITE:
6921 /* These should have been lowered to the builtins in gimple-low.c. */
6922 gcc_unreachable ();
6923
6924 case BUILT_IN_SETJMP_SETUP:
6925 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6926 and the receiver label. */
6927 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6928 {
6929 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6930 VOIDmode, EXPAND_NORMAL);
6931 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6932 rtx_insn *label_r = label_rtx (label);
6933
6934 /* This is copied from the handling of non-local gotos. */
6935 expand_builtin_setjmp_setup (buf_addr, label_r);
6936 nonlocal_goto_handler_labels
6937 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6938 nonlocal_goto_handler_labels);
6939 /* ??? Do not let expand_label treat us as such since we would
6940 not want to be both on the list of non-local labels and on
6941 the list of forced labels. */
6942 FORCED_LABEL (label) = 0;
6943 return const0_rtx;
6944 }
6945 break;
6946
6947 case BUILT_IN_SETJMP_RECEIVER:
6948 /* __builtin_setjmp_receiver is passed the receiver label. */
6949 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6950 {
6951 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6952 rtx_insn *label_r = label_rtx (label);
6953
6954 expand_builtin_setjmp_receiver (label_r);
6955 return const0_rtx;
6956 }
6957 break;
6958
6959 /* __builtin_longjmp is passed a pointer to an array of five words.
6960 It's similar to the C library longjmp function but works with
6961 __builtin_setjmp above. */
6962 case BUILT_IN_LONGJMP:
6963 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6964 {
6965 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6966 VOIDmode, EXPAND_NORMAL);
6967 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6968
6969 if (value != const1_rtx)
6970 {
6971 error ("%<__builtin_longjmp%> second argument must be 1");
6972 return const0_rtx;
6973 }
6974
6975 expand_builtin_longjmp (buf_addr, value);
6976 return const0_rtx;
6977 }
6978 break;
6979
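    /* Editorial illustration (not from the original sources): the jump buffer
       is an array of five words and the second argument of __builtin_longjmp
       must be the literal 1, e.g.

         intptr_t buf[5];
         if (__builtin_setjmp (buf) == 0)
           __builtin_longjmp (buf, 1);

       Passing anything other than 1 is rejected with the error emitted
       above.  */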
6980 case BUILT_IN_NONLOCAL_GOTO:
6981 target = expand_builtin_nonlocal_goto (exp);
6982 if (target)
6983 return target;
6984 break;
6985
6986 /* This updates the setjmp buffer that is its argument with the value
6987 of the current stack pointer. */
6988 case BUILT_IN_UPDATE_SETJMP_BUF:
6989 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6990 {
6991 rtx buf_addr
6992 = expand_normal (CALL_EXPR_ARG (exp, 0));
6993
6994 expand_builtin_update_setjmp_buf (buf_addr);
6995 return const0_rtx;
6996 }
6997 break;
6998
6999 case BUILT_IN_TRAP:
7000 expand_builtin_trap ();
7001 return const0_rtx;
7002
7003 case BUILT_IN_UNREACHABLE:
7004 expand_builtin_unreachable ();
7005 return const0_rtx;
7006
7007 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7008 case BUILT_IN_SIGNBITD32:
7009 case BUILT_IN_SIGNBITD64:
7010 case BUILT_IN_SIGNBITD128:
7011 target = expand_builtin_signbit (exp, target);
7012 if (target)
7013 return target;
7014 break;
7015
7016 /* Various hooks for the DWARF 2 __throw routine. */
7017 case BUILT_IN_UNWIND_INIT:
7018 expand_builtin_unwind_init ();
7019 return const0_rtx;
7020 case BUILT_IN_DWARF_CFA:
7021 return virtual_cfa_rtx;
7022 #ifdef DWARF2_UNWIND_INFO
7023 case BUILT_IN_DWARF_SP_COLUMN:
7024 return expand_builtin_dwarf_sp_column ();
7025 case BUILT_IN_INIT_DWARF_REG_SIZES:
7026 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7027 return const0_rtx;
7028 #endif
7029 case BUILT_IN_FROB_RETURN_ADDR:
7030 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7031 case BUILT_IN_EXTRACT_RETURN_ADDR:
7032 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7033 case BUILT_IN_EH_RETURN:
7034 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7035 CALL_EXPR_ARG (exp, 1));
7036 return const0_rtx;
7037 case BUILT_IN_EH_RETURN_DATA_REGNO:
7038 return expand_builtin_eh_return_data_regno (exp);
7039 case BUILT_IN_EXTEND_POINTER:
7040 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7041 case BUILT_IN_EH_POINTER:
7042 return expand_builtin_eh_pointer (exp);
7043 case BUILT_IN_EH_FILTER:
7044 return expand_builtin_eh_filter (exp);
7045 case BUILT_IN_EH_COPY_VALUES:
7046 return expand_builtin_eh_copy_values (exp);
7047
7048 case BUILT_IN_VA_START:
7049 return expand_builtin_va_start (exp);
7050 case BUILT_IN_VA_END:
7051 return expand_builtin_va_end (exp);
7052 case BUILT_IN_VA_COPY:
7053 return expand_builtin_va_copy (exp);
7054 case BUILT_IN_EXPECT:
7055 return expand_builtin_expect (exp, target);
7056 case BUILT_IN_ASSUME_ALIGNED:
7057 return expand_builtin_assume_aligned (exp, target);
7058 case BUILT_IN_PREFETCH:
7059 expand_builtin_prefetch (exp);
7060 return const0_rtx;
7061
7062 case BUILT_IN_INIT_TRAMPOLINE:
7063 return expand_builtin_init_trampoline (exp, true);
7064 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7065 return expand_builtin_init_trampoline (exp, false);
7066 case BUILT_IN_ADJUST_TRAMPOLINE:
7067 return expand_builtin_adjust_trampoline (exp);
7068
7069 case BUILT_IN_INIT_DESCRIPTOR:
7070 return expand_builtin_init_descriptor (exp);
7071 case BUILT_IN_ADJUST_DESCRIPTOR:
7072 return expand_builtin_adjust_descriptor (exp);
7073
7074 case BUILT_IN_FORK:
7075 case BUILT_IN_EXECL:
7076 case BUILT_IN_EXECV:
7077 case BUILT_IN_EXECLP:
7078 case BUILT_IN_EXECLE:
7079 case BUILT_IN_EXECVP:
7080 case BUILT_IN_EXECVE:
7081 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7082 if (target)
7083 return target;
7084 break;
7085
7086 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7087 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7088 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7089 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7090 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7091 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7092 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7093 if (target)
7094 return target;
7095 break;
7096
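    /* Editorial illustration: the fetch_and_OP forms return the value the
       memory location held before the operation, whereas the OP_and_fetch
       forms handled further below return the updated value.  Assuming
       int v = 5,

         __sync_fetch_and_add (&v, 1)   returns 5, v becomes 6
         __sync_add_and_fetch (&v, 1)   then returns 7, v becomes 7

       The fourth (AFTER) argument of expand_builtin_sync_operation, false
       here and true below, selects between the two behaviours.  */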
7097 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7098 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7099 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7100 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7101 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7102 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7103 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7104 if (target)
7105 return target;
7106 break;
7107
7108 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7109 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7110 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7111 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7112 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7113 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7114 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7115 if (target)
7116 return target;
7117 break;
7118
7119 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7120 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7121 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7122 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7123 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7124 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7125 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7126 if (target)
7127 return target;
7128 break;
7129
7130 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7131 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7132 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7133 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7134 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7135 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7136 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7137 if (target)
7138 return target;
7139 break;
7140
7141 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7142 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7143 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7144 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7145 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7146 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7147 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7148 if (target)
7149 return target;
7150 break;
7151
7152 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7153 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7154 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7155 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7156 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7157 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7158 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7159 if (target)
7160 return target;
7161 break;
7162
7163 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7164 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7165 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7166 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7167 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7168 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7169 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7170 if (target)
7171 return target;
7172 break;
7173
7174 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7175 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7176 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7177 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7178 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7179 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7180 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7181 if (target)
7182 return target;
7183 break;
7184
7185 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7186 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7187 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7188 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7189 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7190 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7191 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7192 if (target)
7193 return target;
7194 break;
7195
7196 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7197 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7198 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7199 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7200 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7201 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7202 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7203 if (target)
7204 return target;
7205 break;
7206
7207 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7208 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7209 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7210 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7211 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7212 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7213 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7214 if (target)
7215 return target;
7216 break;
7217
7218 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7219 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7220 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7221 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7222 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7223 if (mode == VOIDmode)
7224 mode = TYPE_MODE (boolean_type_node);
7225 if (!target || !register_operand (target, mode))
7226 target = gen_reg_rtx (mode);
7227
7228 mode = get_builtin_sync_mode
7229 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7230 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7231 if (target)
7232 return target;
7233 break;
7234
7235 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7236 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7237 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7238 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7239 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7240 mode = get_builtin_sync_mode
7241 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7242 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7243 if (target)
7244 return target;
7245 break;
7246
7247 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7248 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7249 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7250 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7251 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7252 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7253 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7254 if (target)
7255 return target;
7256 break;
7257
7258 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7259 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7260 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7261 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7262 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7263 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7264 expand_builtin_sync_lock_release (mode, exp);
7265 return const0_rtx;
7266
7267 case BUILT_IN_SYNC_SYNCHRONIZE:
7268 expand_builtin_sync_synchronize ();
7269 return const0_rtx;
7270
7271 case BUILT_IN_ATOMIC_EXCHANGE_1:
7272 case BUILT_IN_ATOMIC_EXCHANGE_2:
7273 case BUILT_IN_ATOMIC_EXCHANGE_4:
7274 case BUILT_IN_ATOMIC_EXCHANGE_8:
7275 case BUILT_IN_ATOMIC_EXCHANGE_16:
7276 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7277 target = expand_builtin_atomic_exchange (mode, exp, target);
7278 if (target)
7279 return target;
7280 break;
7281
7282 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7283 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7284 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7285 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7286 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7287 {
7288 unsigned int nargs, z;
7289 vec<tree, va_gc> *vec;
7290
7291 mode =
7292 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7293 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7294 if (target)
7295 return target;
7296
7297 /* If this is turned into an external library call, the weak parameter
7298 must be dropped to match the expected parameter list. */
7299 nargs = call_expr_nargs (exp);
7300 vec_alloc (vec, nargs - 1);
7301 for (z = 0; z < 3; z++)
7302 vec->quick_push (CALL_EXPR_ARG (exp, z));
7303 /* Skip the boolean weak parameter. */
7304 for (z = 4; z < 6; z++)
7305 vec->quick_push (CALL_EXPR_ARG (exp, z));
7306 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7307 break;
7308 }
7309
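    /* Editorial illustration: a source-level call such as

         __atomic_compare_exchange_n (p, &expected, desired, weak,
                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)

       is resolved to one of the sized builtins handled here.  If inline
       expansion fails, it falls through to an out-of-line library call whose
       prototype has no weak parameter, so the loop above rebuilds the
       argument list keeping arguments 0-2 and the two memory orders while
       dropping the boolean weak flag (argument 3).  */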
7310 case BUILT_IN_ATOMIC_LOAD_1:
7311 case BUILT_IN_ATOMIC_LOAD_2:
7312 case BUILT_IN_ATOMIC_LOAD_4:
7313 case BUILT_IN_ATOMIC_LOAD_8:
7314 case BUILT_IN_ATOMIC_LOAD_16:
7315 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7316 target = expand_builtin_atomic_load (mode, exp, target);
7317 if (target)
7318 return target;
7319 break;
7320
7321 case BUILT_IN_ATOMIC_STORE_1:
7322 case BUILT_IN_ATOMIC_STORE_2:
7323 case BUILT_IN_ATOMIC_STORE_4:
7324 case BUILT_IN_ATOMIC_STORE_8:
7325 case BUILT_IN_ATOMIC_STORE_16:
7326 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7327 target = expand_builtin_atomic_store (mode, exp);
7328 if (target)
7329 return const0_rtx;
7330 break;
7331
7332 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7333 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7334 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7335 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7336 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7337 {
7338 enum built_in_function lib;
7339 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7340 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7341 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7342 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7343 ignore, lib);
7344 if (target)
7345 return target;
7346 break;
7347 }
7348 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7349 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7350 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7351 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7352 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7353 {
7354 enum built_in_function lib;
7355 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7356 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7357 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7358 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7359 ignore, lib);
7360 if (target)
7361 return target;
7362 break;
7363 }
7364 case BUILT_IN_ATOMIC_AND_FETCH_1:
7365 case BUILT_IN_ATOMIC_AND_FETCH_2:
7366 case BUILT_IN_ATOMIC_AND_FETCH_4:
7367 case BUILT_IN_ATOMIC_AND_FETCH_8:
7368 case BUILT_IN_ATOMIC_AND_FETCH_16:
7369 {
7370 enum built_in_function lib;
7371 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7372 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7373 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7374 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7375 ignore, lib);
7376 if (target)
7377 return target;
7378 break;
7379 }
7380 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7381 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7382 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7383 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7384 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7385 {
7386 enum built_in_function lib;
7387 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7388 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7389 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7390 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7391 ignore, lib);
7392 if (target)
7393 return target;
7394 break;
7395 }
7396 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7397 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7398 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7399 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7400 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7401 {
7402 enum built_in_function lib;
7403 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7404 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7405 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7406 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7407 ignore, lib);
7408 if (target)
7409 return target;
7410 break;
7411 }
7412 case BUILT_IN_ATOMIC_OR_FETCH_1:
7413 case BUILT_IN_ATOMIC_OR_FETCH_2:
7414 case BUILT_IN_ATOMIC_OR_FETCH_4:
7415 case BUILT_IN_ATOMIC_OR_FETCH_8:
7416 case BUILT_IN_ATOMIC_OR_FETCH_16:
7417 {
7418 enum built_in_function lib;
7419 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7420 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7421 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7422 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7423 ignore, lib);
7424 if (target)
7425 return target;
7426 break;
7427 }
7428 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7429 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7430 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7431 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7432 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7433 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7434 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7435 ignore, BUILT_IN_NONE);
7436 if (target)
7437 return target;
7438 break;
7439
7440 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7441 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7442 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7443 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7444 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7445 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7446 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7447 ignore, BUILT_IN_NONE);
7448 if (target)
7449 return target;
7450 break;
7451
7452 case BUILT_IN_ATOMIC_FETCH_AND_1:
7453 case BUILT_IN_ATOMIC_FETCH_AND_2:
7454 case BUILT_IN_ATOMIC_FETCH_AND_4:
7455 case BUILT_IN_ATOMIC_FETCH_AND_8:
7456 case BUILT_IN_ATOMIC_FETCH_AND_16:
7457 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7458 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7459 ignore, BUILT_IN_NONE);
7460 if (target)
7461 return target;
7462 break;
7463
7464 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7465 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7466 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7467 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7468 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7469 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7470 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7471 ignore, BUILT_IN_NONE);
7472 if (target)
7473 return target;
7474 break;
7475
7476 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7477 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7478 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7479 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7480 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7481 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7482 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7483 ignore, BUILT_IN_NONE);
7484 if (target)
7485 return target;
7486 break;
7487
7488 case BUILT_IN_ATOMIC_FETCH_OR_1:
7489 case BUILT_IN_ATOMIC_FETCH_OR_2:
7490 case BUILT_IN_ATOMIC_FETCH_OR_4:
7491 case BUILT_IN_ATOMIC_FETCH_OR_8:
7492 case BUILT_IN_ATOMIC_FETCH_OR_16:
7493 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7494 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7495 ignore, BUILT_IN_NONE);
7496 if (target)
7497 return target;
7498 break;
7499
7500 case BUILT_IN_ATOMIC_TEST_AND_SET:
7501 return expand_builtin_atomic_test_and_set (exp, target);
7502
7503 case BUILT_IN_ATOMIC_CLEAR:
7504 return expand_builtin_atomic_clear (exp);
7505
7506 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7507 return expand_builtin_atomic_always_lock_free (exp);
7508
7509 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7510 target = expand_builtin_atomic_is_lock_free (exp);
7511 if (target)
7512 return target;
7513 break;
7514
7515 case BUILT_IN_ATOMIC_THREAD_FENCE:
7516 expand_builtin_atomic_thread_fence (exp);
7517 return const0_rtx;
7518
7519 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7520 expand_builtin_atomic_signal_fence (exp);
7521 return const0_rtx;
7522
7523 case BUILT_IN_OBJECT_SIZE:
7524 return expand_builtin_object_size (exp);
7525
7526 case BUILT_IN_MEMCPY_CHK:
7527 case BUILT_IN_MEMPCPY_CHK:
7528 case BUILT_IN_MEMMOVE_CHK:
7529 case BUILT_IN_MEMSET_CHK:
7530 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7531 if (target)
7532 return target;
7533 break;
7534
7535 case BUILT_IN_STRCPY_CHK:
7536 case BUILT_IN_STPCPY_CHK:
7537 case BUILT_IN_STRNCPY_CHK:
7538 case BUILT_IN_STPNCPY_CHK:
7539 case BUILT_IN_STRCAT_CHK:
7540 case BUILT_IN_STRNCAT_CHK:
7541 case BUILT_IN_SNPRINTF_CHK:
7542 case BUILT_IN_VSNPRINTF_CHK:
7543 maybe_emit_chk_warning (exp, fcode);
7544 break;
7545
7546 case BUILT_IN_SPRINTF_CHK:
7547 case BUILT_IN_VSPRINTF_CHK:
7548 maybe_emit_sprintf_chk_warning (exp, fcode);
7549 break;
7550
7551 case BUILT_IN_FREE:
7552 if (warn_free_nonheap_object)
7553 maybe_emit_free_warning (exp);
7554 break;
7555
7556 case BUILT_IN_THREAD_POINTER:
7557 return expand_builtin_thread_pointer (exp, target);
7558
7559 case BUILT_IN_SET_THREAD_POINTER:
7560 expand_builtin_set_thread_pointer (exp);
7561 return const0_rtx;
7562
7563 case BUILT_IN_CILK_DETACH:
7564 expand_builtin_cilk_detach (exp);
7565 return const0_rtx;
7566
7567 case BUILT_IN_CILK_POP_FRAME:
7568 expand_builtin_cilk_pop_frame (exp);
7569 return const0_rtx;
7570
7571 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7572 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7573 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7574 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7575 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7576 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7577 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7578 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7579 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7580 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7581 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7582 /* We allow user CHKP builtins if Pointer Bounds
7583 Checker is off. */
7584 if (!chkp_function_instrumented_p (current_function_decl))
7585 {
7586 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7587 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7588 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7589 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7590 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7591 return expand_normal (CALL_EXPR_ARG (exp, 0));
7592 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7593 return expand_normal (size_zero_node);
7594 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7595 return expand_normal (size_int (-1));
7596 else
7597 return const0_rtx;
7598 }
7599 /* FALLTHROUGH */
7600
7601 case BUILT_IN_CHKP_BNDMK:
7602 case BUILT_IN_CHKP_BNDSTX:
7603 case BUILT_IN_CHKP_BNDCL:
7604 case BUILT_IN_CHKP_BNDCU:
7605 case BUILT_IN_CHKP_BNDLDX:
7606 case BUILT_IN_CHKP_BNDRET:
7607 case BUILT_IN_CHKP_INTERSECT:
7608 case BUILT_IN_CHKP_NARROW:
7609 case BUILT_IN_CHKP_EXTRACT_LOWER:
7610 case BUILT_IN_CHKP_EXTRACT_UPPER:
7611 /* Software implementation of Pointer Bounds Checker is NYI.
7612 Target support is required. */
7613 	  error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7614 break;
7615
7616 case BUILT_IN_ACC_ON_DEVICE:
7617 /* Do library call, if we failed to expand the builtin when
7618 folding. */
7619 break;
7620
7621 default: /* just do library call, if unknown builtin */
7622 break;
7623 }
7624
7625 /* The switch statement above can drop through to cause the function
7626 to be called normally. */
7627 return expand_call (exp, target, ignore);
7628 }
7629
7630 /* Similar to expand_builtin, but used for instrumented calls.  */
7631
7632 rtx
7633 expand_builtin_with_bounds (tree exp, rtx target,
7634 rtx subtarget ATTRIBUTE_UNUSED,
7635 machine_mode mode, int ignore)
7636 {
7637 tree fndecl = get_callee_fndecl (exp);
7638 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7639
7640 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7641
7642 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7643 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7644
7645 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7646 && fcode < END_CHKP_BUILTINS);
7647
7648 switch (fcode)
7649 {
7650 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7651 target = expand_builtin_memcpy_with_bounds (exp, target);
7652 if (target)
7653 return target;
7654 break;
7655
7656 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7657 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7658 if (target)
7659 return target;
7660 break;
7661
7662 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7663 target = expand_builtin_memset_with_bounds (exp, target, mode);
7664 if (target)
7665 return target;
7666 break;
7667
7668 default:
7669 break;
7670 }
7671
7672 /* The switch statement above can drop through to cause the function
7673 to be called normally. */
7674 return expand_call (exp, target, ignore);
7675 }
7676
7677 /* Determine whether a tree node represents a call to a built-in
7678 function. If the tree T is a call to a built-in function with
7679 the right number of arguments of the appropriate types, return
7680 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7681 Otherwise the return value is END_BUILTINS. */
7682
7683 enum built_in_function
7684 builtin_mathfn_code (const_tree t)
7685 {
7686 const_tree fndecl, arg, parmlist;
7687 const_tree argtype, parmtype;
7688 const_call_expr_arg_iterator iter;
7689
7690 if (TREE_CODE (t) != CALL_EXPR
7691 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7692 return END_BUILTINS;
7693
7694 fndecl = get_callee_fndecl (t);
7695 if (fndecl == NULL_TREE
7696 || TREE_CODE (fndecl) != FUNCTION_DECL
7697 || ! DECL_BUILT_IN (fndecl)
7698 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7699 return END_BUILTINS;
7700
7701 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7702 init_const_call_expr_arg_iterator (t, &iter);
7703 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7704 {
7705 /* If a function doesn't take a variable number of arguments,
7706 the last element in the list will have type `void'. */
7707 parmtype = TREE_VALUE (parmlist);
7708 if (VOID_TYPE_P (parmtype))
7709 {
7710 if (more_const_call_expr_args_p (&iter))
7711 return END_BUILTINS;
7712 return DECL_FUNCTION_CODE (fndecl);
7713 }
7714
7715 if (! more_const_call_expr_args_p (&iter))
7716 return END_BUILTINS;
7717
7718 arg = next_const_call_expr_arg (&iter);
7719 argtype = TREE_TYPE (arg);
7720
7721 if (SCALAR_FLOAT_TYPE_P (parmtype))
7722 {
7723 if (! SCALAR_FLOAT_TYPE_P (argtype))
7724 return END_BUILTINS;
7725 }
7726 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7727 {
7728 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7729 return END_BUILTINS;
7730 }
7731 else if (POINTER_TYPE_P (parmtype))
7732 {
7733 if (! POINTER_TYPE_P (argtype))
7734 return END_BUILTINS;
7735 }
7736 else if (INTEGRAL_TYPE_P (parmtype))
7737 {
7738 if (! INTEGRAL_TYPE_P (argtype))
7739 return END_BUILTINS;
7740 }
7741 else
7742 return END_BUILTINS;
7743 }
7744
7745 /* Variable-length argument list. */
7746 return DECL_FUNCTION_CODE (fndecl);
7747 }
7748
7749 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7750 evaluate to a constant. */
7751
7752 static tree
7753 fold_builtin_constant_p (tree arg)
7754 {
7755 /* We return 1 for a numeric type that's known to be a constant
7756 value at compile-time or for an aggregate type that's a
7757 literal constant. */
7758 STRIP_NOPS (arg);
7759
7760 /* If we know this is a constant, emit the constant of one. */
7761 if (CONSTANT_CLASS_P (arg)
7762 || (TREE_CODE (arg) == CONSTRUCTOR
7763 && TREE_CONSTANT (arg)))
7764 return integer_one_node;
7765 if (TREE_CODE (arg) == ADDR_EXPR)
7766 {
7767 tree op = TREE_OPERAND (arg, 0);
7768 if (TREE_CODE (op) == STRING_CST
7769 || (TREE_CODE (op) == ARRAY_REF
7770 && integer_zerop (TREE_OPERAND (op, 1))
7771 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7772 return integer_one_node;
7773 }
7774
7775 /* If this expression has side effects, show we don't know it to be a
7776      constant.  Likewise if it's a pointer or aggregate type, since in
7777      those cases we only want literals, as those are only optimized
7778      when generating RTL, not later.
7779 And finally, if we are compiling an initializer, not code, we
7780 need to return a definite result now; there's not going to be any
7781 more optimization done. */
7782 if (TREE_SIDE_EFFECTS (arg)
7783 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7784 || POINTER_TYPE_P (TREE_TYPE (arg))
7785 || cfun == 0
7786 || folding_initializer
7787 || force_folding_builtin_constant_p)
7788 return integer_zero_node;
7789
7790 return NULL_TREE;
7791 }
7792
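/* Editorial illustration of the folding above (not from the original
   sources):

     __builtin_constant_p (42)        folds to 1
     __builtin_constant_p ("abc")     folds to 1
     __builtin_constant_p (x++)       folds to 0 (has side effects)
     __builtin_constant_p (x)         folds to 0 in an initializer

   A NULL_TREE result means "not decided yet": the call is kept so that a
   later pass may still prove the argument constant.  */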
7793 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7794 return it as a truthvalue. */
7795
7796 static tree
7797 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7798 tree predictor)
7799 {
7800 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7801
7802 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7803 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7804 ret_type = TREE_TYPE (TREE_TYPE (fn));
7805 pred_type = TREE_VALUE (arg_types);
7806 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7807
7808 pred = fold_convert_loc (loc, pred_type, pred);
7809 expected = fold_convert_loc (loc, expected_type, expected);
7810 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7811 predictor);
7812
7813 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7814 build_int_cst (ret_type, 0));
7815 }
7816
7817 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and an optional
7818    prediction hint ARG2.  Return NULL_TREE if no simplification is possible.  */
7819
7820 tree
7821 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7822 {
7823 tree inner, fndecl, inner_arg0;
7824 enum tree_code code;
7825
7826 /* Distribute the expected value over short-circuiting operators.
7827 See through the cast from truthvalue_type_node to long. */
7828 inner_arg0 = arg0;
7829 while (CONVERT_EXPR_P (inner_arg0)
7830 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7831 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7832 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7833
7834   /* If this is a builtin_expect within a builtin_expect, keep the
7835      inner one.  See through a comparison against a constant.  It
7836      might have been added to create a truthvalue.  */
7837 inner = inner_arg0;
7838
7839 if (COMPARISON_CLASS_P (inner)
7840 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7841 inner = TREE_OPERAND (inner, 0);
7842
7843 if (TREE_CODE (inner) == CALL_EXPR
7844 && (fndecl = get_callee_fndecl (inner))
7845 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7846 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7847 return arg0;
7848
7849 inner = inner_arg0;
7850 code = TREE_CODE (inner);
7851 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7852 {
7853 tree op0 = TREE_OPERAND (inner, 0);
7854 tree op1 = TREE_OPERAND (inner, 1);
7855
7856 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7857 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7858 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7859
7860 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7861 }
7862
7863 /* If the argument isn't invariant then there's nothing else we can do. */
7864 if (!TREE_CONSTANT (inner_arg0))
7865 return NULL_TREE;
7866
7867   /* If we expect that a comparison against the argument will fold to
7868      a constant, return the constant.  In practice, this means a true
7869      constant or the address of a non-weak symbol.  */
7870 inner = inner_arg0;
7871 STRIP_NOPS (inner);
7872 if (TREE_CODE (inner) == ADDR_EXPR)
7873 {
7874 do
7875 {
7876 inner = TREE_OPERAND (inner, 0);
7877 }
7878 while (TREE_CODE (inner) == COMPONENT_REF
7879 || TREE_CODE (inner) == ARRAY_REF);
7880 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7881 return NULL_TREE;
7882 }
7883
7884 /* Otherwise, ARG0 already has the proper type for the return value. */
7885 return arg0;
7886 }
7887
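/* Editorial illustration: the distribution over short-circuit operators
   above rewrites, roughly,

     __builtin_expect (a && b, 1)

   into

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   (each operand actually wrapped as a "!= 0" truthvalue and the whole
   expression converted back to the original type), so every sub-condition
   carries its own prediction.  */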
7888 /* Fold a call to __builtin_classify_type with argument ARG. */
7889
7890 static tree
7891 fold_builtin_classify_type (tree arg)
7892 {
7893 if (arg == 0)
7894 return build_int_cst (integer_type_node, no_type_class);
7895
7896 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7897 }
7898
7899 /* Fold a call to __builtin_strlen with argument ARG. */
7900
7901 static tree
7902 fold_builtin_strlen (location_t loc, tree type, tree arg)
7903 {
7904 if (!validate_arg (arg, POINTER_TYPE))
7905 return NULL_TREE;
7906 else
7907 {
7908 tree len = c_strlen (arg, 0);
7909
7910 if (len)
7911 return fold_convert_loc (loc, type, len);
7912
7913 return NULL_TREE;
7914 }
7915 }
7916
7917 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7918
7919 static tree
7920 fold_builtin_inf (location_t loc, tree type, int warn)
7921 {
7922 REAL_VALUE_TYPE real;
7923
7924 /* __builtin_inff is intended to be usable to define INFINITY on all
7925 targets. If an infinity is not available, INFINITY expands "to a
7926 positive constant of type float that overflows at translation
7927 time", footnote "In this case, using INFINITY will violate the
7928 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7929 Thus we pedwarn to ensure this constraint violation is
7930 diagnosed. */
7931 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7932 pedwarn (loc, 0, "target format does not support infinity");
7933
7934 real_inf (&real);
7935 return build_real (type, real);
7936 }
7937
7938 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7939 NULL_TREE if no simplification can be made. */
7940
7941 static tree
7942 fold_builtin_sincos (location_t loc,
7943 tree arg0, tree arg1, tree arg2)
7944 {
7945 tree type;
7946 tree fndecl, call = NULL_TREE;
7947
7948 if (!validate_arg (arg0, REAL_TYPE)
7949 || !validate_arg (arg1, POINTER_TYPE)
7950 || !validate_arg (arg2, POINTER_TYPE))
7951 return NULL_TREE;
7952
7953 type = TREE_TYPE (arg0);
7954
7955   /* Pick the cexpi variant that matches the argument's type.  */
7956 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7957 if (fn == END_BUILTINS)
7958 return NULL_TREE;
7959
7960   /* Calculate the result directly when the argument is a constant.  */
7961 if (TREE_CODE (arg0) == REAL_CST)
7962 {
7963 tree complex_type = build_complex_type (type);
7964 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7965 }
7966 if (!call)
7967 {
7968 if (!targetm.libc_has_function (function_c99_math_complex)
7969 || !builtin_decl_implicit_p (fn))
7970 return NULL_TREE;
7971 fndecl = builtin_decl_explicit (fn);
7972 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7973 call = builtin_save_expr (call);
7974 }
7975
7976 return build2 (COMPOUND_EXPR, void_type_node,
7977 build2 (MODIFY_EXPR, void_type_node,
7978 build_fold_indirect_ref_loc (loc, arg1),
7979 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7980 build2 (MODIFY_EXPR, void_type_node,
7981 build_fold_indirect_ref_loc (loc, arg2),
7982 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7983 }
7984
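/* Editorial illustration: for a non-constant argument the folding above
   rewrites, roughly,

     sincos (x, &s, &c);

   into

     tmp = cexpi (x), *s = __imag__ tmp, *c = __real__ tmp;

   (cexpi (x) == cos (x) + i*sin (x)), which lets later passes share a
   single cexpi call; for a constant argument the value is computed
   directly.  */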
7985 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7986 Return NULL_TREE if no simplification can be made. */
7987
7988 static tree
7989 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7990 {
7991 if (!validate_arg (arg1, POINTER_TYPE)
7992 || !validate_arg (arg2, POINTER_TYPE)
7993 || !validate_arg (len, INTEGER_TYPE))
7994 return NULL_TREE;
7995
7996 /* If the LEN parameter is zero, return zero. */
7997 if (integer_zerop (len))
7998 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7999 arg1, arg2);
8000
8001 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8002 if (operand_equal_p (arg1, arg2, 0))
8003 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8004
8005   /* If the LEN parameter is one, return an expression corresponding to
8006      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8007 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8008 {
8009 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8010 tree cst_uchar_ptr_node
8011 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8012
8013 tree ind1
8014 = fold_convert_loc (loc, integer_type_node,
8015 build1 (INDIRECT_REF, cst_uchar_node,
8016 fold_convert_loc (loc,
8017 cst_uchar_ptr_node,
8018 arg1)));
8019 tree ind2
8020 = fold_convert_loc (loc, integer_type_node,
8021 build1 (INDIRECT_REF, cst_uchar_node,
8022 fold_convert_loc (loc,
8023 cst_uchar_ptr_node,
8024 arg2)));
8025 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8026 }
8027
8028 return NULL_TREE;
8029 }
8030
8031 /* Fold a call to builtin isascii with argument ARG. */
8032
8033 static tree
8034 fold_builtin_isascii (location_t loc, tree arg)
8035 {
8036 if (!validate_arg (arg, INTEGER_TYPE))
8037 return NULL_TREE;
8038 else
8039 {
8040 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8041 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8042 build_int_cst (integer_type_node,
8043 ~ (unsigned HOST_WIDE_INT) 0x7f));
8044 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8045 arg, integer_zero_node);
8046 }
8047 }
8048
8049 /* Fold a call to builtin toascii with argument ARG. */
8050
8051 static tree
8052 fold_builtin_toascii (location_t loc, tree arg)
8053 {
8054 if (!validate_arg (arg, INTEGER_TYPE))
8055 return NULL_TREE;
8056
8057 /* Transform toascii(c) -> (c & 0x7f). */
8058 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8059 build_int_cst (integer_type_node, 0x7f));
8060 }
8061
8062 /* Fold a call to builtin isdigit with argument ARG. */
8063
8064 static tree
8065 fold_builtin_isdigit (location_t loc, tree arg)
8066 {
8067 if (!validate_arg (arg, INTEGER_TYPE))
8068 return NULL_TREE;
8069 else
8070 {
8071 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8072 /* According to the C standard, isdigit is unaffected by locale.
8073 However, it definitely is affected by the target character set. */
8074 unsigned HOST_WIDE_INT target_digit0
8075 = lang_hooks.to_target_charset ('0');
8076
8077 if (target_digit0 == 0)
8078 return NULL_TREE;
8079
8080 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8081 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8082 build_int_cst (unsigned_type_node, target_digit0));
8083 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8084 build_int_cst (unsigned_type_node, 9));
8085 }
8086 }
8087
8088 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8089
8090 static tree
8091 fold_builtin_fabs (location_t loc, tree arg, tree type)
8092 {
8093 if (!validate_arg (arg, REAL_TYPE))
8094 return NULL_TREE;
8095
8096 arg = fold_convert_loc (loc, type, arg);
8097 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8098 }
8099
8100 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8101
8102 static tree
8103 fold_builtin_abs (location_t loc, tree arg, tree type)
8104 {
8105 if (!validate_arg (arg, INTEGER_TYPE))
8106 return NULL_TREE;
8107
8108 arg = fold_convert_loc (loc, type, arg);
8109 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8110 }
8111
8112 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8113
8114 static tree
8115 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8116 {
8117 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8118 if (validate_arg (arg0, REAL_TYPE)
8119 && validate_arg (arg1, REAL_TYPE)
8120 && validate_arg (arg2, REAL_TYPE)
8121 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8122 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8123
8124 return NULL_TREE;
8125 }
8126
8127 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8128
8129 static tree
8130 fold_builtin_carg (location_t loc, tree arg, tree type)
8131 {
8132 if (validate_arg (arg, COMPLEX_TYPE)
8133 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8134 {
8135 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8136
8137 if (atan2_fn)
8138 {
8139 tree new_arg = builtin_save_expr (arg);
8140 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8141 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8142 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8143 }
8144 }
8145
8146 return NULL_TREE;
8147 }
8148
8149 /* Fold a call to builtin frexp; we can assume the base is 2.  */
8150
8151 static tree
8152 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8153 {
8154 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8155 return NULL_TREE;
8156
8157 STRIP_NOPS (arg0);
8158
8159 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8160 return NULL_TREE;
8161
8162 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8163
8164 /* Proceed if a valid pointer type was passed in. */
8165 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8166 {
8167 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8168 tree frac, exp;
8169
8170 switch (value->cl)
8171 {
8172 case rvc_zero:
8173 /* For +-0, return (*exp = 0, +-0). */
8174 exp = integer_zero_node;
8175 frac = arg0;
8176 break;
8177 case rvc_nan:
8178 case rvc_inf:
8179 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8180 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8181 case rvc_normal:
8182 {
8183 /* Since the frexp function always expects base 2, and in
8184 GCC normalized significands are already in the range
8185 [0.5, 1.0), we have exactly what frexp wants. */
8186 REAL_VALUE_TYPE frac_rvt = *value;
8187 SET_REAL_EXP (&frac_rvt, 0);
8188 frac = build_real (rettype, frac_rvt);
8189 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8190 }
8191 break;
8192 default:
8193 gcc_unreachable ();
8194 }
8195
8196 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8197 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8198 TREE_SIDE_EFFECTS (arg1) = 1;
8199 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8200 }
8201
8202 return NULL_TREE;
8203 }
8204
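/* Editorial illustration of the constant folding above:

     frexp (8.0, &e)   returns 0.5 with *e = 4   (8.0 == 0.5 * 2**4)
     frexp (0.0, &e)   returns 0.0 with *e = 0
     frexp (NaN, &e)   returns the NaN; *e is left unspecified  */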
8205 /* Fold a call to builtin modf. */
8206
8207 static tree
8208 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8209 {
8210 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8211 return NULL_TREE;
8212
8213 STRIP_NOPS (arg0);
8214
8215 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8216 return NULL_TREE;
8217
8218 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8219
8220 /* Proceed if a valid pointer type was passed in. */
8221 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8222 {
8223 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8224 REAL_VALUE_TYPE trunc, frac;
8225
8226 switch (value->cl)
8227 {
8228 case rvc_nan:
8229 case rvc_zero:
8230 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8231 trunc = frac = *value;
8232 break;
8233 case rvc_inf:
8234 /* For +-Inf, return (*arg1 = arg0, +-0). */
8235 frac = dconst0;
8236 frac.sign = value->sign;
8237 trunc = *value;
8238 break;
8239 case rvc_normal:
8240 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8241 real_trunc (&trunc, VOIDmode, value);
8242 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8243 /* If the original number was negative and already
8244 integral, then the fractional part is -0.0. */
8245 if (value->sign && frac.cl == rvc_zero)
8246 frac.sign = value->sign;
8247 break;
8248 }
8249
8250 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8251 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8252 build_real (rettype, trunc));
8253 TREE_SIDE_EFFECTS (arg1) = 1;
8254 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8255 build_real (rettype, frac));
8256 }
8257
8258 return NULL_TREE;
8259 }
8260
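/* Editorial illustration of the constant folding above:

     modf (2.5, &i)    returns 0.5  with *i = 2.0
     modf (-3.0, &i)   returns -0.0 with *i = -3.0
     modf (+Inf, &i)   returns +0.0 with *i = +Inf (the zero takes the sign
                       of the infinity)  */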
8261
8262
8263 /* Fold a call to __builtin_isinf_sign.
8264 ARG is the argument for the call. */
8265
8266 static tree
8267 fold_builtin_classify (location_t loc, tree arg, int builtin_index)
8268 {
8269 if (!validate_arg (arg, REAL_TYPE))
8270 return NULL_TREE;
8271
8272 switch (builtin_index)
8273 {
8274 case BUILT_IN_ISINF_SIGN:
8275 {
8276 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8277 /* In a boolean context, GCC will fold the inner COND_EXPR to
8278 1. So e.g. "if (isinf_sign(x))" would be folded to just
8279 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8280 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8281 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8282 tree tmp = NULL_TREE;
8283
8284 arg = builtin_save_expr (arg);
8285
8286 if (signbit_fn && isinf_fn)
8287 {
8288 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8289 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8290
8291 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8292 signbit_call, integer_zero_node);
8293 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8294 isinf_call, integer_zero_node);
8295
8296 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8297 integer_minus_one_node, integer_one_node);
8298 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8299 isinf_call, tmp,
8300 integer_zero_node);
8301 }
8302
8303 return tmp;
8304 }
8305
8306 default:
8307 gcc_unreachable ();
8308 }
8309 }
8310
8311 /* Fold a call to an unordered comparison function such as
8312 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8313 being called and ARG0 and ARG1 are the arguments for the call.
8314 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8315 the opposite of the desired result. UNORDERED_CODE is used
8316 for modes that can hold NaNs and ORDERED_CODE is used for
8317 the rest. */
8318
8319 static tree
8320 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8321 enum tree_code unordered_code,
8322 enum tree_code ordered_code)
8323 {
8324 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8325 enum tree_code code;
8326 tree type0, type1;
8327 enum tree_code code0, code1;
8328 tree cmp_type = NULL_TREE;
8329
8330 type0 = TREE_TYPE (arg0);
8331 type1 = TREE_TYPE (arg1);
8332
8333 code0 = TREE_CODE (type0);
8334 code1 = TREE_CODE (type1);
8335
8336 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8337 /* Choose the wider of two real types. */
8338 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8339 ? type0 : type1;
8340 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8341 cmp_type = type0;
8342 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8343 cmp_type = type1;
8344
8345 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8346 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8347
8348 if (unordered_code == UNORDERED_EXPR)
8349 {
8350 if (!HONOR_NANS (arg0))
8351 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8352 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8353 }
8354
8355 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8356 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8357 fold_build2_loc (loc, code, type, arg0, arg1));
8358 }
8359
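/* Editorial illustration: __builtin_isgreater (x, y), for instance, reaches
   this helper with UNORDERED_CODE = UNLE_EXPR and ORDERED_CODE = LE_EXPR and
   is folded, roughly, into

     !(x <= y)   done as UNLE when the operands can be NaN, as LE otherwise

   i.e. the codes passed in express the opposite of the desired result and
   the comparison is negated.  */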
8360 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8361    arithmetic if it can never overflow, or into an internal function that
8362    returns both the arithmetic result and an overflow flag in a complex
8363    integer result, or into some other check for overflow.
8364    Similarly, fold __builtin_{add,sub,mul}_overflow_p to just the
8365    overflow-checking part of that.  */
8366
8367 static tree
8368 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8369 tree arg0, tree arg1, tree arg2)
8370 {
8371 enum internal_fn ifn = IFN_LAST;
8372 /* The code of the expression corresponding to the type-generic
8373 built-in, or ERROR_MARK for the type-specific ones. */
8374 enum tree_code opcode = ERROR_MARK;
8375 bool ovf_only = false;
8376
8377 switch (fcode)
8378 {
8379 case BUILT_IN_ADD_OVERFLOW_P:
8380 ovf_only = true;
8381 /* FALLTHRU */
8382 case BUILT_IN_ADD_OVERFLOW:
8383 opcode = PLUS_EXPR;
8384 /* FALLTHRU */
8385 case BUILT_IN_SADD_OVERFLOW:
8386 case BUILT_IN_SADDL_OVERFLOW:
8387 case BUILT_IN_SADDLL_OVERFLOW:
8388 case BUILT_IN_UADD_OVERFLOW:
8389 case BUILT_IN_UADDL_OVERFLOW:
8390 case BUILT_IN_UADDLL_OVERFLOW:
8391 ifn = IFN_ADD_OVERFLOW;
8392 break;
8393 case BUILT_IN_SUB_OVERFLOW_P:
8394 ovf_only = true;
8395 /* FALLTHRU */
8396 case BUILT_IN_SUB_OVERFLOW:
8397 opcode = MINUS_EXPR;
8398 /* FALLTHRU */
8399 case BUILT_IN_SSUB_OVERFLOW:
8400 case BUILT_IN_SSUBL_OVERFLOW:
8401 case BUILT_IN_SSUBLL_OVERFLOW:
8402 case BUILT_IN_USUB_OVERFLOW:
8403 case BUILT_IN_USUBL_OVERFLOW:
8404 case BUILT_IN_USUBLL_OVERFLOW:
8405 ifn = IFN_SUB_OVERFLOW;
8406 break;
8407 case BUILT_IN_MUL_OVERFLOW_P:
8408 ovf_only = true;
8409 /* FALLTHRU */
8410 case BUILT_IN_MUL_OVERFLOW:
8411 opcode = MULT_EXPR;
8412 /* FALLTHRU */
8413 case BUILT_IN_SMUL_OVERFLOW:
8414 case BUILT_IN_SMULL_OVERFLOW:
8415 case BUILT_IN_SMULLL_OVERFLOW:
8416 case BUILT_IN_UMUL_OVERFLOW:
8417 case BUILT_IN_UMULL_OVERFLOW:
8418 case BUILT_IN_UMULLL_OVERFLOW:
8419 ifn = IFN_MUL_OVERFLOW;
8420 break;
8421 default:
8422 gcc_unreachable ();
8423 }
8424
8425 /* For the "generic" overloads, the first two arguments can have different
8426 types and the last argument determines the target type to use to check
8427 for overflow. The arguments of the other overloads all have the same
8428 type. */
8429 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8430
8431 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8432 arguments are constant, attempt to fold the built-in call into a constant
8433 expression indicating whether or not it detected an overflow. */
8434 if (ovf_only
8435 && TREE_CODE (arg0) == INTEGER_CST
8436 && TREE_CODE (arg1) == INTEGER_CST)
8437 /* Perform the computation in the target type and check for overflow. */
8438 return omit_one_operand_loc (loc, boolean_type_node,
8439 arith_overflowed_p (opcode, type, arg0, arg1)
8440 ? boolean_true_node : boolean_false_node,
8441 arg2);
8442
8443 tree ctype = build_complex_type (type);
8444 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8445 2, arg0, arg1);
8446 tree tgt = save_expr (call);
8447 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8448 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8449 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8450
8451 if (ovf_only)
8452 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8453
8454 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8455 tree store
8456 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8457 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8458 }
8459
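/* Editorial illustration: with the internal function available, a call like

     __builtin_add_overflow (a, b, &r)

   is folded, roughly, into

     c = .ADD_OVERFLOW (a, b), *r = __real__ c, (_Bool) __imag__ c

   while __builtin_add_overflow_p (a, b, (type) 0) keeps only the boolean
   overflow flag and drops the store of the result.  */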
8460 /* Fold a call to __builtin_FILE to a constant string. */
8461
8462 static inline tree
8463 fold_builtin_FILE (location_t loc)
8464 {
8465 if (const char *fname = LOCATION_FILE (loc))
8466 return build_string_literal (strlen (fname) + 1, fname);
8467
8468 return build_string_literal (1, "");
8469 }
8470
8471 /* Fold a call to __builtin_FUNCTION to a constant string. */
8472
8473 static inline tree
8474 fold_builtin_FUNCTION ()
8475 {
8476 if (current_function_decl)
8477 {
8478 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8479 return build_string_literal (strlen (name) + 1, name);
8480 }
8481
8482 return build_string_literal (1, "");
8483 }
8484
8485 /* Fold a call to __builtin_LINE to an integer constant. */
8486
8487 static inline tree
8488 fold_builtin_LINE (location_t loc, tree type)
8489 {
8490 return build_int_cst (type, LOCATION_LINE (loc));
8491 }
8492
8493 /* Fold a call to built-in function FNDECL with 0 arguments.
8494 This function returns NULL_TREE if no simplification was possible. */
8495
8496 static tree
8497 fold_builtin_0 (location_t loc, tree fndecl)
8498 {
8499 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8500 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8501 switch (fcode)
8502 {
8503 case BUILT_IN_FILE:
8504 return fold_builtin_FILE (loc);
8505
8506 case BUILT_IN_FUNCTION:
8507 return fold_builtin_FUNCTION ();
8508
8509 case BUILT_IN_LINE:
8510 return fold_builtin_LINE (loc, type);
8511
8512 CASE_FLT_FN (BUILT_IN_INF):
8513 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8514 case BUILT_IN_INFD32:
8515 case BUILT_IN_INFD64:
8516 case BUILT_IN_INFD128:
8517 return fold_builtin_inf (loc, type, true);
8518
8519 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8520 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8521 return fold_builtin_inf (loc, type, false);
8522
8523 case BUILT_IN_CLASSIFY_TYPE:
8524 return fold_builtin_classify_type (NULL_TREE);
8525
8526 default:
8527 break;
8528 }
8529 return NULL_TREE;
8530 }
8531
8532 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8533 This function returns NULL_TREE if no simplification was possible. */
8534
8535 static tree
8536 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8537 {
8538 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8539 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8540
8541 if (TREE_CODE (arg0) == ERROR_MARK)
8542 return NULL_TREE;
8543
8544 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8545 return ret;
8546
8547 switch (fcode)
8548 {
8549 case BUILT_IN_CONSTANT_P:
8550 {
8551 tree val = fold_builtin_constant_p (arg0);
8552
8553 /* Gimplification will pull the CALL_EXPR for the builtin out of
8554 an if condition. When not optimizing, we'll not CSE it back.
8555 To avoid regressions such as link errors, return false now. */
8556 if (!val && !optimize)
8557 val = integer_zero_node;
8558
8559 return val;
8560 }
8561
8562 case BUILT_IN_CLASSIFY_TYPE:
8563 return fold_builtin_classify_type (arg0);
8564
8565 case BUILT_IN_STRLEN:
8566 return fold_builtin_strlen (loc, type, arg0);
8567
8568 CASE_FLT_FN (BUILT_IN_FABS):
8569 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8570 case BUILT_IN_FABSD32:
8571 case BUILT_IN_FABSD64:
8572 case BUILT_IN_FABSD128:
8573 return fold_builtin_fabs (loc, arg0, type);
8574
8575 case BUILT_IN_ABS:
8576 case BUILT_IN_LABS:
8577 case BUILT_IN_LLABS:
8578 case BUILT_IN_IMAXABS:
8579 return fold_builtin_abs (loc, arg0, type);
8580
8581 CASE_FLT_FN (BUILT_IN_CONJ):
8582 if (validate_arg (arg0, COMPLEX_TYPE)
8583 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8584 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8585 break;
8586
8587 CASE_FLT_FN (BUILT_IN_CREAL):
8588 if (validate_arg (arg0, COMPLEX_TYPE)
8589 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8590 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8591 break;
8592
8593 CASE_FLT_FN (BUILT_IN_CIMAG):
8594 if (validate_arg (arg0, COMPLEX_TYPE)
8595 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8596 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8597 break;
8598
8599 CASE_FLT_FN (BUILT_IN_CARG):
8600 return fold_builtin_carg (loc, arg0, type);
8601
8602 case BUILT_IN_ISASCII:
8603 return fold_builtin_isascii (loc, arg0);
8604
8605 case BUILT_IN_TOASCII:
8606 return fold_builtin_toascii (loc, arg0);
8607
8608 case BUILT_IN_ISDIGIT:
8609 return fold_builtin_isdigit (loc, arg0);
8610
8611 case BUILT_IN_ISINF_SIGN:
8612 return fold_builtin_classify (loc, arg0, BUILT_IN_ISINF_SIGN);
8613
8614 case BUILT_IN_FREE:
8615 if (integer_zerop (arg0))
8616 return build_empty_stmt (loc);
8617 break;
8618
8619 default:
8620 break;
8621 }
8622
8623 return NULL_TREE;
8624
8625 }
8626
8627 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8628 This function returns NULL_TREE if no simplification was possible. */
8629
8630 static tree
8631 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8632 {
8633 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8634 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8635
8636 if (TREE_CODE (arg0) == ERROR_MARK
8637 || TREE_CODE (arg1) == ERROR_MARK)
8638 return NULL_TREE;
8639
8640 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8641 return ret;
8642
8643 switch (fcode)
8644 {
8645 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8646 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8647 if (validate_arg (arg0, REAL_TYPE)
8648 && validate_arg (arg1, POINTER_TYPE))
8649 return do_mpfr_lgamma_r (arg0, arg1, type);
8650 break;
8651
8652 CASE_FLT_FN (BUILT_IN_FREXP):
8653 return fold_builtin_frexp (loc, arg0, arg1, type);
8654
8655 CASE_FLT_FN (BUILT_IN_MODF):
8656 return fold_builtin_modf (loc, arg0, arg1, type);
8657
8658 case BUILT_IN_STRSPN:
8659 return fold_builtin_strspn (loc, arg0, arg1);
8660
8661 case BUILT_IN_STRCSPN:
8662 return fold_builtin_strcspn (loc, arg0, arg1);
8663
8664 case BUILT_IN_STRPBRK:
8665 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8666
8667 case BUILT_IN_EXPECT:
8668 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8669
8670 case BUILT_IN_ISGREATER:
8671 return fold_builtin_unordered_cmp (loc, fndecl,
8672 arg0, arg1, UNLE_EXPR, LE_EXPR);
8673 case BUILT_IN_ISGREATEREQUAL:
8674 return fold_builtin_unordered_cmp (loc, fndecl,
8675 arg0, arg1, UNLT_EXPR, LT_EXPR);
8676 case BUILT_IN_ISLESS:
8677 return fold_builtin_unordered_cmp (loc, fndecl,
8678 arg0, arg1, UNGE_EXPR, GE_EXPR);
8679 case BUILT_IN_ISLESSEQUAL:
8680 return fold_builtin_unordered_cmp (loc, fndecl,
8681 arg0, arg1, UNGT_EXPR, GT_EXPR);
8682 case BUILT_IN_ISLESSGREATER:
8683 return fold_builtin_unordered_cmp (loc, fndecl,
8684 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8685 case BUILT_IN_ISUNORDERED:
8686 return fold_builtin_unordered_cmp (loc, fndecl,
8687 arg0, arg1, UNORDERED_EXPR,
8688 NOP_EXPR);
8689
8690 /* We do the folding for va_start in the expander. */
8691 case BUILT_IN_VA_START:
8692 break;
8693
8694 case BUILT_IN_OBJECT_SIZE:
8695 return fold_builtin_object_size (arg0, arg1);
8696
8697 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8698 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8699
8700 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8701 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8702
8703 default:
8704 break;
8705 }
8706 return NULL_TREE;
8707 }
8708
8709 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8710 and ARG2.
8711 This function returns NULL_TREE if no simplification was possible. */
8712
8713 static tree
8714 fold_builtin_3 (location_t loc, tree fndecl,
8715 tree arg0, tree arg1, tree arg2)
8716 {
8717 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8718 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8719
8720 if (TREE_CODE (arg0) == ERROR_MARK
8721 || TREE_CODE (arg1) == ERROR_MARK
8722 || TREE_CODE (arg2) == ERROR_MARK)
8723 return NULL_TREE;
8724
8725 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8726 arg0, arg1, arg2))
8727 return ret;
8728
8729 switch (fcode)
8730 {
8731
8732 CASE_FLT_FN (BUILT_IN_SINCOS):
8733 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8734
8735 CASE_FLT_FN (BUILT_IN_FMA):
8736 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8737
8738 CASE_FLT_FN (BUILT_IN_REMQUO):
8739 if (validate_arg (arg0, REAL_TYPE)
8740 && validate_arg (arg1, REAL_TYPE)
8741 && validate_arg (arg2, POINTER_TYPE))
8742 return do_mpfr_remquo (arg0, arg1, arg2);
8743 break;
8744
8745 case BUILT_IN_BCMP:
8746 case BUILT_IN_MEMCMP:
8747 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8748
8749 case BUILT_IN_EXPECT:
8750 return fold_builtin_expect (loc, arg0, arg1, arg2);
8751
8752 case BUILT_IN_ADD_OVERFLOW:
8753 case BUILT_IN_SUB_OVERFLOW:
8754 case BUILT_IN_MUL_OVERFLOW:
8755 case BUILT_IN_ADD_OVERFLOW_P:
8756 case BUILT_IN_SUB_OVERFLOW_P:
8757 case BUILT_IN_MUL_OVERFLOW_P:
8758 case BUILT_IN_SADD_OVERFLOW:
8759 case BUILT_IN_SADDL_OVERFLOW:
8760 case BUILT_IN_SADDLL_OVERFLOW:
8761 case BUILT_IN_SSUB_OVERFLOW:
8762 case BUILT_IN_SSUBL_OVERFLOW:
8763 case BUILT_IN_SSUBLL_OVERFLOW:
8764 case BUILT_IN_SMUL_OVERFLOW:
8765 case BUILT_IN_SMULL_OVERFLOW:
8766 case BUILT_IN_SMULLL_OVERFLOW:
8767 case BUILT_IN_UADD_OVERFLOW:
8768 case BUILT_IN_UADDL_OVERFLOW:
8769 case BUILT_IN_UADDLL_OVERFLOW:
8770 case BUILT_IN_USUB_OVERFLOW:
8771 case BUILT_IN_USUBL_OVERFLOW:
8772 case BUILT_IN_USUBLL_OVERFLOW:
8773 case BUILT_IN_UMUL_OVERFLOW:
8774 case BUILT_IN_UMULL_OVERFLOW:
8775 case BUILT_IN_UMULLL_OVERFLOW:
8776 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8777
8778 default:
8779 break;
8780 }
8781 return NULL_TREE;
8782 }
8783
8784 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8785 arguments. IGNORE is true if the result of the
8786 function call is ignored. This function returns NULL_TREE if no
8787 simplification was possible. */
8788
8789 tree
8790 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8791 {
8792 tree ret = NULL_TREE;
8793
8794 switch (nargs)
8795 {
8796 case 0:
8797 ret = fold_builtin_0 (loc, fndecl);
8798 break;
8799 case 1:
8800 ret = fold_builtin_1 (loc, fndecl, args[0]);
8801 break;
8802 case 2:
8803 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8804 break;
8805 case 3:
8806 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8807 break;
8808 default:
8809 break;
8810 }
8811 if (ret)
8812 {
8813 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8814 SET_EXPR_LOCATION (ret, loc);
8815 TREE_NO_WARNING (ret) = 1;
8816 return ret;
8817 }
8818 return NULL_TREE;
8819 }
8820
8821 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8822 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8823 of arguments in ARGS to be omitted. OLDNARGS is the number of
8824 elements in ARGS. */
8825
8826 static tree
8827 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8828 int skip, tree fndecl, int n, va_list newargs)
8829 {
8830 int nargs = oldnargs - skip + n;
8831 tree *buffer;
8832
8833 if (n > 0)
8834 {
8835 int i, j;
8836
8837 buffer = XALLOCAVEC (tree, nargs);
8838 for (i = 0; i < n; i++)
8839 buffer[i] = va_arg (newargs, tree);
8840 for (j = skip; j < oldnargs; j++, i++)
8841 buffer[i] = args[j];
8842 }
8843 else
8844 buffer = args + skip;
8845
8846 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8847 }
8848
8849 /* Return true if FNDECL shouldn't be folded right now.
8850 If a built-in function has an inline attribute always_inline
8851 wrapper, defer folding it until after always_inline functions have
8852 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8853 might not be performed. */
8854
8855 bool
8856 avoid_folding_inline_builtin (tree fndecl)
8857 {
8858 return (DECL_DECLARED_INLINE_P (fndecl)
8859 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8860 && cfun
8861 && !cfun->always_inline_functions_inlined
8862 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8863 }
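
/* Illustrative sketch (not part of GCC): the kind of wrapper this check
   protects.  Fortified headers typically provide something like

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *dest, const char *src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1));
     }

   Folding a strcpy call before such a wrapper has been inlined would bypass
   the _FORTIFY_SOURCE object-size check, so folding is deferred.  */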
8864
8865 /* A wrapper function for builtin folding that prevents warnings for
8866 "statement without effect" and the like, caused by removing the
8867 call node earlier than the warning is generated. */
8868
8869 tree
8870 fold_call_expr (location_t loc, tree exp, bool ignore)
8871 {
8872 tree ret = NULL_TREE;
8873 tree fndecl = get_callee_fndecl (exp);
8874 if (fndecl
8875 && TREE_CODE (fndecl) == FUNCTION_DECL
8876 && DECL_BUILT_IN (fndecl)
8877 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8878 yet. Defer folding until we see all the arguments
8879 (after inlining). */
8880 && !CALL_EXPR_VA_ARG_PACK (exp))
8881 {
8882 int nargs = call_expr_nargs (exp);
8883
8884 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8885 instead the last argument is __builtin_va_arg_pack (). Defer folding
8886 even in that case, until arguments are finalized. */
8887 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8888 {
8889 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8890 if (fndecl2
8891 && TREE_CODE (fndecl2) == FUNCTION_DECL
8892 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8893 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8894 return NULL_TREE;
8895 }
8896
8897 if (avoid_folding_inline_builtin (fndecl))
8898 return NULL_TREE;
8899
8900 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8901 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8902 CALL_EXPR_ARGP (exp), ignore);
8903 else
8904 {
8905 tree *args = CALL_EXPR_ARGP (exp);
8906 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8907 if (ret)
8908 return ret;
8909 }
8910 }
8911 return NULL_TREE;
8912 }
8913
8914 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8915 N arguments are passed in the array ARGARRAY. Return a folded
8916 expression or NULL_TREE if no simplification was possible. */
8917
8918 tree
8919 fold_builtin_call_array (location_t loc, tree,
8920 tree fn,
8921 int n,
8922 tree *argarray)
8923 {
8924 if (TREE_CODE (fn) != ADDR_EXPR)
8925 return NULL_TREE;
8926
8927 tree fndecl = TREE_OPERAND (fn, 0);
8928 if (TREE_CODE (fndecl) == FUNCTION_DECL
8929 && DECL_BUILT_IN (fndecl))
8930 {
8931 /* If last argument is __builtin_va_arg_pack (), arguments to this
8932 function are not finalized yet. Defer folding until they are. */
8933 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8934 {
8935 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8936 if (fndecl2
8937 && TREE_CODE (fndecl2) == FUNCTION_DECL
8938 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8939 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8940 return NULL_TREE;
8941 }
8942 if (avoid_folding_inline_builtin (fndecl))
8943 return NULL_TREE;
8944 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8945 return targetm.fold_builtin (fndecl, n, argarray, false);
8946 else
8947 return fold_builtin_n (loc, fndecl, argarray, n, false);
8948 }
8949
8950 return NULL_TREE;
8951 }
8952
8953 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8954 along with N new arguments specified as the "..." parameters. SKIP
8955 is the number of arguments in EXP to be omitted. This function is used
8956 to do varargs-to-varargs transformations. */
8957
8958 static tree
8959 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8960 {
8961 va_list ap;
8962 tree t;
8963
8964 va_start (ap, n);
8965 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8966 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8967 va_end (ap);
8968
8969 return t;
8970 }
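
/* Illustrative sketch (not part of GCC sources; the decls below are
   placeholders): how the SKIP/N parameters combine.  Given a call
   expression EXP for

     some_chk_fn (dest, flag, size, "%s", str)

   the rewrite

     rewrite_call_expr (loc, exp, 3, other_fndecl, 1, dest);

   builds other_fndecl (dest, "%s", str): the first SKIP (3) arguments of
   EXP are dropped, the N (1) new arguments come first, and the remaining
   old arguments are appended.  */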
8971
8972 /* Validate a single argument ARG against a tree code CODE representing
8973 a type. Return true when argument is valid. */
8974
8975 static bool
8976 validate_arg (const_tree arg, enum tree_code code)
8977 {
8978 if (!arg)
8979 return false;
8980 else if (code == POINTER_TYPE)
8981 return POINTER_TYPE_P (TREE_TYPE (arg));
8982 else if (code == INTEGER_TYPE)
8983 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8984 return code == TREE_CODE (TREE_TYPE (arg));
8985 }
8986
8987 /* This function validates the types of a function call argument list
8988 against a specified list of tree_codes. If the last specifier is a 0,
8989 that represents an ellipsis; otherwise the last specifier must be a
8990 VOID_TYPE.
8991
8992 This is the GIMPLE version of validate_arglist. Eventually we want to
8993 completely convert builtins.c to work from GIMPLEs and the tree based
8994 validate_arglist will then be removed. */
8995
8996 bool
8997 validate_gimple_arglist (const gcall *call, ...)
8998 {
8999 enum tree_code code;
9000 bool res = false;
9001 va_list ap;
9002 const_tree arg;
9003 size_t i;
9004
9005 va_start (ap, call);
9006 i = 0;
9007
9008 do
9009 {
9010 code = (enum tree_code) va_arg (ap, int);
9011 switch (code)
9012 {
9013 case 0:
9014 /* This signifies an ellipsis; any further arguments are all ok. */
9015 res = true;
9016 goto end;
9017 case VOID_TYPE:
9018 /* This signifies an endlink; if no arguments remain, return
9019 true, otherwise return false. */
9020 res = (i == gimple_call_num_args (call));
9021 goto end;
9022 default:
9023 /* If no parameters remain or the parameter's code does not
9024 match the specified code, return false. Otherwise continue
9025 checking any remaining arguments. */
9026 arg = gimple_call_arg (call, i++);
9027 if (!validate_arg (arg, code))
9028 goto end;
9029 break;
9030 }
9031 }
9032 while (1);
9033
9034 /* We need gotos here since we can only have one VA_CLOSE in a
9035 function. */
9036 end: ;
9037 va_end (ap);
9038
9039 return res;
9040 }
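
/* Illustrative usage sketch (hypothetical call site, not from this file):

     if (validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       ...

   accepts a call such as frexp (x, &e), where the first argument has real
   type and the second is a pointer.  Ending the list with 0 instead of
   VOID_TYPE would accept any further arguments after the listed ones.  */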
9041
9042 /* Default target-specific builtin expander that does nothing. */
9043
9044 rtx
9045 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9046 rtx target ATTRIBUTE_UNUSED,
9047 rtx subtarget ATTRIBUTE_UNUSED,
9048 machine_mode mode ATTRIBUTE_UNUSED,
9049 int ignore ATTRIBUTE_UNUSED)
9050 {
9051 return NULL_RTX;
9052 }
9053
9054 /* Returns true if EXP represents data that would potentially reside
9055 in a readonly section. */
9056
9057 bool
9058 readonly_data_expr (tree exp)
9059 {
9060 STRIP_NOPS (exp);
9061
9062 if (TREE_CODE (exp) != ADDR_EXPR)
9063 return false;
9064
9065 exp = get_base_address (TREE_OPERAND (exp, 0));
9066 if (!exp)
9067 return false;
9068
9069 /* Make sure we call decl_readonly_section only for trees it
9070 can handle (since it returns true for everything it doesn't
9071 understand). */
9072 if (TREE_CODE (exp) == STRING_CST
9073 || TREE_CODE (exp) == CONSTRUCTOR
9074 || (VAR_P (exp) && TREE_STATIC (exp)))
9075 return decl_readonly_section (exp, 0);
9076 else
9077 return false;
9078 }
9079
9080 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9081 to the call, and TYPE is its return type.
9082
9083 Return NULL_TREE if no simplification was possible, otherwise return the
9084 simplified form of the call as a tree.
9085
9086 The simplified form may be a constant or other expression which
9087 computes the same value, but in a more efficient manner (including
9088 calls to other builtin functions).
9089
9090 The call may contain arguments which need to be evaluated, but
9091 which are not useful to determine the result of the call. In
9092 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9093 COMPOUND_EXPR will be an argument which must be evaluated.
9094 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9095 COMPOUND_EXPR in the chain will contain the tree for the simplified
9096 form of the builtin function call. */
9097
9098 static tree
9099 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9100 {
9101 if (!validate_arg (s1, POINTER_TYPE)
9102 || !validate_arg (s2, POINTER_TYPE))
9103 return NULL_TREE;
9104 else
9105 {
9106 tree fn;
9107 const char *p1, *p2;
9108
9109 p2 = c_getstr (s2);
9110 if (p2 == NULL)
9111 return NULL_TREE;
9112
9113 p1 = c_getstr (s1);
9114 if (p1 != NULL)
9115 {
9116 const char *r = strpbrk (p1, p2);
9117 tree tem;
9118
9119 if (r == NULL)
9120 return build_int_cst (TREE_TYPE (s1), 0);
9121
9122 /* Return an offset into the constant string argument. */
9123 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9124 return fold_convert_loc (loc, type, tem);
9125 }
9126
9127 if (p2[0] == '\0')
9128 /* strpbrk(x, "") == NULL.
9129 Evaluate and ignore s1 in case it had side-effects. */
9130 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9131
9132 if (p2[1] != '\0')
9133 return NULL_TREE; /* Really call strpbrk. */
9134
9135 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9136 if (!fn)
9137 return NULL_TREE;
9138
9139 /* New argument list transforming strpbrk(s1, s2) to
9140 strchr(s1, s2[0]). */
9141 return build_call_expr_loc (loc, fn, 2, s1,
9142 build_int_cst (integer_type_node, p2[0]));
9143 }
9144 }
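
/* Illustrative examples (not part of GCC) of the strpbrk folding above:

     strpbrk ("hello", "lo")  -> "hello" + 2     (both strings constant)
     strpbrk (s, "")          -> (char *) 0      (s still evaluated)
     strpbrk (s, "c")         -> strchr (s, 'c')

   Anything else is left for the library call.  */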
9145
9146 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9147 to the call.
9148
9149 Return NULL_TREE if no simplification was possible, otherwise return the
9150 simplified form of the call as a tree.
9151
9152 The simplified form may be a constant or other expression which
9153 computes the same value, but in a more efficient manner (including
9154 calls to other builtin functions).
9155
9156 The call may contain arguments which need to be evaluated, but
9157 which are not useful to determine the result of the call. In
9158 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9159 COMPOUND_EXPR will be an argument which must be evaluated.
9160 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9161 COMPOUND_EXPR in the chain will contain the tree for the simplified
9162 form of the builtin function call. */
9163
9164 static tree
9165 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9166 {
9167 if (!validate_arg (s1, POINTER_TYPE)
9168 || !validate_arg (s2, POINTER_TYPE))
9169 return NULL_TREE;
9170 else
9171 {
9172 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9173
9174 /* If either argument is "", return NULL_TREE. */
9175 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9176 /* Evaluate and ignore both arguments in case either one has
9177 side-effects. */
9178 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9179 s1, s2);
9180 return NULL_TREE;
9181 }
9182 }
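
/* Illustrative examples (not part of GCC) of the strspn folding above:

     strspn ("", s)   -> 0
     strspn (s, "")   -> 0

   In both cases the arguments are still evaluated for side effects; other
   forms are not simplified here.  */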
9183
9184 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9185 to the call.
9186
9187 Return NULL_TREE if no simplification was possible, otherwise return the
9188 simplified form of the call as a tree.
9189
9190 The simplified form may be a constant or other expression which
9191 computes the same value, but in a more efficient manner (including
9192 calls to other builtin functions).
9193
9194 The call may contain arguments which need to be evaluated, but
9195 which are not useful to determine the result of the call. In
9196 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9197 COMPOUND_EXPR will be an argument which must be evaluated.
9198 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9199 COMPOUND_EXPR in the chain will contain the tree for the simplified
9200 form of the builtin function call. */
9201
9202 static tree
9203 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9204 {
9205 if (!validate_arg (s1, POINTER_TYPE)
9206 || !validate_arg (s2, POINTER_TYPE))
9207 return NULL_TREE;
9208 else
9209 {
9210 /* If the first argument is "", return NULL_TREE. */
9211 const char *p1 = c_getstr (s1);
9212 if (p1 && *p1 == '\0')
9213 {
9214 /* Evaluate and ignore argument s2 in case it has
9215 side-effects. */
9216 return omit_one_operand_loc (loc, size_type_node,
9217 size_zero_node, s2);
9218 }
9219
9220 /* If the second argument is "", return __builtin_strlen(s1). */
9221 const char *p2 = c_getstr (s2);
9222 if (p2 && *p2 == '\0')
9223 {
9224 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9225
9226 /* If the replacement _DECL isn't initialized, don't do the
9227 transformation. */
9228 if (!fn)
9229 return NULL_TREE;
9230
9231 return build_call_expr_loc (loc, fn, 1, s1);
9232 }
9233 return NULL_TREE;
9234 }
9235 }
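
/* Illustrative examples (not part of GCC) of the strcspn folding above:

     strcspn ("", s)  -> 0              (s still evaluated)
     strcspn (s, "")  -> strlen (s)

   Other forms are left for the library call.  */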
9236
9237 /* Fold the next_arg or va_start call EXP. Returns true if an error or
9238 warning was produced; false otherwise. This is done so that we don't
9239 output the error or warning two or three times. */
9240
9241 bool
9242 fold_builtin_next_arg (tree exp, bool va_start_p)
9243 {
9244 tree fntype = TREE_TYPE (current_function_decl);
9245 int nargs = call_expr_nargs (exp);
9246 tree arg;
9247 /* There is a good chance the current input_location points inside the
9248 definition of the va_start macro (perhaps on the token for
9249 builtin) in a system header, so warnings will not be emitted.
9250 Use the location in real source code. */
9251 source_location current_location =
9252 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9253 NULL);
9254
9255 if (!stdarg_p (fntype))
9256 {
9257 error ("%<va_start%> used in function with fixed args");
9258 return true;
9259 }
9260
9261 if (va_start_p)
9262 {
9263 if (va_start_p && (nargs != 2))
9264 {
9265 error ("wrong number of arguments to function %<va_start%>");
9266 return true;
9267 }
9268 arg = CALL_EXPR_ARG (exp, 1);
9269 }
9270 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9271 when we checked the arguments and if needed issued a warning. */
9272 else
9273 {
9274 if (nargs == 0)
9275 {
9276 /* Evidently an out of date version of <stdarg.h>; can't validate
9277 va_start's second argument, but can still work as intended. */
9278 warning_at (current_location,
9279 OPT_Wvarargs,
9280 "%<__builtin_next_arg%> called without an argument");
9281 return true;
9282 }
9283 else if (nargs > 1)
9284 {
9285 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9286 return true;
9287 }
9288 arg = CALL_EXPR_ARG (exp, 0);
9289 }
9290
9291 if (TREE_CODE (arg) == SSA_NAME)
9292 arg = SSA_NAME_VAR (arg);
9293
9294 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9295 or __builtin_next_arg (0) the first time we see it, after checking
9296 the arguments and if needed issuing a warning. */
9297 if (!integer_zerop (arg))
9298 {
9299 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9300
9301 /* Strip off all nops for the sake of the comparison. This
9302 is not quite the same as STRIP_NOPS. It does more.
9303 We must also strip off INDIRECT_EXPR for C++ reference
9304 parameters. */
9305 while (CONVERT_EXPR_P (arg)
9306 || TREE_CODE (arg) == INDIRECT_REF)
9307 arg = TREE_OPERAND (arg, 0);
9308 if (arg != last_parm)
9309 {
9310 /* FIXME: Sometimes with the tree optimizers we can end up with
9311 something other than the last argument even though the user used
9312 the last argument. We just warn and set the arg to be the last
9313 argument so that we will not get wrong-code because of
9314 it. */
9315 warning_at (current_location,
9316 OPT_Wvarargs,
9317 "second parameter of %<va_start%> not last named argument");
9318 }
9319
9320 /* Undefined by C99 7.15.1.4p4 (va_start):
9321 "If the parameter parmN is declared with the register storage
9322 class, with a function or array type, or with a type that is
9323 not compatible with the type that results after application of
9324 the default argument promotions, the behavior is undefined."
9325 */
9326 else if (DECL_REGISTER (arg))
9327 {
9328 warning_at (current_location,
9329 OPT_Wvarargs,
9330 "undefined behavior when second parameter of "
9331 "%<va_start%> is declared with %<register%> storage");
9332 }
9333
9334 /* We want to verify the second parameter just once before the tree
9335 optimizers are run and then avoid keeping it in the tree,
9336 as otherwise we could warn even for correct code like:
9337 void foo (int i, ...)
9338 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9339 if (va_start_p)
9340 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9341 else
9342 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9343 }
9344 return false;
9345 }
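
/* Illustrative examples (not part of GCC) of the -Wvarargs diagnostics
   issued above:

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); ... }
       -- warns: second parameter of va_start not last named argument

     void g (register int last, ...)
     { va_list ap; va_start (ap, last); ... }
       -- warns about undefined behavior with register storage  */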
9346
9347
9348 /* Expand a call EXP to __builtin_object_size. */
9349
9350 static rtx
9351 expand_builtin_object_size (tree exp)
9352 {
9353 tree ost;
9354 int object_size_type;
9355 tree fndecl = get_callee_fndecl (exp);
9356
9357 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9358 {
9359 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9360 exp, fndecl);
9361 expand_builtin_trap ();
9362 return const0_rtx;
9363 }
9364
9365 ost = CALL_EXPR_ARG (exp, 1);
9366 STRIP_NOPS (ost);
9367
9368 if (TREE_CODE (ost) != INTEGER_CST
9369 || tree_int_cst_sgn (ost) < 0
9370 || compare_tree_int (ost, 3) > 0)
9371 {
9372 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9373 exp, fndecl);
9374 expand_builtin_trap ();
9375 return const0_rtx;
9376 }
9377
9378 object_size_type = tree_to_shwi (ost);
9379
9380 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9381 }
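
/* Illustrative sketch (not part of GCC): if a __builtin_object_size call
   survives to expansion, i.e. earlier folding could not compute the size,
   the conservative answers produced above are

     __builtin_object_size (p, 0)  -> (size_t) -1     (likewise type 1)
     __builtin_object_size (p, 2)  -> (size_t) 0      (likewise type 3)

   A non-constant or out-of-range last argument is diagnosed and expands
   to a trap.  */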
9382
9383 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9384 FCODE is the BUILT_IN_* to use.
9385 Return NULL_RTX if we failed; the caller should emit a normal call,
9386 otherwise try to get the result in TARGET, if convenient (and in
9387 mode MODE if that's convenient). */
9388
9389 static rtx
9390 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9391 enum built_in_function fcode)
9392 {
9393 tree dest, src, len, size;
9394
9395 if (!validate_arglist (exp,
9396 POINTER_TYPE,
9397 fcode == BUILT_IN_MEMSET_CHK
9398 ? INTEGER_TYPE : POINTER_TYPE,
9399 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9400 return NULL_RTX;
9401
9402 dest = CALL_EXPR_ARG (exp, 0);
9403 src = CALL_EXPR_ARG (exp, 1);
9404 len = CALL_EXPR_ARG (exp, 2);
9405 size = CALL_EXPR_ARG (exp, 3);
9406
9407 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9408 exp, len, /*maxlen=*/NULL_TREE,
9409 /*str=*/NULL_TREE, size);
9410
9411 if (!tree_fits_uhwi_p (size))
9412 return NULL_RTX;
9413
9414 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9415 {
9416 /* Avoid transforming the checking call to an ordinary one when
9417 an overflow has been detected or when the call couldn't be
9418 validated because the size is not constant. */
9419 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9420 return NULL_RTX;
9421
9422 tree fn = NULL_TREE;
9423 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9424 mem{cpy,pcpy,move,set} is available. */
9425 switch (fcode)
9426 {
9427 case BUILT_IN_MEMCPY_CHK:
9428 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9429 break;
9430 case BUILT_IN_MEMPCPY_CHK:
9431 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9432 break;
9433 case BUILT_IN_MEMMOVE_CHK:
9434 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9435 break;
9436 case BUILT_IN_MEMSET_CHK:
9437 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9438 break;
9439 default:
9440 break;
9441 }
9442
9443 if (! fn)
9444 return NULL_RTX;
9445
9446 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9447 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9448 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9449 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9450 }
9451 else if (fcode == BUILT_IN_MEMSET_CHK)
9452 return NULL_RTX;
9453 else
9454 {
9455 unsigned int dest_align = get_pointer_alignment (dest);
9456
9457 /* If DEST is not a pointer type, call the normal function. */
9458 if (dest_align == 0)
9459 return NULL_RTX;
9460
9461 /* If SRC and DEST are the same (and not volatile), do nothing. */
9462 if (operand_equal_p (src, dest, 0))
9463 {
9464 tree expr;
9465
9466 if (fcode != BUILT_IN_MEMPCPY_CHK)
9467 {
9468 /* Evaluate and ignore LEN in case it has side-effects. */
9469 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9470 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9471 }
9472
9473 expr = fold_build_pointer_plus (dest, len);
9474 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9475 }
9476
9477 /* __memmove_chk special case. */
9478 if (fcode == BUILT_IN_MEMMOVE_CHK)
9479 {
9480 unsigned int src_align = get_pointer_alignment (src);
9481
9482 if (src_align == 0)
9483 return NULL_RTX;
9484
9485 /* If src is categorized for a readonly section we can use
9486 normal __memcpy_chk. */
9487 if (readonly_data_expr (src))
9488 {
9489 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9490 if (!fn)
9491 return NULL_RTX;
9492 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9493 dest, src, len, size);
9494 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9495 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9496 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9497 }
9498 }
9499 return NULL_RTX;
9500 }
9501 }
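
/* Illustrative sketch (not part of GCC) of the transformation above,
   assuming a 16-byte destination:

     char buf[16];
     __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));

   The object size folds to 16 and the constant length 8 fits, so the
   checking call is expanded as a plain memcpy (buf, src, 8).  If the
   length were known to exceed the size, a -Wstringop-overflow warning
   would be emitted and the checking call kept.  */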
9502
9503 /* Emit warning if a buffer overflow is detected at compile time. */
9504
9505 static void
9506 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9507 {
9508 /* The source string. */
9509 tree srcstr = NULL_TREE;
9510 /* The size of the destination object. */
9511 tree objsize = NULL_TREE;
9512 /* The string that is being concatenated with (as in __strcat_chk)
9513 or null if it isn't. */
9514 tree catstr = NULL_TREE;
9515 /* The maximum length of the source sequence in a bounded operation
9516 (such as __strncat_chk) or null if the operation isn't bounded
9517 (such as __strcat_chk). */
9518 tree maxlen = NULL_TREE;
9519
9520 switch (fcode)
9521 {
9522 case BUILT_IN_STRCPY_CHK:
9523 case BUILT_IN_STPCPY_CHK:
9524 srcstr = CALL_EXPR_ARG (exp, 1);
9525 objsize = CALL_EXPR_ARG (exp, 2);
9526 break;
9527
9528 case BUILT_IN_STRCAT_CHK:
9529 /* For __strcat_chk the warning will be emitted only if overflowing
9530 by at least strlen (dest) + 1 bytes. */
9531 catstr = CALL_EXPR_ARG (exp, 0);
9532 srcstr = CALL_EXPR_ARG (exp, 1);
9533 objsize = CALL_EXPR_ARG (exp, 2);
9534 break;
9535
9536 case BUILT_IN_STRNCAT_CHK:
9537 catstr = CALL_EXPR_ARG (exp, 0);
9538 srcstr = CALL_EXPR_ARG (exp, 1);
9539 maxlen = CALL_EXPR_ARG (exp, 2);
9540 objsize = CALL_EXPR_ARG (exp, 3);
9541 break;
9542
9543 case BUILT_IN_STRNCPY_CHK:
9544 case BUILT_IN_STPNCPY_CHK:
9545 srcstr = CALL_EXPR_ARG (exp, 1);
9546 maxlen = CALL_EXPR_ARG (exp, 2);
9547 objsize = CALL_EXPR_ARG (exp, 3);
9548 break;
9549
9550 case BUILT_IN_SNPRINTF_CHK:
9551 case BUILT_IN_VSNPRINTF_CHK:
9552 maxlen = CALL_EXPR_ARG (exp, 1);
9553 objsize = CALL_EXPR_ARG (exp, 3);
9554 break;
9555 default:
9556 gcc_unreachable ();
9557 }
9558
9559 if (catstr && maxlen)
9560 {
9561 /* Check __strncat_chk. There is no way to determine the length
9562 of the string to which the source string is being appended so
9563 just warn when the length of the source string is not known. */
9564 check_strncat_sizes (exp, objsize);
9565 return;
9566 }
9567
9568 check_sizes (OPT_Wstringop_overflow_, exp,
9569 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9570 }
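
/* Illustrative example (not part of GCC) of the checking above:

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", __builtin_object_size (buf, 0));

   The source string needs 9 bytes including the terminating nul but the
   destination object size is 4, so a -Wstringop-overflow warning is
   emitted here at expansion time.  */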
9571
9572 /* Emit warning if a buffer overflow is detected at compile time
9573 in __sprintf_chk/__vsprintf_chk calls. */
9574
9575 static void
9576 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9577 {
9578 tree size, len, fmt;
9579 const char *fmt_str;
9580 int nargs = call_expr_nargs (exp);
9581
9582 /* Verify the required arguments in the original call. */
9583
9584 if (nargs < 4)
9585 return;
9586 size = CALL_EXPR_ARG (exp, 2);
9587 fmt = CALL_EXPR_ARG (exp, 3);
9588
9589 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9590 return;
9591
9592 /* Check whether the format is a literal string constant. */
9593 fmt_str = c_getstr (fmt);
9594 if (fmt_str == NULL)
9595 return;
9596
9597 if (!init_target_chars ())
9598 return;
9599
9600 /* If the format doesn't contain % args or %%, we know its size. */
9601 if (strchr (fmt_str, target_percent) == 0)
9602 len = build_int_cstu (size_type_node, strlen (fmt_str));
9603 /* If the format is "%s" and first ... argument is a string literal,
9604 we know it too. */
9605 else if (fcode == BUILT_IN_SPRINTF_CHK
9606 && strcmp (fmt_str, target_percent_s) == 0)
9607 {
9608 tree arg;
9609
9610 if (nargs < 5)
9611 return;
9612 arg = CALL_EXPR_ARG (exp, 4);
9613 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9614 return;
9615
9616 len = c_strlen (arg, 1);
9617 if (!len || ! tree_fits_uhwi_p (len))
9618 return;
9619 }
9620 else
9621 return;
9622
9623 /* Add one for the terminating nul. */
9624 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9625 check_sizes (OPT_Wstringop_overflow_,
9626 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9627 }
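
/* Illustrative example (not part of GCC) of the warning above:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "toolong");

   The format contains no % directives, so its length plus the terminating
   nul (8) is compared against the object size (4) and a
   -Wstringop-overflow warning is issued.  */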
9628
9629 /* Emit warning if a free is called with address of a variable. */
9630
9631 static void
9632 maybe_emit_free_warning (tree exp)
9633 {
9634 tree arg = CALL_EXPR_ARG (exp, 0);
9635
9636 STRIP_NOPS (arg);
9637 if (TREE_CODE (arg) != ADDR_EXPR)
9638 return;
9639
9640 arg = get_base_address (TREE_OPERAND (arg, 0));
9641 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9642 return;
9643
9644 if (SSA_VAR_P (arg))
9645 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9646 "%Kattempt to free a non-heap object %qD", exp, arg);
9647 else
9648 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9649 "%Kattempt to free a non-heap object", exp);
9650 }
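
/* Illustrative example (not part of GCC) of the warning above:

     int x;
     free (&x);   -- warns: attempt to free a non-heap object 'x'  */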
9651
9652 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9653 if possible. */
9654
9655 static tree
9656 fold_builtin_object_size (tree ptr, tree ost)
9657 {
9658 unsigned HOST_WIDE_INT bytes;
9659 int object_size_type;
9660
9661 if (!validate_arg (ptr, POINTER_TYPE)
9662 || !validate_arg (ost, INTEGER_TYPE))
9663 return NULL_TREE;
9664
9665 STRIP_NOPS (ost);
9666
9667 if (TREE_CODE (ost) != INTEGER_CST
9668 || tree_int_cst_sgn (ost) < 0
9669 || compare_tree_int (ost, 3) > 0)
9670 return NULL_TREE;
9671
9672 object_size_type = tree_to_shwi (ost);
9673
9674 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9675 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9676 and (size_t) 0 for types 2 and 3. */
9677 if (TREE_SIDE_EFFECTS (ptr))
9678 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9679
9680 if (TREE_CODE (ptr) == ADDR_EXPR)
9681 {
9682 compute_builtin_object_size (ptr, object_size_type, &bytes);
9683 if (wi::fits_to_tree_p (bytes, size_type_node))
9684 return build_int_cstu (size_type_node, bytes);
9685 }
9686 else if (TREE_CODE (ptr) == SSA_NAME)
9687 {
9688 /* If object size is not known yet, delay folding until
9689 later. Maybe subsequent passes will help determining
9690 it. */
9691 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9692 && wi::fits_to_tree_p (bytes, size_type_node))
9693 return build_int_cstu (size_type_node, bytes);
9694 }
9695
9696 return NULL_TREE;
9697 }
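
/* Illustrative examples (not part of GCC) of the folding above, where f is
   any call with side effects:

     char buf[64];
     __builtin_object_size (&buf[8], 0)   -> 56
     __builtin_object_size (f (), 0)      -> (size_t) -1
     __builtin_object_size (f (), 2)      -> (size_t) 0

   An SSA_NAME pointer whose size is not known yet is left alone so later
   passes can try again.  */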
9698
9699 /* Initialize format string characters in the target charset. */
9700
9701 bool
9702 init_target_chars (void)
9703 {
9704 static bool init;
9705 if (!init)
9706 {
9707 target_newline = lang_hooks.to_target_charset ('\n');
9708 target_percent = lang_hooks.to_target_charset ('%');
9709 target_c = lang_hooks.to_target_charset ('c');
9710 target_s = lang_hooks.to_target_charset ('s');
9711 if (target_newline == 0 || target_percent == 0 || target_c == 0
9712 || target_s == 0)
9713 return false;
9714
9715 target_percent_c[0] = target_percent;
9716 target_percent_c[1] = target_c;
9717 target_percent_c[2] = '\0';
9718
9719 target_percent_s[0] = target_percent;
9720 target_percent_s[1] = target_s;
9721 target_percent_s[2] = '\0';
9722
9723 target_percent_s_newline[0] = target_percent;
9724 target_percent_s_newline[1] = target_s;
9725 target_percent_s_newline[2] = target_newline;
9726 target_percent_s_newline[3] = '\0';
9727
9728 init = true;
9729 }
9730 return true;
9731 }
9732
9733 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9734 and no overflow/underflow occurred. INEXACT is true if M was not
9735 exactly calculated. TYPE is the tree type for the result. This
9736 function assumes that you cleared the MPFR flags and then
9737 calculated M to see if anything subsequently set a flag prior to
9738 entering this function. Return NULL_TREE if any checks fail. */
9739
9740 static tree
9741 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9742 {
9743 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9744 overflow/underflow occurred. If -frounding-math, proceed iff the
9745 result of calling FUNC was exact. */
9746 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9747 && (!flag_rounding_math || !inexact))
9748 {
9749 REAL_VALUE_TYPE rr;
9750
9751 real_from_mpfr (&rr, m, type, GMP_RNDN);
9752 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9753 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9754 but the mpfr_t is not, then we underflowed in the
9755 conversion. */
9756 if (real_isfinite (&rr)
9757 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9758 {
9759 REAL_VALUE_TYPE rmode;
9760
9761 real_convert (&rmode, TYPE_MODE (type), &rr);
9762 /* Proceed iff the specified mode can hold the value. */
9763 if (real_identical (&rmode, &rr))
9764 return build_real (type, rmode);
9765 }
9766 }
9767 return NULL_TREE;
9768 }
9769
9770 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9771 number and no overflow/underflow occurred. INEXACT is true if M
9772 was not exactly calculated. TYPE is the tree type for the result.
9773 This function assumes that you cleared the MPFR flags and then
9774 calculated M to see if anything subsequently set a flag prior to
9775 entering this function. Return NULL_TREE if any checks fail, if
9776 FORCE_CONVERT is true, then bypass the checks. */
9777
9778 static tree
9779 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9780 {
9781 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9782 overflow/underflow occurred. If -frounding-math, proceed iff the
9783 result of calling FUNC was exact. */
9784 if (force_convert
9785 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9786 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9787 && (!flag_rounding_math || !inexact)))
9788 {
9789 REAL_VALUE_TYPE re, im;
9790
9791 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9792 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9793 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9794 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9795 but the mpfr_t is not, then we underflowed in the
9796 conversion. */
9797 if (force_convert
9798 || (real_isfinite (&re) && real_isfinite (&im)
9799 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9800 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9801 {
9802 REAL_VALUE_TYPE re_mode, im_mode;
9803
9804 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9805 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9806 /* Proceed iff the specified mode can hold the value. */
9807 if (force_convert
9808 || (real_identical (&re_mode, &re)
9809 && real_identical (&im_mode, &im)))
9810 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9811 build_real (TREE_TYPE (type), im_mode));
9812 }
9813 }
9814 return NULL_TREE;
9815 }
9816
9817 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9818 the pointer *(ARG_QUO) and return the result. The type is taken
9819 from the type of ARG0 and is used for setting the precision of the
9820 calculation and results. */
9821
9822 static tree
9823 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9824 {
9825 tree const type = TREE_TYPE (arg0);
9826 tree result = NULL_TREE;
9827
9828 STRIP_NOPS (arg0);
9829 STRIP_NOPS (arg1);
9830
9831 /* To proceed, MPFR must exactly represent the target floating point
9832 format, which only happens when the target base equals two. */
9833 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9834 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9835 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9836 {
9837 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9838 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9839
9840 if (real_isfinite (ra0) && real_isfinite (ra1))
9841 {
9842 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9843 const int prec = fmt->p;
9844 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9845 tree result_rem;
9846 long integer_quo;
9847 mpfr_t m0, m1;
9848
9849 mpfr_inits2 (prec, m0, m1, NULL);
9850 mpfr_from_real (m0, ra0, GMP_RNDN);
9851 mpfr_from_real (m1, ra1, GMP_RNDN);
9852 mpfr_clear_flags ();
9853 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9854 /* Remquo is independent of the rounding mode, so pass
9855 inexact=0 to do_mpfr_ckconv(). */
9856 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9857 mpfr_clears (m0, m1, NULL);
9858 if (result_rem)
9859 {
9860 /* MPFR calculates quo in the host's long so it may
9861 return more bits in quo than the target int can hold
9862 if sizeof(host long) > sizeof(target int). This can
9863 happen even for native compilers in LP64 mode. In
9864 these cases, modulo the quo value with the largest
9865 number that the target int can hold while leaving one
9866 bit for the sign. */
9867 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9868 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9869
9870 /* Dereference the quo pointer argument. */
9871 arg_quo = build_fold_indirect_ref (arg_quo);
9872 /* Proceed iff a valid pointer type was passed in. */
9873 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9874 {
9875 /* Set the value. */
9876 tree result_quo
9877 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9878 build_int_cst (TREE_TYPE (arg_quo),
9879 integer_quo));
9880 TREE_SIDE_EFFECTS (result_quo) = 1;
9881 /* Combine the quo assignment with the rem. */
9882 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9883 result_quo, result_rem));
9884 }
9885 }
9886 }
9887 }
9888 return result;
9889 }
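
/* Illustrative example (not part of GCC) of the constant folding above:

     int q;
     double r = remquo (5.0, 3.0, &q);

   With both operands constant this folds to a COMPOUND_EXPR assigning
   q = 2 (the quotient rounded to nearest) and yielding -1.0 as the
   remainder, computed with MPFR at the precision of double.  */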
9890
9891 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9892 resulting value as a tree with type TYPE. The mpfr precision is
9893 set to the precision of TYPE. We assume that this mpfr function
9894 returns zero if the result could be calculated exactly within the
9895 requested precision. In addition, the integer pointer represented
9896 by ARG_SG will be dereferenced and set to the appropriate signgam
9897 (-1,1) value. */
9898
9899 static tree
9900 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9901 {
9902 tree result = NULL_TREE;
9903
9904 STRIP_NOPS (arg);
9905
9906 /* To proceed, MPFR must exactly represent the target floating point
9907 format, which only happens when the target base equals two. Also
9908 verify ARG is a constant and that ARG_SG is an int pointer. */
9909 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9910 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9911 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9912 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9913 {
9914 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9915
9916 /* In addition to NaN and Inf, the argument cannot be zero or a
9917 negative integer. */
9918 if (real_isfinite (ra)
9919 && ra->cl != rvc_zero
9920 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9921 {
9922 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9923 const int prec = fmt->p;
9924 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9925 int inexact, sg;
9926 mpfr_t m;
9927 tree result_lg;
9928
9929 mpfr_init2 (m, prec);
9930 mpfr_from_real (m, ra, GMP_RNDN);
9931 mpfr_clear_flags ();
9932 inexact = mpfr_lgamma (m, &sg, m, rnd);
9933 result_lg = do_mpfr_ckconv (m, type, inexact);
9934 mpfr_clear (m);
9935 if (result_lg)
9936 {
9937 tree result_sg;
9938
9939 /* Dereference the arg_sg pointer argument. */
9940 arg_sg = build_fold_indirect_ref (arg_sg);
9941 /* Assign the signgam value into *arg_sg. */
9942 result_sg = fold_build2 (MODIFY_EXPR,
9943 TREE_TYPE (arg_sg), arg_sg,
9944 build_int_cst (TREE_TYPE (arg_sg), sg));
9945 TREE_SIDE_EFFECTS (result_sg) = 1;
9946 /* Combine the signgam assignment with the lgamma result. */
9947 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9948 result_sg, result_lg));
9949 }
9950 }
9951 }
9952
9953 return result;
9954 }
9955
9956 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9957 mpc function FUNC on it and return the resulting value as a tree
9958 with type TYPE. The mpfr precision is set to the precision of
9959 TYPE. We assume that function FUNC returns zero if the result
9960 could be calculated exactly within the requested precision. If
9961 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9962 in the arguments and/or results. */
9963
9964 tree
9965 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9966 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9967 {
9968 tree result = NULL_TREE;
9969
9970 STRIP_NOPS (arg0);
9971 STRIP_NOPS (arg1);
9972
9973 /* To proceed, MPFR must exactly represent the target floating point
9974 format, which only happens when the target base equals two. */
9975 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9977 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9978 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9979 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9980 {
9981 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9982 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9983 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9984 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9985
9986 if (do_nonfinite
9987 || (real_isfinite (re0) && real_isfinite (im0)
9988 && real_isfinite (re1) && real_isfinite (im1)))
9989 {
9990 const struct real_format *const fmt =
9991 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9992 const int prec = fmt->p;
9993 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9994 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9995 int inexact;
9996 mpc_t m0, m1;
9997
9998 mpc_init2 (m0, prec);
9999 mpc_init2 (m1, prec);
10000 mpfr_from_real (mpc_realref (m0), re0, rnd);
10001 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10002 mpfr_from_real (mpc_realref (m1), re1, rnd);
10003 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10004 mpfr_clear_flags ();
10005 inexact = func (m0, m0, m1, crnd);
10006 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10007 mpc_clear (m0);
10008 mpc_clear (m1);
10009 }
10010 }
10011
10012 return result;
10013 }
10014
10015 /* A wrapper function for builtin folding that prevents warnings for
10016 "statement without effect" and the like, caused by removing the
10017 call node earlier than the warning is generated. */
10018
10019 tree
10020 fold_call_stmt (gcall *stmt, bool ignore)
10021 {
10022 tree ret = NULL_TREE;
10023 tree fndecl = gimple_call_fndecl (stmt);
10024 location_t loc = gimple_location (stmt);
10025 if (fndecl
10026 && TREE_CODE (fndecl) == FUNCTION_DECL
10027 && DECL_BUILT_IN (fndecl)
10028 && !gimple_call_va_arg_pack_p (stmt))
10029 {
10030 int nargs = gimple_call_num_args (stmt);
10031 tree *args = (nargs > 0
10032 ? gimple_call_arg_ptr (stmt, 0)
10033 : &error_mark_node);
10034
10035 if (avoid_folding_inline_builtin (fndecl))
10036 return NULL_TREE;
10037 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10038 {
10039 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10040 }
10041 else
10042 {
10043 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10044 if (ret)
10045 {
10046 /* Propagate location information from original call to
10047 expansion of builtin. Otherwise things like
10048 maybe_emit_chk_warning, that operate on the expansion
10049 of a builtin, will use the wrong location information. */
10050 if (gimple_has_location (stmt))
10051 {
10052 tree realret = ret;
10053 if (TREE_CODE (ret) == NOP_EXPR)
10054 realret = TREE_OPERAND (ret, 0);
10055 if (CAN_HAVE_LOCATION_P (realret)
10056 && !EXPR_HAS_LOCATION (realret))
10057 SET_EXPR_LOCATION (realret, loc);
10058 return realret;
10059 }
10060 return ret;
10061 }
10062 }
10063 }
10064 return NULL_TREE;
10065 }
10066
10067 /* Look up the function in builtin_decl that corresponds to DECL
10068 and set ASMSPEC as its user assembler name. DECL must be a
10069 function decl that declares a builtin. */
10070
10071 void
10072 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10073 {
10074 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10075 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10076 && asmspec != 0);
10077
10078 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10079 set_user_assembler_name (builtin, asmspec);
10080
10081 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10082 && INT_TYPE_SIZE < BITS_PER_WORD)
10083 {
10084 set_user_assembler_libfunc ("ffs", asmspec);
10085 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10086 "ffs");
10087 }
10088 }
10089
10090 /* Return true if DECL is a builtin that expands to a constant or similarly
10091 simple code. */
10092 bool
10093 is_simple_builtin (tree decl)
10094 {
10095 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10096 switch (DECL_FUNCTION_CODE (decl))
10097 {
10098 /* Builtins that expand to constants. */
10099 case BUILT_IN_CONSTANT_P:
10100 case BUILT_IN_EXPECT:
10101 case BUILT_IN_OBJECT_SIZE:
10102 case BUILT_IN_UNREACHABLE:
10103 /* Simple register moves or loads from stack. */
10104 case BUILT_IN_ASSUME_ALIGNED:
10105 case BUILT_IN_RETURN_ADDRESS:
10106 case BUILT_IN_EXTRACT_RETURN_ADDR:
10107 case BUILT_IN_FROB_RETURN_ADDR:
10108 case BUILT_IN_RETURN:
10109 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10110 case BUILT_IN_FRAME_ADDRESS:
10111 case BUILT_IN_VA_END:
10112 case BUILT_IN_STACK_SAVE:
10113 case BUILT_IN_STACK_RESTORE:
10114 /* Exception state returns or moves registers around. */
10115 case BUILT_IN_EH_FILTER:
10116 case BUILT_IN_EH_POINTER:
10117 case BUILT_IN_EH_COPY_VALUES:
10118 return true;
10119
10120 default:
10121 return false;
10122 }
10123
10124 return false;
10125 }
10126
10127 /* Return true if DECL is a builtin that is not expensive, i.e. it will
10128 most probably be expanded inline into reasonably simple code. This is a
10129 superset of is_simple_builtin. */
10130 bool
10131 is_inexpensive_builtin (tree decl)
10132 {
10133 if (!decl)
10134 return false;
10135 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10136 return true;
10137 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10138 switch (DECL_FUNCTION_CODE (decl))
10139 {
10140 case BUILT_IN_ABS:
10141 case BUILT_IN_ALLOCA:
10142 case BUILT_IN_ALLOCA_WITH_ALIGN:
10143 case BUILT_IN_BSWAP16:
10144 case BUILT_IN_BSWAP32:
10145 case BUILT_IN_BSWAP64:
10146 case BUILT_IN_CLZ:
10147 case BUILT_IN_CLZIMAX:
10148 case BUILT_IN_CLZL:
10149 case BUILT_IN_CLZLL:
10150 case BUILT_IN_CTZ:
10151 case BUILT_IN_CTZIMAX:
10152 case BUILT_IN_CTZL:
10153 case BUILT_IN_CTZLL:
10154 case BUILT_IN_FFS:
10155 case BUILT_IN_FFSIMAX:
10156 case BUILT_IN_FFSL:
10157 case BUILT_IN_FFSLL:
10158 case BUILT_IN_IMAXABS:
10159 case BUILT_IN_FINITE:
10160 case BUILT_IN_FINITEF:
10161 case BUILT_IN_FINITEL:
10162 case BUILT_IN_FINITED32:
10163 case BUILT_IN_FINITED64:
10164 case BUILT_IN_FINITED128:
10165 case BUILT_IN_FPCLASSIFY:
10166 case BUILT_IN_ISFINITE:
10167 case BUILT_IN_ISINF_SIGN:
10168 case BUILT_IN_ISINF:
10169 case BUILT_IN_ISINFF:
10170 case BUILT_IN_ISINFL:
10171 case BUILT_IN_ISINFD32:
10172 case BUILT_IN_ISINFD64:
10173 case BUILT_IN_ISINFD128:
10174 case BUILT_IN_ISNAN:
10175 case BUILT_IN_ISNANF:
10176 case BUILT_IN_ISNANL:
10177 case BUILT_IN_ISNAND32:
10178 case BUILT_IN_ISNAND64:
10179 case BUILT_IN_ISNAND128:
10180 case BUILT_IN_ISNORMAL:
10181 case BUILT_IN_ISGREATER:
10182 case BUILT_IN_ISGREATEREQUAL:
10183 case BUILT_IN_ISLESS:
10184 case BUILT_IN_ISLESSEQUAL:
10185 case BUILT_IN_ISLESSGREATER:
10186 case BUILT_IN_ISUNORDERED:
10187 case BUILT_IN_VA_ARG_PACK:
10188 case BUILT_IN_VA_ARG_PACK_LEN:
10189 case BUILT_IN_VA_COPY:
10190 case BUILT_IN_TRAP:
10191 case BUILT_IN_SAVEREGS:
10192 case BUILT_IN_POPCOUNTL:
10193 case BUILT_IN_POPCOUNTLL:
10194 case BUILT_IN_POPCOUNTIMAX:
10195 case BUILT_IN_POPCOUNT:
10196 case BUILT_IN_PARITYL:
10197 case BUILT_IN_PARITYLL:
10198 case BUILT_IN_PARITYIMAX:
10199 case BUILT_IN_PARITY:
10200 case BUILT_IN_LABS:
10201 case BUILT_IN_LLABS:
10202 case BUILT_IN_PREFETCH:
10203 case BUILT_IN_ACC_ON_DEVICE:
10204 return true;
10205
10206 default:
10207 return is_simple_builtin (decl);
10208 }
10209
10210 return false;
10211 }
10212
10213 /* Return true if T is a constant and the value cast to a target char
10214 can be represented by a host char.
10215 Store the casted char constant in *P if so. */
10216
10217 bool
10218 target_char_cst_p (tree t, char *p)
10219 {
10220 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10221 return false;
10222
10223 *p = (char)tree_to_uhwi (t);
10224 return true;
10225 }