1 /* Expand builtin functions.
2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding); builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75
76 struct target_builtins default_target_builtins;
77 #if SWITCHABLE_TARGET
78 struct target_builtins *this_target_builtins = &default_target_builtins;
79 #endif
80
81 /* Define the names of the builtin function types and codes. */
82 const char *const built_in_class_names[BUILT_IN_LAST]
83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84
85 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
86 const char * built_in_names[(int) END_BUILTINS] =
87 {
88 #include "builtins.def"
89 };
90
91 /* Set up an array of builtin_info_type; make sure each element's decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info[(int)END_BUILTINS];
94
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p;
97
98 static rtx c_readstr (const char *, scalar_int_mode);
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx inline_expand_builtin_string_cmp (tree, rtx, bool);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
126 static rtx expand_builtin_memchr (tree, rtx);
127 static rtx expand_builtin_memcpy (tree, rtx);
128 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp, int endp);
130 static rtx expand_builtin_memmove (tree, rtx);
131 static rtx expand_builtin_mempcpy (tree, rtx);
132 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
133 static rtx expand_builtin_strcat (tree, rtx);
134 static rtx expand_builtin_strcpy (tree, rtx);
135 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
136 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
137 static rtx expand_builtin_stpncpy (tree, rtx);
138 static rtx expand_builtin_strncat (tree, rtx);
139 static rtx expand_builtin_strncpy (tree, rtx);
140 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
141 static rtx expand_builtin_memset (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
146 static rtx expand_builtin_alloca (tree);
147 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
148 static rtx expand_builtin_frame_address (tree, tree);
149 static tree stabilize_va_list_loc (location_t, tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_classify_type (tree);
153 static tree fold_builtin_strlen (location_t, tree, tree);
154 static tree fold_builtin_inf (location_t, tree, int);
155 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
156 static bool validate_arg (const_tree, enum tree_code code);
157 static rtx expand_builtin_fabs (tree, rtx, rtx);
158 static rtx expand_builtin_signbit (tree, rtx);
159 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
160 static tree fold_builtin_isascii (location_t, tree);
161 static tree fold_builtin_toascii (location_t, tree);
162 static tree fold_builtin_isdigit (location_t, tree);
163 static tree fold_builtin_fabs (location_t, tree, tree);
164 static tree fold_builtin_abs (location_t, tree, tree);
165 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
166 enum tree_code);
167 static tree fold_builtin_0 (location_t, tree);
168 static tree fold_builtin_1 (location_t, tree, tree);
169 static tree fold_builtin_2 (location_t, tree, tree, tree);
170 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_varargs (location_t, tree, tree*, int);
172
173 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
174 static tree fold_builtin_strspn (location_t, tree, tree);
175 static tree fold_builtin_strcspn (location_t, tree, tree);
176
177 static rtx expand_builtin_object_size (tree);
178 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
179 enum built_in_function);
180 static void maybe_emit_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
182 static void maybe_emit_free_warning (tree);
183 static tree fold_builtin_object_size (tree, tree);
184
185 unsigned HOST_WIDE_INT target_newline;
186 unsigned HOST_WIDE_INT target_percent;
187 static unsigned HOST_WIDE_INT target_c;
188 static unsigned HOST_WIDE_INT target_s;
189 char target_percent_c[3];
190 char target_percent_s[3];
191 char target_percent_s_newline[4];
192 static tree do_mpfr_remquo (tree, tree, tree);
193 static tree do_mpfr_lgamma_r (tree, tree, tree);
194 static void expand_builtin_sync_synchronize (void);
195
196 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
197
198 static bool
199 is_builtin_name (const char *name)
200 {
201 if (strncmp (name, "__builtin_", 10) == 0)
202 return true;
203 if (strncmp (name, "__sync_", 7) == 0)
204 return true;
205 if (strncmp (name, "__atomic_", 9) == 0)
206 return true;
207 return false;
208 }
209
210
211 /* Return true if DECL is a function symbol representing a built-in. */
212
213 bool
214 is_builtin_fn (tree decl)
215 {
216 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
217 }
218
219 /* Return true if NODE should be considered for inline expansion regardless
220 of the optimization level. This is the case whenever the function is invoked
221 under its "internal" name, which normally begins with the prefix "__builtin". */
222
223 bool
224 called_as_built_in (tree node)
225 {
226 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
227 we want the name used to call the function, not the name it
228 will have. */
229 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
230 return is_builtin_name (name);
231 }
232
233 /* Compute values M and N such that M divides (address of EXP - N) and such
234 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
235 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in
236 *ALIGNP and any bit-offset in *BITPOSP.
237
238 Note that the address (and thus the alignment) computed here is based
239 on the address to which a symbol resolves, whereas DECL_ALIGN is based
240 on the address at which an object is actually located. These two
241 addresses are not always the same. For example, on ARM targets,
242 the address &foo of a Thumb function foo() has the lowest bit set,
243 whereas foo() itself starts on an even address.
244
245 If ADDR_P is true we are taking the address of the memory reference EXP
246 and thus cannot rely on the access taking place. */
247
248 static bool
249 get_object_alignment_2 (tree exp, unsigned int *alignp,
250 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
251 {
252 poly_int64 bitsize, bitpos;
253 tree offset;
254 machine_mode mode;
255 int unsignedp, reversep, volatilep;
256 unsigned int align = BITS_PER_UNIT;
257 bool known_alignment = false;
258
259 /* Get the innermost object and the constant (bitpos) and possibly
260 variable (offset) offset of the access. */
261 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
262 &unsignedp, &reversep, &volatilep);
263
264 /* Extract alignment information from the innermost object and
265 possibly adjust bitpos and offset. */
266 if (TREE_CODE (exp) == FUNCTION_DECL)
267 {
268 /* Function addresses can encode extra information besides their
269 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
270 allows the low bit to be used as a virtual bit, we know
271 that the address itself must be at least 2-byte aligned. */
272 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
273 align = 2 * BITS_PER_UNIT;
274 }
275 else if (TREE_CODE (exp) == LABEL_DECL)
276 ;
277 else if (TREE_CODE (exp) == CONST_DECL)
278 {
279 /* The alignment of a CONST_DECL is determined by its initializer. */
280 exp = DECL_INITIAL (exp);
281 align = TYPE_ALIGN (TREE_TYPE (exp));
282 if (CONSTANT_CLASS_P (exp))
283 align = targetm.constant_alignment (exp, align);
284
285 known_alignment = true;
286 }
287 else if (DECL_P (exp))
288 {
289 align = DECL_ALIGN (exp);
290 known_alignment = true;
291 }
292 else if (TREE_CODE (exp) == INDIRECT_REF
293 || TREE_CODE (exp) == MEM_REF
294 || TREE_CODE (exp) == TARGET_MEM_REF)
295 {
296 tree addr = TREE_OPERAND (exp, 0);
297 unsigned ptr_align;
298 unsigned HOST_WIDE_INT ptr_bitpos;
299 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
300
301 /* If the address is explicitly aligned, handle that. */
302 if (TREE_CODE (addr) == BIT_AND_EXPR
303 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
304 {
305 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
306 ptr_bitmask *= BITS_PER_UNIT;
307 align = least_bit_hwi (ptr_bitmask);
308 addr = TREE_OPERAND (addr, 0);
309 }
310
311 known_alignment
312 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
313 align = MAX (ptr_align, align);
314
315 /* Re-apply explicit alignment to the bitpos. */
316 ptr_bitpos &= ptr_bitmask;
317
318 /* The alignment of the pointer operand in a TARGET_MEM_REF
319 has to take the variable offset parts into account. */
320 if (TREE_CODE (exp) == TARGET_MEM_REF)
321 {
322 if (TMR_INDEX (exp))
323 {
324 unsigned HOST_WIDE_INT step = 1;
325 if (TMR_STEP (exp))
326 step = TREE_INT_CST_LOW (TMR_STEP (exp));
327 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
328 }
329 if (TMR_INDEX2 (exp))
330 align = BITS_PER_UNIT;
331 known_alignment = false;
332 }
333
334 /* When EXP is an actual memory reference then we can use
335 TYPE_ALIGN of a pointer indirection to derive alignment.
336 Do so only if get_pointer_alignment_1 did not reveal absolute
337 alignment knowledge and if using that alignment would
338 improve the situation. */
339 unsigned int talign;
340 if (!addr_p && !known_alignment
341 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
342 && talign > align)
343 align = talign;
344 else
345 {
346 /* Else adjust bitpos accordingly. */
347 bitpos += ptr_bitpos;
348 if (TREE_CODE (exp) == MEM_REF
349 || TREE_CODE (exp) == TARGET_MEM_REF)
350 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
351 }
352 }
353 else if (TREE_CODE (exp) == STRING_CST)
354 {
355 /* STRING_CST are the only constant objects we allow to be not
356 wrapped inside a CONST_DECL. */
357 align = TYPE_ALIGN (TREE_TYPE (exp));
358 if (CONSTANT_CLASS_P (exp))
359 align = targetm.constant_alignment (exp, align);
360
361 known_alignment = true;
362 }
363
364 /* If there is a non-constant offset part extract the maximum
365 alignment that can prevail. */
366 if (offset)
367 {
368 unsigned int trailing_zeros = tree_ctz (offset);
369 if (trailing_zeros < HOST_BITS_PER_INT)
370 {
371 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
372 if (inner)
373 align = MIN (align, inner);
374 }
375 }
376
377 /* Account for the alignment of runtime coefficients, so that the constant
378 bitpos is guaranteed to be accurate. */
379 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
380 if (alt_align != 0 && alt_align < align)
381 {
382 align = alt_align;
383 known_alignment = false;
384 }
385
386 *alignp = align;
387 *bitposp = bitpos.coeffs[0] & (align - 1);
388 return known_alignment;
389 }
390
391 /* For a memory reference expression EXP compute values M and N such that M
392 divides (&EXP - N) and such that N < M. If these numbers can be determined,
393 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
394 and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */
395
396 bool
397 get_object_alignment_1 (tree exp, unsigned int *alignp,
398 unsigned HOST_WIDE_INT *bitposp)
399 {
400 return get_object_alignment_2 (exp, alignp, bitposp, false);
401 }
402
403 /* Return the alignment in bits of EXP, an object. */
404
405 unsigned int
406 get_object_alignment (tree exp)
407 {
408 unsigned HOST_WIDE_INT bitpos = 0;
409 unsigned int align;
410
411 get_object_alignment_1 (exp, &align, &bitpos);
412
413 /* align and bitpos now specify known low bits of the pointer.
414 ptr & (align - 1) == bitpos. */
415
416 if (bitpos != 0)
417 align = least_bit_hwi (bitpos);
418 return align;
419 }
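/* Editor's illustrative sketch (hypothetical helper, not part of the original
   source): one way a caller might use the ALIGN/BITPOS contract documented
   above.  For example, if get_object_alignment_1 reports align == 64 and
   bitpos == 16, the object is known to sit 2 bytes past an 8-byte boundary,
   so only 16 bits of alignment can be relied on.  */
#if 0
static bool
example_object_aligned_p (tree exp, unsigned int required_align)
{
  unsigned int align;
  unsigned HOST_WIDE_INT bitpos = 0;
  get_object_alignment_1 (exp, &align, &bitpos);
  /* REQUIRED_ALIGN is assumed to be a power of two in bits.  EXP is known
     to be that aligned only if the known alignment is at least as large
     and the known misalignment is zero modulo it.  */
  return align >= required_align && (bitpos & (required_align - 1)) == 0;
}
#endif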
420
421 /* For a pointer valued expression EXP compute values M and N such that M
422 divides (EXP - N) and such that N < M. If these numbers can be determined,
423 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
424 the results are just a conservative approximation.
425
426 If EXP is not a pointer, false is returned too. */
427
428 bool
429 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
430 unsigned HOST_WIDE_INT *bitposp)
431 {
432 STRIP_NOPS (exp);
433
434 if (TREE_CODE (exp) == ADDR_EXPR)
435 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
436 alignp, bitposp, true);
437 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
438 {
439 unsigned int align;
440 unsigned HOST_WIDE_INT bitpos;
441 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
442 &align, &bitpos);
443 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
444 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
445 else
446 {
447 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
448 if (trailing_zeros < HOST_BITS_PER_INT)
449 {
450 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
451 if (inner)
452 align = MIN (align, inner);
453 }
454 }
455 *alignp = align;
456 *bitposp = bitpos & (align - 1);
457 return res;
458 }
459 else if (TREE_CODE (exp) == SSA_NAME
460 && POINTER_TYPE_P (TREE_TYPE (exp)))
461 {
462 unsigned int ptr_align, ptr_misalign;
463 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
464
465 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
466 {
467 *bitposp = ptr_misalign * BITS_PER_UNIT;
468 *alignp = ptr_align * BITS_PER_UNIT;
469 /* Make sure to return a sensible alignment when the multiplication
470 by BITS_PER_UNIT overflowed. */
471 if (*alignp == 0)
472 *alignp = 1u << (HOST_BITS_PER_INT - 1);
473 /* We cannot really tell whether this result is an approximation. */
474 return false;
475 }
476 else
477 {
478 *bitposp = 0;
479 *alignp = BITS_PER_UNIT;
480 return false;
481 }
482 }
483 else if (TREE_CODE (exp) == INTEGER_CST)
484 {
485 *alignp = BIGGEST_ALIGNMENT;
486 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
487 & (BIGGEST_ALIGNMENT - 1));
488 return true;
489 }
490
491 *bitposp = 0;
492 *alignp = BITS_PER_UNIT;
493 return false;
494 }
495
496 /* Return the alignment in bits of EXP, a pointer valued expression.
497 The alignment returned is, by default, the alignment of the thing that
498 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
499
500 Otherwise, look at the expression to see if we can do better, i.e., if the
501 expression is actually pointing at an object whose alignment is tighter. */
502
503 unsigned int
504 get_pointer_alignment (tree exp)
505 {
506 unsigned HOST_WIDE_INT bitpos = 0;
507 unsigned int align;
508
509 get_pointer_alignment_1 (exp, &align, &bitpos);
510
511 /* align and bitpos now specify known low bits of the pointer.
512 ptr & (align - 1) == bitpos. */
513
514 if (bitpos != 0)
515 align = least_bit_hwi (bitpos);
516
517 return align;
518 }
519
520 /* Return the number of non-zero elements in the sequence
521 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
522 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
523
524 static unsigned
525 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
526 {
527 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
528
529 unsigned n;
530
531 if (eltsize == 1)
532 {
533 /* Optimize the common case of plain char. */
534 for (n = 0; n < maxelts; n++)
535 {
536 const char *elt = (const char*) ptr + n;
537 if (!*elt)
538 break;
539 }
540 }
541 else
542 {
543 for (n = 0; n < maxelts; n++)
544 {
545 const char *elt = (const char*) ptr + n * eltsize;
546 if (!memcmp (elt, "\0\0\0\0", eltsize))
547 break;
548 }
549 }
550 return n;
551 }
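#if 0
/* Editor's illustrative sketch (not part of the original source, not
   compiled): the expected behaviour of string_length above on a plain char
   buffer and on a buffer of 2-byte elements.  */
static void
example_string_length (void)
{
  /* "ab\0cd" has elements 'a', 'b', NUL, ..., so the length is 2.  */
  gcc_checking_assert (string_length ("ab\0cd", 1, 5) == 2);

  /* With 2-byte elements the terminator is a 2-byte zero, so this buffer
     ('a', 'b', NUL as little-endian 2-byte elements) also has length 2.  */
  static const char wide[] = "a\0b\0\0";
  gcc_checking_assert (string_length (wide, 2, 3) == 2);
}
#endif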
552
553 /* Compute the length of a null-terminated character string or wide
554 character string, handling character sizes of 1, 2, and 4 bytes.
555 TREE_STRING_LENGTH is not the right way because it evaluates to
556 the size of the character array in bytes (as opposed to characters)
557 and because it can contain a zero byte in the middle.
558
559 ONLY_VALUE should be nonzero if the result is not going to be emitted
560 into the instruction stream and zero if it is going to be expanded.
561 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
562 is returned, otherwise NULL, since
563 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
564 evaluate the side-effects.
565
566 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
567 accesses. Note that this implies the result is not going to be emitted
568 into the instruction stream.
569
570 The value returned is of type `ssizetype'.
571
572 Unfortunately, string_constant can't access the values of const char
573 arrays with initializers, so neither can we do so here. */
574
575 tree
576 c_strlen (tree src, int only_value)
577 {
578 STRIP_NOPS (src);
579 if (TREE_CODE (src) == COND_EXPR
580 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
581 {
582 tree len1, len2;
583
584 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
585 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
586 if (tree_int_cst_equal (len1, len2))
587 return len1;
588 }
589
590 if (TREE_CODE (src) == COMPOUND_EXPR
591 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
592 return c_strlen (TREE_OPERAND (src, 1), only_value);
593
594 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
595
596 /* Offset from the beginning of the string in bytes. */
597 tree byteoff;
598 src = string_constant (src, &byteoff);
599 if (src == 0)
600 return NULL_TREE;
601
602 /* Determine the size of the string element. */
603 unsigned eltsize
604 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
605
606 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
607 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
608 in case the latter is less than the size of the array. */
609 HOST_WIDE_INT maxelts = TREE_STRING_LENGTH (src);
610 tree type = TREE_TYPE (src);
611 if (tree size = TYPE_SIZE_UNIT (type))
612 if (tree_fits_shwi_p (size))
613 maxelts = tree_to_uhwi (size);
614
615 maxelts = maxelts / eltsize - 1;
616
617 /* PTR can point to the byte representation of any string type, including
618 char* and wchar_t*. */
619 const char *ptr = TREE_STRING_POINTER (src);
620
621 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
622 {
623 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
624 compute the offset to the following null if we don't know where to
625 start searching for it. */
626 if (string_length (ptr, eltsize, maxelts) < maxelts)
627 {
628 /* Return when an embedded null character is found. */
629 return NULL_TREE;
630 }
631
632 if (!maxelts)
633 return ssize_int (0);
634
635 /* We don't know the starting offset, but we do know that the string
636 has no internal zero bytes. We can assume that the offset falls
637 within the bounds of the string; otherwise, the programmer deserves
638 what he gets. Subtract the offset from the length of the string,
639 and return that. This would perhaps not be valid if we were dealing
640 with named arrays in addition to literal string constants. */
641 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
642 }
643
644 /* Offset from the beginning of the string in elements. */
645 HOST_WIDE_INT eltoff;
646
647 /* We have a known offset into the string. Start searching there for
648 a null character if we can represent it as a single HOST_WIDE_INT. */
649 if (byteoff == 0)
650 eltoff = 0;
651 else if (! tree_fits_shwi_p (byteoff))
652 eltoff = -1;
653 else
654 eltoff = tree_to_shwi (byteoff) / eltsize;
655
656 /* If the offset is known to be out of bounds, warn, and call strlen at
657 runtime. */
658 if (eltoff < 0 || eltoff > maxelts)
659 {
660 /* Suppress multiple warnings for propagated constant strings. */
661 if (only_value != 2
662 && !TREE_NO_WARNING (src))
663 {
664 warning_at (loc, OPT_Warray_bounds,
665 "offset %qwi outside bounds of constant string",
666 eltoff);
667 TREE_NO_WARNING (src) = 1;
668 }
669 return NULL_TREE;
670 }
671
672 /* Use strlen to search for the first zero byte. Since any strings
673 constructed with build_string will have nulls appended, we win even
674 if we get handed something like (char[4])"abcd".
675
676 Since ELTOFF is our starting index into the string, no further
677 calculation is needed. */
678 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
679 maxelts - eltoff);
680
681 return ssize_int (len);
682 }
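/* Editor's illustrative note (not part of the original source): for a
   STRING_CST "hello", c_strlen folds the length to ssize_int (5); with a
   known byte offset of 2 into the same string it yields 3; and it returns
   NULL_TREE when the offset is unknown and the string contains an embedded
   NUL, as described in the comments above.  */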
683
684 /* Return a constant integer corresponding to target reading
685 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
686
687 static rtx
688 c_readstr (const char *str, scalar_int_mode mode)
689 {
690 HOST_WIDE_INT ch;
691 unsigned int i, j;
692 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
693
694 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
695 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
696 / HOST_BITS_PER_WIDE_INT;
697
698 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
699 for (i = 0; i < len; i++)
700 tmp[i] = 0;
701
702 ch = 1;
703 for (i = 0; i < GET_MODE_SIZE (mode); i++)
704 {
705 j = i;
706 if (WORDS_BIG_ENDIAN)
707 j = GET_MODE_SIZE (mode) - i - 1;
708 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
709 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
710 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
711 j *= BITS_PER_UNIT;
712
713 if (ch)
714 ch = (unsigned char) str[i];
715 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
716 }
717
718 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
719 return immed_wide_int_const (c, mode);
720 }
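/* Editor's illustrative note (not part of the original source): for example,
   c_readstr ("abcd", SImode) yields 0x64636261 on a little-endian target
   ('a' lands in the least significant byte) and 0x61626364 on a big-endian
   target, matching how the bytes would be laid out in target memory.  */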
721
722 /* Cast a target constant CST to target CHAR. If the resulting value fits in
723 the host char type, return zero and store that value in the variable
724 pointed to by P. */
725
726 static int
727 target_char_cast (tree cst, char *p)
728 {
729 unsigned HOST_WIDE_INT val, hostval;
730
731 if (TREE_CODE (cst) != INTEGER_CST
732 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
733 return 1;
734
735 /* Do not care if it fits or not right here. */
736 val = TREE_INT_CST_LOW (cst);
737
738 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
739 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
740
741 hostval = val;
742 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
743 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
744
745 if (val != hostval)
746 return 1;
747
748 *p = hostval;
749 return 0;
750 }
751
752 /* Similar to save_expr, but assumes that arbitrary code is not executed
753 in between the multiple evaluations. In particular, we assume that a
754 non-addressable local variable will not be modified. */
755
756 static tree
757 builtin_save_expr (tree exp)
758 {
759 if (TREE_CODE (exp) == SSA_NAME
760 || (TREE_ADDRESSABLE (exp) == 0
761 && (TREE_CODE (exp) == PARM_DECL
762 || (VAR_P (exp) && !TREE_STATIC (exp)))))
763 return exp;
764
765 return save_expr (exp);
766 }
767
768 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
769 times to get the address of either a higher stack frame, or a return
770 address located within it (depending on FNDECL_CODE). */
771
772 static rtx
773 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
774 {
775 int i;
776 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
777 if (tem == NULL_RTX)
778 {
779 /* For a zero count with __builtin_return_address, we don't care what
780 frame address we return, because target-specific definitions will
781 override us. Therefore frame pointer elimination is OK, and using
782 the soft frame pointer is OK.
783
784 For a nonzero count, or a zero count with __builtin_frame_address,
785 we require a stable offset from the current frame pointer to the
786 previous one, so we must use the hard frame pointer, and
787 we must disable frame pointer elimination. */
788 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
789 tem = frame_pointer_rtx;
790 else
791 {
792 tem = hard_frame_pointer_rtx;
793
794 /* Tell reload not to eliminate the frame pointer. */
795 crtl->accesses_prior_frames = 1;
796 }
797 }
798
799 if (count > 0)
800 SETUP_FRAME_ADDRESSES ();
801
802 /* On the SPARC, the return address is not in the frame, it is in a
803 register. There is no way to access it off of the current frame
804 pointer, but it can be accessed off the previous frame pointer by
805 reading the value from the register window save area. */
806 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
807 count--;
808
809 /* Scan back COUNT frames to the specified frame. */
810 for (i = 0; i < count; i++)
811 {
812 /* Assume the dynamic chain pointer is in the word that the
813 frame address points to, unless otherwise specified. */
814 tem = DYNAMIC_CHAIN_ADDRESS (tem);
815 tem = memory_address (Pmode, tem);
816 tem = gen_frame_mem (Pmode, tem);
817 tem = copy_to_reg (tem);
818 }
819
820 /* For __builtin_frame_address, return what we've got. But, on
821 the SPARC for example, we may have to add a bias. */
822 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
823 return FRAME_ADDR_RTX (tem);
824
825 /* For __builtin_return_address, get the return address from that frame. */
826 #ifdef RETURN_ADDR_RTX
827 tem = RETURN_ADDR_RTX (count, tem);
828 #else
829 tem = memory_address (Pmode,
830 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
831 tem = gen_frame_mem (Pmode, tem);
832 #endif
833 return tem;
834 }
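/* Editor's illustrative note (not part of the original source): this expander
   backs source-level uses such as
     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);
   where, as explained above, a nonzero count (or any use of
   __builtin_frame_address) requires the hard frame pointer and disables
   frame pointer elimination.  */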
835
836 /* Alias set used for setjmp buffer. */
837 static alias_set_type setjmp_alias_set = -1;
838
839 /* Construct the leading half of a __builtin_setjmp call. Control will
840 return to RECEIVER_LABEL. This is also called directly by the SJLJ
841 exception handling code. */
842
843 void
844 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
845 {
846 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
847 rtx stack_save;
848 rtx mem;
849
850 if (setjmp_alias_set == -1)
851 setjmp_alias_set = new_alias_set ();
852
853 buf_addr = convert_memory_address (Pmode, buf_addr);
854
855 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
856
857 /* We store the frame pointer and the address of receiver_label in
858 the buffer and use the rest of it for the stack save area, which
859 is machine-dependent. */
860
861 mem = gen_rtx_MEM (Pmode, buf_addr);
862 set_mem_alias_set (mem, setjmp_alias_set);
863 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
864
865 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
866 GET_MODE_SIZE (Pmode)));
867 set_mem_alias_set (mem, setjmp_alias_set);
868
869 emit_move_insn (validize_mem (mem),
870 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
871
872 stack_save = gen_rtx_MEM (sa_mode,
873 plus_constant (Pmode, buf_addr,
874 2 * GET_MODE_SIZE (Pmode)));
875 set_mem_alias_set (stack_save, setjmp_alias_set);
876 emit_stack_save (SAVE_NONLOCAL, &stack_save);
877
878 /* If there is further processing to do, do it. */
879 if (targetm.have_builtin_setjmp_setup ())
880 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
881
882 /* We have a nonlocal label. */
883 cfun->has_nonlocal_label = 1;
884 }
885
886 /* Construct the trailing part of a __builtin_setjmp call. This is
887 also called directly by the SJLJ exception handling code.
888 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
889
890 void
891 expand_builtin_setjmp_receiver (rtx receiver_label)
892 {
893 rtx chain;
894
895 /* Mark the FP as used when we get here, so we have to make sure it's
896 marked as used by this function. */
897 emit_use (hard_frame_pointer_rtx);
898
899 /* Mark the static chain as clobbered here so life information
900 doesn't get messed up for it. */
901 chain = rtx_for_static_chain (current_function_decl, true);
902 if (chain && REG_P (chain))
903 emit_clobber (chain);
904
905 /* Now put in the code to restore the frame pointer, and argument
906 pointer, if needed. */
907 if (! targetm.have_nonlocal_goto ())
908 {
909 /* First adjust our frame pointer to its actual value. It was
910 previously set to the start of the virtual area corresponding to
911 the stacked variables when we branched here and now needs to be
912 adjusted to the actual hardware fp value.
913
914 Assignments to virtual registers are converted by
915 instantiate_virtual_regs into the corresponding assignment
916 to the underlying register (fp in this case) that makes
917 the original assignment true.
918 So the following insn will actually be decrementing fp by
919 TARGET_STARTING_FRAME_OFFSET. */
920 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
921
922 /* Restoring the frame pointer also modifies the hard frame pointer.
923 Mark it used (so that the previous assignment remains live once
924 the frame pointer is eliminated) and clobbered (to represent the
925 implicit update from the assignment). */
926 emit_use (hard_frame_pointer_rtx);
927 emit_clobber (hard_frame_pointer_rtx);
928 }
929
930 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
931 {
932 /* If the argument pointer can be eliminated in favor of the
933 frame pointer, we don't need to restore it. We assume here
934 that if such an elimination is present, it can always be used.
935 This is the case on all known machines; if we don't make this
936 assumption, we do unnecessary saving on many machines. */
937 size_t i;
938 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
939
940 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
941 if (elim_regs[i].from == ARG_POINTER_REGNUM
942 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
943 break;
944
945 if (i == ARRAY_SIZE (elim_regs))
946 {
947 /* Now restore our arg pointer from the address at which it
948 was saved in our stack frame. */
949 emit_move_insn (crtl->args.internal_arg_pointer,
950 copy_to_reg (get_arg_pointer_save_area ()));
951 }
952 }
953
954 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
955 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
956 else if (targetm.have_nonlocal_goto_receiver ())
957 emit_insn (targetm.gen_nonlocal_goto_receiver ());
958 else
959 { /* Nothing */ }
960
961 /* We must not allow the code we just generated to be reordered by
962 scheduling. Specifically, the update of the frame pointer must
963 happen immediately, not later. */
964 emit_insn (gen_blockage ());
965 }
966
967 /* __builtin_longjmp is passed a pointer to an array of five words (not
968 all will be used on all machines). It operates similarly to the C
969 library function of the same name, but is more efficient. Much of
970 the code below is copied from the handling of non-local gotos. */
971
972 static void
973 expand_builtin_longjmp (rtx buf_addr, rtx value)
974 {
975 rtx fp, lab, stack;
976 rtx_insn *insn, *last;
977 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
978
979 /* DRAP is needed for stack realign if longjmp is expanded to current
980 function */
981 if (SUPPORTS_STACK_ALIGNMENT)
982 crtl->need_drap = true;
983
984 if (setjmp_alias_set == -1)
985 setjmp_alias_set = new_alias_set ();
986
987 buf_addr = convert_memory_address (Pmode, buf_addr);
988
989 buf_addr = force_reg (Pmode, buf_addr);
990
991 /* We require that the user must pass a second argument of 1, because
992 that is what builtin_setjmp will return. */
993 gcc_assert (value == const1_rtx);
994
995 last = get_last_insn ();
996 if (targetm.have_builtin_longjmp ())
997 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
998 else
999 {
1000 fp = gen_rtx_MEM (Pmode, buf_addr);
1001 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1002 GET_MODE_SIZE (Pmode)));
1003
1004 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1005 2 * GET_MODE_SIZE (Pmode)));
1006 set_mem_alias_set (fp, setjmp_alias_set);
1007 set_mem_alias_set (lab, setjmp_alias_set);
1008 set_mem_alias_set (stack, setjmp_alias_set);
1009
1010 /* Pick up FP, label, and SP from the block and jump. This code is
1011 from expand_goto in stmt.c; see there for detailed comments. */
1012 if (targetm.have_nonlocal_goto ())
1013 /* We have to pass a value to the nonlocal_goto pattern that will
1014 get copied into the static_chain pointer, but it does not matter
1015 what that value is, because builtin_setjmp does not use it. */
1016 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1017 else
1018 {
1019 lab = copy_to_reg (lab);
1020
1021 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1022 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1023
1024 emit_move_insn (hard_frame_pointer_rtx, fp);
1025 emit_stack_restore (SAVE_NONLOCAL, stack);
1026
1027 emit_use (hard_frame_pointer_rtx);
1028 emit_use (stack_pointer_rtx);
1029 emit_indirect_jump (lab);
1030 }
1031 }
1032
1033 /* Search backwards and mark the jump insn as a non-local goto.
1034 Note that this precludes the use of __builtin_longjmp to a
1035 __builtin_setjmp target in the same function. However, we've
1036 already cautioned the user that these functions are for
1037 internal exception handling use only. */
1038 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1039 {
1040 gcc_assert (insn != last);
1041
1042 if (JUMP_P (insn))
1043 {
1044 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1045 break;
1046 }
1047 else if (CALL_P (insn))
1048 break;
1049 }
1050 }
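/* Editor's illustrative note (not part of the original source): the setjmp
   and longjmp expanders above implement the GCC-internal pattern
     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);
   where the second argument must be the constant 1 and which, as cautioned
   above, is meant for internal exception-handling use only.  */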
1051
1052 static inline bool
1053 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1054 {
1055 return (iter->i < iter->n);
1056 }
1057
1058 /* This function validates the types of a function call argument list
1059 against a specified list of tree_codes. If the last specifier is a 0,
1060 that represents an ellipsis, otherwise the last specifier must be a
1061 VOID_TYPE. */
1062
1063 static bool
1064 validate_arglist (const_tree callexpr, ...)
1065 {
1066 enum tree_code code;
1067 bool res = false;
1068 va_list ap;
1069 const_call_expr_arg_iterator iter;
1070 const_tree arg;
1071
1072 va_start (ap, callexpr);
1073 init_const_call_expr_arg_iterator (callexpr, &iter);
1074
1075 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1076 tree fn = CALL_EXPR_FN (callexpr);
1077 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1078
1079 for (unsigned argno = 1; ; ++argno)
1080 {
1081 code = (enum tree_code) va_arg (ap, int);
1082
1083 switch (code)
1084 {
1085 case 0:
1086 /* This signifies an ellipsis; any further arguments are all OK. */
1087 res = true;
1088 goto end;
1089 case VOID_TYPE:
1090 /* This signifies an endlink; if no arguments remain, return
1091 true, otherwise return false. */
1092 res = !more_const_call_expr_args_p (&iter);
1093 goto end;
1094 case POINTER_TYPE:
1095 /* The actual argument must be nonnull when either the whole
1096 called function has been declared nonnull, or when the formal
1097 argument corresponding to the actual argument has been. */
1098 if (argmap
1099 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1100 {
1101 arg = next_const_call_expr_arg (&iter);
1102 if (!validate_arg (arg, code) || integer_zerop (arg))
1103 goto end;
1104 break;
1105 }
1106 /* FALLTHRU */
1107 default:
1108 /* If no parameters remain or the parameter's code does not
1109 match the specified code, return false. Otherwise continue
1110 checking any remaining arguments. */
1111 arg = next_const_call_expr_arg (&iter);
1112 if (!validate_arg (arg, code))
1113 goto end;
1114 break;
1115 }
1116 }
1117
1118 /* We need gotos here since we can only have one VA_CLOSE in a
1119 function. */
1120 end: ;
1121 va_end (ap);
1122
1123 BITMAP_FREE (argmap);
1124
1125 return res;
1126 }
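/* Editor's illustrative note (not part of the original source): a typical
   use, e.g. for a memcpy-like builtin, is
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                       VOID_TYPE)
   where the trailing VOID_TYPE means no further arguments are allowed;
   a trailing 0 would instead accept any number of additional arguments.  */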
1127
1128 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1129 and the address of the save area. */
1130
1131 static rtx
1132 expand_builtin_nonlocal_goto (tree exp)
1133 {
1134 tree t_label, t_save_area;
1135 rtx r_label, r_save_area, r_fp, r_sp;
1136 rtx_insn *insn;
1137
1138 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1139 return NULL_RTX;
1140
1141 t_label = CALL_EXPR_ARG (exp, 0);
1142 t_save_area = CALL_EXPR_ARG (exp, 1);
1143
1144 r_label = expand_normal (t_label);
1145 r_label = convert_memory_address (Pmode, r_label);
1146 r_save_area = expand_normal (t_save_area);
1147 r_save_area = convert_memory_address (Pmode, r_save_area);
1148 /* Copy the address of the save location to a register just in case it was
1149 based on the frame pointer. */
1150 r_save_area = copy_to_reg (r_save_area);
1151 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1152 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1153 plus_constant (Pmode, r_save_area,
1154 GET_MODE_SIZE (Pmode)));
1155
1156 crtl->has_nonlocal_goto = 1;
1157
1158 /* ??? We no longer need to pass the static chain value, afaik. */
1159 if (targetm.have_nonlocal_goto ())
1160 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1161 else
1162 {
1163 r_label = copy_to_reg (r_label);
1164
1165 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1166 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1167
1168 /* Restore frame pointer for containing function. */
1169 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1170 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1171
1172 /* USE of hard_frame_pointer_rtx added for consistency;
1173 not clear if really needed. */
1174 emit_use (hard_frame_pointer_rtx);
1175 emit_use (stack_pointer_rtx);
1176
1177 /* If the architecture is using a GP register, we must
1178 conservatively assume that the target function makes use of it.
1179 The prologue of functions with nonlocal gotos must therefore
1180 initialize the GP register to the appropriate value, and we
1181 must then make sure that this value is live at the point
1182 of the jump. (Note that this doesn't necessarily apply
1183 to targets with a nonlocal_goto pattern; they are free
1184 to implement it in their own way. Note also that this is
1185 a no-op if the GP register is a global invariant.) */
1186 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1187 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1188 emit_use (pic_offset_table_rtx);
1189
1190 emit_indirect_jump (r_label);
1191 }
1192
1193 /* Search backwards to the jump insn and mark it as a
1194 non-local goto. */
1195 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1196 {
1197 if (JUMP_P (insn))
1198 {
1199 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1200 break;
1201 }
1202 else if (CALL_P (insn))
1203 break;
1204 }
1205
1206 return const0_rtx;
1207 }
1208
1209 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1210 (not all will be used on all machines) that was passed to __builtin_setjmp.
1211 It updates the stack pointer in that block to the current value. This is
1212 also called directly by the SJLJ exception handling code. */
1213
1214 void
1215 expand_builtin_update_setjmp_buf (rtx buf_addr)
1216 {
1217 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1218 buf_addr = convert_memory_address (Pmode, buf_addr);
1219 rtx stack_save
1220 = gen_rtx_MEM (sa_mode,
1221 memory_address
1222 (sa_mode,
1223 plus_constant (Pmode, buf_addr,
1224 2 * GET_MODE_SIZE (Pmode))));
1225
1226 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1227 }
1228
1229 /* Expand a call to __builtin_prefetch. For a target that does not support
1230 data prefetch, evaluate the memory address argument in case it has side
1231 effects. */
1232
1233 static void
1234 expand_builtin_prefetch (tree exp)
1235 {
1236 tree arg0, arg1, arg2;
1237 int nargs;
1238 rtx op0, op1, op2;
1239
1240 if (!validate_arglist (exp, POINTER_TYPE, 0))
1241 return;
1242
1243 arg0 = CALL_EXPR_ARG (exp, 0);
1244
1245 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1246 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1247 locality). */
1248 nargs = call_expr_nargs (exp);
1249 if (nargs > 1)
1250 arg1 = CALL_EXPR_ARG (exp, 1);
1251 else
1252 arg1 = integer_zero_node;
1253 if (nargs > 2)
1254 arg2 = CALL_EXPR_ARG (exp, 2);
1255 else
1256 arg2 = integer_three_node;
1257
1258 /* Argument 0 is an address. */
1259 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1260
1261 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1262 if (TREE_CODE (arg1) != INTEGER_CST)
1263 {
1264 error ("second argument to %<__builtin_prefetch%> must be a constant");
1265 arg1 = integer_zero_node;
1266 }
1267 op1 = expand_normal (arg1);
1268 /* Argument 1 must be either zero or one. */
1269 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1270 {
1271 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1272 " using zero");
1273 op1 = const0_rtx;
1274 }
1275
1276 /* Argument 2 (locality) must be a compile-time constant int. */
1277 if (TREE_CODE (arg2) != INTEGER_CST)
1278 {
1279 error ("third argument to %<__builtin_prefetch%> must be a constant");
1280 arg2 = integer_zero_node;
1281 }
1282 op2 = expand_normal (arg2);
1283 /* Argument 2 must be 0, 1, 2, or 3. */
1284 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1285 {
1286 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1287 op2 = const0_rtx;
1288 }
1289
1290 if (targetm.have_prefetch ())
1291 {
1292 struct expand_operand ops[3];
1293
1294 create_address_operand (&ops[0], op0);
1295 create_integer_operand (&ops[1], INTVAL (op1));
1296 create_integer_operand (&ops[2], INTVAL (op2));
1297 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1298 return;
1299 }
1300
1301 /* Don't do anything with direct references to volatile memory, but
1302 generate code to handle other side effects. */
1303 if (!MEM_P (op0) && side_effects_p (op0))
1304 emit_insn (op0);
1305 }
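/* Editor's illustrative note (not part of the original source): typical
   calls handled by the expander above are
     __builtin_prefetch (p);          a read with locality 3
     __builtin_prefetch (p, 1, 0);    a write with no temporal locality
   Non-constant second or third arguments are diagnosed as errors, and
   out-of-range constants fall back to zero with a warning.  */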
1306
1307 /* Get a MEM rtx for expression EXP which is the address of an operand
1308 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1309 the maximum length of the block of memory that might be accessed or
1310 NULL if unknown. */
1311
1312 static rtx
1313 get_memory_rtx (tree exp, tree len)
1314 {
1315 tree orig_exp = exp;
1316 rtx addr, mem;
1317
1318 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1319 from its operand; e.g. for expr->a.b only <variable>.a.b is recorded. */
1320 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1321 exp = TREE_OPERAND (exp, 0);
1322
1323 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1324 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1325
1326 /* Get an expression we can use to find the attributes to assign to MEM.
1327 First remove any nops. */
1328 while (CONVERT_EXPR_P (exp)
1329 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1330 exp = TREE_OPERAND (exp, 0);
1331
1332 /* Build a MEM_REF representing the whole accessed area as a byte blob
1333 (as builtin stringops may alias with anything). */
1334 exp = fold_build2 (MEM_REF,
1335 build_array_type (char_type_node,
1336 build_range_type (sizetype,
1337 size_one_node, len)),
1338 exp, build_int_cst (ptr_type_node, 0));
1339
1340 /* If the MEM_REF has no acceptable address, try to get the base object
1341 from the original address we got, and build an all-aliasing
1342 unknown-sized access to that one. */
1343 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1344 set_mem_attributes (mem, exp, 0);
1345 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1346 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1347 0))))
1348 {
1349 exp = build_fold_addr_expr (exp);
1350 exp = fold_build2 (MEM_REF,
1351 build_array_type (char_type_node,
1352 build_range_type (sizetype,
1353 size_zero_node,
1354 NULL)),
1355 exp, build_int_cst (ptr_type_node, 0));
1356 set_mem_attributes (mem, exp, 0);
1357 }
1358 set_mem_alias_set (mem, 0);
1359 return mem;
1360 }
1361 \f
1362 /* Built-in functions to perform an untyped call and return. */
1363
1364 #define apply_args_mode \
1365 (this_target_builtins->x_apply_args_mode)
1366 #define apply_result_mode \
1367 (this_target_builtins->x_apply_result_mode)
1368
1369 /* Return the size required for the block returned by __builtin_apply_args,
1370 and initialize apply_args_mode. */
1371
1372 static int
1373 apply_args_size (void)
1374 {
1375 static int size = -1;
1376 int align;
1377 unsigned int regno;
1378
1379 /* The values computed by this function never change. */
1380 if (size < 0)
1381 {
1382 /* The first value is the incoming arg-pointer. */
1383 size = GET_MODE_SIZE (Pmode);
1384
1385 /* The second value is the structure value address unless this is
1386 passed as an "invisible" first argument. */
1387 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1388 size += GET_MODE_SIZE (Pmode);
1389
1390 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1391 if (FUNCTION_ARG_REGNO_P (regno))
1392 {
1393 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1394
1395 gcc_assert (mode != VOIDmode);
1396
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
1400 size += GET_MODE_SIZE (mode);
1401 apply_args_mode[regno] = mode;
1402 }
1403 else
1404 {
1405 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1406 }
1407 }
1408 return size;
1409 }
1410
1411 /* Return the size required for the block returned by __builtin_apply,
1412 and initialize apply_result_mode. */
1413
1414 static int
1415 apply_result_size (void)
1416 {
1417 static int size = -1;
1418 int align, regno;
1419
1420 /* The values computed by this function never change. */
1421 if (size < 0)
1422 {
1423 size = 0;
1424
1425 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1426 if (targetm.calls.function_value_regno_p (regno))
1427 {
1428 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1429
1430 gcc_assert (mode != VOIDmode);
1431
1432 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1433 if (size % align != 0)
1434 size = CEIL (size, align) * align;
1435 size += GET_MODE_SIZE (mode);
1436 apply_result_mode[regno] = mode;
1437 }
1438 else
1439 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1440
1441 /* Allow targets that use untyped_call and untyped_return to override
1442 the size so that machine-specific information can be stored here. */
1443 #ifdef APPLY_RESULT_SIZE
1444 size = APPLY_RESULT_SIZE;
1445 #endif
1446 }
1447 return size;
1448 }
1449
1450 /* Create a vector describing the result block RESULT. If SAVEP is true,
1451 the result block is used to save the values; otherwise it is used to
1452 restore the values. */
1453
1454 static rtx
1455 result_vector (int savep, rtx result)
1456 {
1457 int regno, size, align, nelts;
1458 fixed_size_mode mode;
1459 rtx reg, mem;
1460 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1461
1462 size = nelts = 0;
1463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1464 if ((mode = apply_result_mode[regno]) != VOIDmode)
1465 {
1466 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1467 if (size % align != 0)
1468 size = CEIL (size, align) * align;
1469 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1470 mem = adjust_address (result, mode, size);
1471 savevec[nelts++] = (savep
1472 ? gen_rtx_SET (mem, reg)
1473 : gen_rtx_SET (reg, mem));
1474 size += GET_MODE_SIZE (mode);
1475 }
1476 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1477 }
1478
1479 /* Save the state required to perform an untyped call with the same
1480 arguments as were passed to the current function. */
1481
1482 static rtx
1483 expand_builtin_apply_args_1 (void)
1484 {
1485 rtx registers, tem;
1486 int size, align, regno;
1487 fixed_size_mode mode;
1488 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1489
1490 /* Create a block where the arg-pointer, structure value address,
1491 and argument registers can be saved. */
1492 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1493
1494 /* Walk past the arg-pointer and structure value address. */
1495 size = GET_MODE_SIZE (Pmode);
1496 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1497 size += GET_MODE_SIZE (Pmode);
1498
1499 /* Save each register used in calling a function to the block. */
1500 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1501 if ((mode = apply_args_mode[regno]) != VOIDmode)
1502 {
1503 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1504 if (size % align != 0)
1505 size = CEIL (size, align) * align;
1506
1507 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1508
1509 emit_move_insn (adjust_address (registers, mode, size), tem);
1510 size += GET_MODE_SIZE (mode);
1511 }
1512
1513 /* Save the arg pointer to the block. */
1514 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1515 /* We need the pointer as the caller actually passed it to us, not
1516 as we might have pretended it was passed. Make sure it's a valid
1517 operand, as emit_move_insn isn't expected to handle a PLUS. */
1518 if (STACK_GROWS_DOWNWARD)
1519 tem
1520 = force_operand (plus_constant (Pmode, tem,
1521 crtl->args.pretend_args_size),
1522 NULL_RTX);
1523 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1524
1525 size = GET_MODE_SIZE (Pmode);
1526
1527 /* Save the structure value address unless this is passed as an
1528 "invisible" first argument. */
1529 if (struct_incoming_value)
1530 {
1531 emit_move_insn (adjust_address (registers, Pmode, size),
1532 copy_to_reg (struct_incoming_value));
1533 size += GET_MODE_SIZE (Pmode);
1534 }
1535
1536 /* Return the address of the block. */
1537 return copy_addr_to_reg (XEXP (registers, 0));
1538 }
1539
1540 /* __builtin_apply_args returns block of memory allocated on
1541 the stack into which is stored the arg pointer, structure
1542 value address, static chain, and all the registers that might
1543 possibly be used in performing a function call. The code is
1544 moved to the start of the function so the incoming values are
1545 saved. */
1546
1547 static rtx
1548 expand_builtin_apply_args (void)
1549 {
1550 /* Don't do __builtin_apply_args more than once in a function.
1551 Save the result of the first call and reuse it. */
1552 if (apply_args_value != 0)
1553 return apply_args_value;
1554 {
1555 /* When this function is called, it means that registers must be
1556 saved on entry to this function. So we migrate the
1557 call to the first insn of this function. */
1558 rtx temp;
1559
1560 start_sequence ();
1561 temp = expand_builtin_apply_args_1 ();
1562 rtx_insn *seq = get_insns ();
1563 end_sequence ();
1564
1565 apply_args_value = temp;
1566
1567 /* Put the insns after the NOTE that starts the function.
1568 If this is inside a start_sequence, make the outer-level insn
1569 chain current, so the code is placed at the start of the
1570 function. If internal_arg_pointer is a non-virtual pseudo,
1571 it needs to be placed after the insn that initializes
1572 that pseudo. */
1573 push_topmost_sequence ();
1574 if (REG_P (crtl->args.internal_arg_pointer)
1575 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1576 emit_insn_before (seq, parm_birth_insn);
1577 else
1578 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1579 pop_topmost_sequence ();
1580 return temp;
1581 }
1582 }
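/* Editor's illustrative note (not part of the original source): at source
   level, __builtin_apply_args, __builtin_apply and __builtin_return are used
   together to forward the current function's arguments to another function,
   e.g.
     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);
   where target_fn is a hypothetical callee and 64 is a caller-supplied upper
   bound on the size of the pushed argument block.  */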
1583
1584 /* Perform an untyped call and save the state required to perform an
1585 untyped return of whatever value was returned by the given function. */
1586
1587 static rtx
1588 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1589 {
1590 int size, align, regno;
1591 fixed_size_mode mode;
1592 rtx incoming_args, result, reg, dest, src;
1593 rtx_call_insn *call_insn;
1594 rtx old_stack_level = 0;
1595 rtx call_fusage = 0;
1596 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1597
1598 arguments = convert_memory_address (Pmode, arguments);
1599
1600 /* Create a block where the return registers can be saved. */
1601 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1602
1603 /* Fetch the arg pointer from the ARGUMENTS block. */
1604 incoming_args = gen_reg_rtx (Pmode);
1605 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1606 if (!STACK_GROWS_DOWNWARD)
1607 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1608 incoming_args, 0, OPTAB_LIB_WIDEN);
1609
1610 /* Push a new argument block and copy the arguments. Do not allow
1611 the (potential) memcpy call below to interfere with our stack
1612 manipulations. */
1613 do_pending_stack_adjust ();
1614 NO_DEFER_POP;
1615
1616 /* Save the stack with nonlocal if available. */
1617 if (targetm.have_save_stack_nonlocal ())
1618 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1619 else
1620 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1621
1622 /* Allocate a block of memory onto the stack and copy the memory
1623 arguments to the outgoing arguments address. We can pass TRUE
1624 as the 4th argument because we just saved the stack pointer
1625 and will restore it right after the call. */
1626 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1627
1628 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1629 may have already set current_function_calls_alloca to true.
1630 current_function_calls_alloca won't be set if argsize is zero,
1631 so we have to guarantee need_drap is true here. */
1632 if (SUPPORTS_STACK_ALIGNMENT)
1633 crtl->need_drap = true;
1634
1635 dest = virtual_outgoing_args_rtx;
1636 if (!STACK_GROWS_DOWNWARD)
1637 {
1638 if (CONST_INT_P (argsize))
1639 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1640 else
1641 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1642 }
1643 dest = gen_rtx_MEM (BLKmode, dest);
1644 set_mem_align (dest, PARM_BOUNDARY);
1645 src = gen_rtx_MEM (BLKmode, incoming_args);
1646 set_mem_align (src, PARM_BOUNDARY);
1647 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1648
1649 /* Refer to the argument block. */
1650 apply_args_size ();
1651 arguments = gen_rtx_MEM (BLKmode, arguments);
1652 set_mem_align (arguments, PARM_BOUNDARY);
1653
1654 /* Walk past the arg-pointer and structure value address. */
1655 size = GET_MODE_SIZE (Pmode);
1656 if (struct_value)
1657 size += GET_MODE_SIZE (Pmode);
1658
1659 /* Restore each of the registers previously saved. Make USE insns
1660 for each of these registers for use in making the call. */
1661 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1662 if ((mode = apply_args_mode[regno]) != VOIDmode)
1663 {
1664 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1665 if (size % align != 0)
1666 size = CEIL (size, align) * align;
1667 reg = gen_rtx_REG (mode, regno);
1668 emit_move_insn (reg, adjust_address (arguments, mode, size));
1669 use_reg (&call_fusage, reg);
1670 size += GET_MODE_SIZE (mode);
1671 }
1672
1673 /* Restore the structure value address unless this is passed as an
1674 "invisible" first argument. */
1675 size = GET_MODE_SIZE (Pmode);
1676 if (struct_value)
1677 {
1678 rtx value = gen_reg_rtx (Pmode);
1679 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1680 emit_move_insn (struct_value, value);
1681 if (REG_P (struct_value))
1682 use_reg (&call_fusage, struct_value);
1683 size += GET_MODE_SIZE (Pmode);
1684 }
1685
1686 /* All arguments and registers used for the call are set up by now! */
1687 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1688
1689 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1690 and we don't want to load it into a register as an optimization,
1691 because prepare_call_address already did it if it should be done. */
1692 if (GET_CODE (function) != SYMBOL_REF)
1693 function = memory_address (FUNCTION_MODE, function);
1694
1695 /* Generate the actual call instruction and save the return value. */
1696 if (targetm.have_untyped_call ())
1697 {
1698 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1699 emit_call_insn (targetm.gen_untyped_call (mem, result,
1700 result_vector (1, result)));
1701 }
1702 else if (targetm.have_call_value ())
1703 {
1704 rtx valreg = 0;
1705
1706 /* Locate the unique return register. It is not possible to
1707 express a call that sets more than one return register using
1708 call_value; use untyped_call for that. In fact, untyped_call
1709 only needs to save the return registers in the given block. */
1710 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1711 if ((mode = apply_result_mode[regno]) != VOIDmode)
1712 {
1713 gcc_assert (!valreg); /* have_untyped_call required. */
1714
1715 valreg = gen_rtx_REG (mode, regno);
1716 }
1717
1718 emit_insn (targetm.gen_call_value (valreg,
1719 gen_rtx_MEM (FUNCTION_MODE, function),
1720 const0_rtx, NULL_RTX, const0_rtx));
1721
1722 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1723 }
1724 else
1725 gcc_unreachable ();
1726
1727 /* Find the CALL insn we just emitted, and attach the register usage
1728 information. */
1729 call_insn = last_call_insn ();
1730 add_function_usage_to (call_insn, call_fusage);
1731
1732 /* Restore the stack. */
1733 if (targetm.have_save_stack_nonlocal ())
1734 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1735 else
1736 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1737 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1738
1739 OK_DEFER_POP;
1740
1741 /* Return the address of the result block. */
1742 result = copy_addr_to_reg (XEXP (result, 0));
1743 return convert_memory_address (ptr_mode, result);
1744 }
1745
1746 /* Perform an untyped return. */
1747
1748 static void
1749 expand_builtin_return (rtx result)
1750 {
1751 int size, align, regno;
1752 fixed_size_mode mode;
1753 rtx reg;
1754 rtx_insn *call_fusage = 0;
1755
1756 result = convert_memory_address (Pmode, result);
1757
1758 apply_result_size ();
1759 result = gen_rtx_MEM (BLKmode, result);
1760
1761 if (targetm.have_untyped_return ())
1762 {
1763 rtx vector = result_vector (0, result);
1764 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1765 emit_barrier ();
1766 return;
1767 }
1768
1769 /* Restore the return value and note that each value is used. */
1770 size = 0;
1771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1772 if ((mode = apply_result_mode[regno]) != VOIDmode)
1773 {
1774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1775 if (size % align != 0)
1776 size = CEIL (size, align) * align;
1777 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1778 emit_move_insn (reg, adjust_address (result, mode, size));
1779
1780 push_to_sequence (call_fusage);
1781 emit_use (reg);
1782 call_fusage = get_insns ();
1783 end_sequence ();
1784 size += GET_MODE_SIZE (mode);
1785 }
1786
1787 /* Put the USE insns before the return. */
1788 emit_insn (call_fusage);
1789
1790   /* Return whatever values were restored by jumping directly to the end
1791 of the function. */
1792 expand_naked_return ();
1793 }
1794
1795 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1796
1797 static enum type_class
1798 type_to_class (tree type)
1799 {
1800 switch (TREE_CODE (type))
1801 {
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1820 }
1821 }
1822
1823 /* Expand a call EXP to __builtin_classify_type. */
1824
1825 static rtx
1826 expand_builtin_classify_type (tree exp)
1827 {
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1830 return GEN_INT (no_type_class);
1831 }
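
/* For example, __builtin_classify_type (1) evaluates to integer_type_class
   and __builtin_classify_type (1.0) to real_type_class, using the
   enum type_class values from typeclass.h.  */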
1832
1833 /* This helper macro, meant to be used in mathfn_built_in below, determines
1834 which among a set of builtin math functions is appropriate for a given type
1835    mode.  The `F' (float) and `L' (long double) variants are automatically generated
1836 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1837 types, there are additional types that are considered with 'F32', 'F64',
1838 'F128', etc. suffixes. */
1839 #define CASE_MATHFN(MATHFN) \
1840 CASE_CFN_##MATHFN: \
1841 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1842 fcodel = BUILT_IN_##MATHFN##L ; break;
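/* For instance, CASE_MATHFN (SIN) expands to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   supplying the double, float and long double function codes.  */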
1843 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1844 types. */
1845 #define CASE_MATHFN_FLOATN(MATHFN) \
1846 CASE_CFN_##MATHFN: \
1847 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1848 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1849 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1850 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1851 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1852 break;
1853 /* Similar to above, but appends _R after any F/L suffix. */
1854 #define CASE_MATHFN_REENT(MATHFN) \
1855 case CFN_BUILT_IN_##MATHFN##_R: \
1856 case CFN_BUILT_IN_##MATHFN##F_R: \
1857 case CFN_BUILT_IN_##MATHFN##L_R: \
1858 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1859 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1860
1861 /* Return a function equivalent to FN but operating on floating-point
1862 values of type TYPE, or END_BUILTINS if no such function exists.
1863 This is purely an operation on function codes; it does not guarantee
1864 that the target actually has an implementation of the function. */
1865
1866 static built_in_function
1867 mathfn_built_in_2 (tree type, combined_fn fn)
1868 {
1869 tree mtype;
1870 built_in_function fcode, fcodef, fcodel;
1871 built_in_function fcodef16 = END_BUILTINS;
1872 built_in_function fcodef32 = END_BUILTINS;
1873 built_in_function fcodef64 = END_BUILTINS;
1874 built_in_function fcodef128 = END_BUILTINS;
1875 built_in_function fcodef32x = END_BUILTINS;
1876 built_in_function fcodef64x = END_BUILTINS;
1877 built_in_function fcodef128x = END_BUILTINS;
1878
1879 switch (fn)
1880 {
1881 CASE_MATHFN (ACOS)
1882 CASE_MATHFN (ACOSH)
1883 CASE_MATHFN (ASIN)
1884 CASE_MATHFN (ASINH)
1885 CASE_MATHFN (ATAN)
1886 CASE_MATHFN (ATAN2)
1887 CASE_MATHFN (ATANH)
1888 CASE_MATHFN (CBRT)
1889 CASE_MATHFN_FLOATN (CEIL)
1890 CASE_MATHFN (CEXPI)
1891 CASE_MATHFN_FLOATN (COPYSIGN)
1892 CASE_MATHFN (COS)
1893 CASE_MATHFN (COSH)
1894 CASE_MATHFN (DREM)
1895 CASE_MATHFN (ERF)
1896 CASE_MATHFN (ERFC)
1897 CASE_MATHFN (EXP)
1898 CASE_MATHFN (EXP10)
1899 CASE_MATHFN (EXP2)
1900 CASE_MATHFN (EXPM1)
1901 CASE_MATHFN (FABS)
1902 CASE_MATHFN (FDIM)
1903 CASE_MATHFN_FLOATN (FLOOR)
1904 CASE_MATHFN_FLOATN (FMA)
1905 CASE_MATHFN_FLOATN (FMAX)
1906 CASE_MATHFN_FLOATN (FMIN)
1907 CASE_MATHFN (FMOD)
1908 CASE_MATHFN (FREXP)
1909 CASE_MATHFN (GAMMA)
1910 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1911 CASE_MATHFN (HUGE_VAL)
1912 CASE_MATHFN (HYPOT)
1913 CASE_MATHFN (ILOGB)
1914 CASE_MATHFN (ICEIL)
1915 CASE_MATHFN (IFLOOR)
1916 CASE_MATHFN (INF)
1917 CASE_MATHFN (IRINT)
1918 CASE_MATHFN (IROUND)
1919 CASE_MATHFN (ISINF)
1920 CASE_MATHFN (J0)
1921 CASE_MATHFN (J1)
1922 CASE_MATHFN (JN)
1923 CASE_MATHFN (LCEIL)
1924 CASE_MATHFN (LDEXP)
1925 CASE_MATHFN (LFLOOR)
1926 CASE_MATHFN (LGAMMA)
1927 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1928 CASE_MATHFN (LLCEIL)
1929 CASE_MATHFN (LLFLOOR)
1930 CASE_MATHFN (LLRINT)
1931 CASE_MATHFN (LLROUND)
1932 CASE_MATHFN (LOG)
1933 CASE_MATHFN (LOG10)
1934 CASE_MATHFN (LOG1P)
1935 CASE_MATHFN (LOG2)
1936 CASE_MATHFN (LOGB)
1937 CASE_MATHFN (LRINT)
1938 CASE_MATHFN (LROUND)
1939 CASE_MATHFN (MODF)
1940 CASE_MATHFN (NAN)
1941 CASE_MATHFN (NANS)
1942 CASE_MATHFN_FLOATN (NEARBYINT)
1943 CASE_MATHFN (NEXTAFTER)
1944 CASE_MATHFN (NEXTTOWARD)
1945 CASE_MATHFN (POW)
1946 CASE_MATHFN (POWI)
1947 CASE_MATHFN (POW10)
1948 CASE_MATHFN (REMAINDER)
1949 CASE_MATHFN (REMQUO)
1950 CASE_MATHFN_FLOATN (RINT)
1951 CASE_MATHFN_FLOATN (ROUND)
1952 CASE_MATHFN (SCALB)
1953 CASE_MATHFN (SCALBLN)
1954 CASE_MATHFN (SCALBN)
1955 CASE_MATHFN (SIGNBIT)
1956 CASE_MATHFN (SIGNIFICAND)
1957 CASE_MATHFN (SIN)
1958 CASE_MATHFN (SINCOS)
1959 CASE_MATHFN (SINH)
1960 CASE_MATHFN_FLOATN (SQRT)
1961 CASE_MATHFN (TAN)
1962 CASE_MATHFN (TANH)
1963 CASE_MATHFN (TGAMMA)
1964 CASE_MATHFN_FLOATN (TRUNC)
1965 CASE_MATHFN (Y0)
1966 CASE_MATHFN (Y1)
1967 CASE_MATHFN (YN)
1968
1969 default:
1970 return END_BUILTINS;
1971 }
1972
1973 mtype = TYPE_MAIN_VARIANT (type);
1974 if (mtype == double_type_node)
1975 return fcode;
1976 else if (mtype == float_type_node)
1977 return fcodef;
1978 else if (mtype == long_double_type_node)
1979 return fcodel;
1980 else if (mtype == float16_type_node)
1981 return fcodef16;
1982 else if (mtype == float32_type_node)
1983 return fcodef32;
1984 else if (mtype == float64_type_node)
1985 return fcodef64;
1986 else if (mtype == float128_type_node)
1987 return fcodef128;
1988 else if (mtype == float32x_type_node)
1989 return fcodef32x;
1990 else if (mtype == float64x_type_node)
1991 return fcodef64x;
1992 else if (mtype == float128x_type_node)
1993 return fcodef128x;
1994 else
1995 return END_BUILTINS;
1996 }
1997
1998 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1999 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2000 otherwise use the explicit declaration. If we can't do the conversion,
2001 return null. */
2002
2003 static tree
2004 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2005 {
2006 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2007 if (fcode2 == END_BUILTINS)
2008 return NULL_TREE;
2009
2010 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2011 return NULL_TREE;
2012
2013 return builtin_decl_explicit (fcode2);
2014 }
2015
2016 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
2017
2018 tree
2019 mathfn_built_in (tree type, combined_fn fn)
2020 {
2021 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2022 }
2023
2024 /* Like mathfn_built_in_1, but take a built_in_function and
2025    always use the implicit builtin declarations.  */
2026
2027 tree
2028 mathfn_built_in (tree type, enum built_in_function fn)
2029 {
2030 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2031 }
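
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) maps the
   double function code onto BUILT_IN_SQRTF and returns its decl, or
   NULL_TREE when sqrtf is not an implicitly available builtin for the
   target.  */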
2032
2033 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2034 return its code, otherwise return IFN_LAST. Note that this function
2035 only tests whether the function is defined in internals.def, not whether
2036 it is actually available on the target. */
2037
2038 internal_fn
2039 associated_internal_fn (tree fndecl)
2040 {
2041 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2042 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2043 switch (DECL_FUNCTION_CODE (fndecl))
2044 {
2045 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2046 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2047 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2048 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2049 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2050 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2051 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2052 #include "internal-fn.def"
2053
2054 CASE_FLT_FN (BUILT_IN_POW10):
2055 return IFN_EXP10;
2056
2057 CASE_FLT_FN (BUILT_IN_DREM):
2058 return IFN_REMAINDER;
2059
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2063 return IFN_LDEXP;
2064 return IFN_LAST;
2065
2066 default:
2067 return IFN_LAST;
2068 }
2069 }
2070
2071 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2072 on the current target by a call to an internal function, return the
2073 code of that internal function, otherwise return IFN_LAST. The caller
2074 is responsible for ensuring that any side-effects of the built-in
2075 call are dealt with correctly. E.g. if CALL sets errno, the caller
2076 must decide that the errno result isn't needed or make it available
2077 in some other way. */
2078
2079 internal_fn
2080 replacement_internal_fn (gcall *call)
2081 {
2082 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2083 {
2084 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2085 if (ifn != IFN_LAST)
2086 {
2087 tree_pair types = direct_internal_fn_types (ifn, call);
2088 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2089 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2090 return ifn;
2091 }
2092 }
2093 return IFN_LAST;
2094 }
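
/* For instance, a call to __builtin_sqrtf may map to IFN_SQRT here, but
   only when direct_internal_fn_supported_p confirms that the target can
   expand IFN_SQRT directly in the call's float mode.  */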
2095
2096 /* Expand a call to the builtin ternary math functions (fma).
2097 Return NULL_RTX if a normal call should be emitted rather than expanding the
2098 function in-line. EXP is the expression that is a call to the builtin
2099 function; if convenient, the result should be placed in TARGET.
2100 SUBTARGET may be used as the target for computing one of EXP's
2101 operands. */
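
/* For example, __builtin_fma (a, b, c) maps to fma_optab below; on a
   target providing a fused multiply-add pattern for the mode it expands
   to that single insn, otherwise a library call is emitted.  */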
2102
2103 static rtx
2104 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2105 {
2106 optab builtin_optab;
2107 rtx op0, op1, op2, result;
2108 rtx_insn *insns;
2109 tree fndecl = get_callee_fndecl (exp);
2110 tree arg0, arg1, arg2;
2111 machine_mode mode;
2112
2113 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2114 return NULL_RTX;
2115
2116 arg0 = CALL_EXPR_ARG (exp, 0);
2117 arg1 = CALL_EXPR_ARG (exp, 1);
2118 arg2 = CALL_EXPR_ARG (exp, 2);
2119
2120 switch (DECL_FUNCTION_CODE (fndecl))
2121 {
2122 CASE_FLT_FN (BUILT_IN_FMA):
2123 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2124 builtin_optab = fma_optab; break;
2125 default:
2126 gcc_unreachable ();
2127 }
2128
2129 /* Make a suitable register to place result in. */
2130 mode = TYPE_MODE (TREE_TYPE (exp));
2131
2132 /* Before working hard, check whether the instruction is available. */
2133 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2134 return NULL_RTX;
2135
2136 result = gen_reg_rtx (mode);
2137
2138 /* Always stabilize the argument list. */
2139 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2140 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2141 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2142
2143 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2144 op1 = expand_normal (arg1);
2145 op2 = expand_normal (arg2);
2146
2147 start_sequence ();
2148
2149 /* Compute into RESULT.
2150 Set RESULT to wherever the result comes back. */
2151 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2152 result, 0);
2153
2154 /* If we were unable to expand via the builtin, stop the sequence
2155 (without outputting the insns) and call to the library function
2156 with the stabilized argument list. */
2157 if (result == 0)
2158 {
2159 end_sequence ();
2160 return expand_call (exp, target, target == const0_rtx);
2161 }
2162
2163 /* Output the entire sequence. */
2164 insns = get_insns ();
2165 end_sequence ();
2166 emit_insn (insns);
2167
2168 return result;
2169 }
2170
2171 /* Expand a call to the builtin sin and cos math functions.
2172 Return NULL_RTX if a normal call should be emitted rather than expanding the
2173 function in-line. EXP is the expression that is a call to the builtin
2174 function; if convenient, the result should be placed in TARGET.
2175 SUBTARGET may be used as the target for computing one of EXP's
2176 operands. */
2177
2178 static rtx
2179 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2180 {
2181 optab builtin_optab;
2182 rtx op0;
2183 rtx_insn *insns;
2184 tree fndecl = get_callee_fndecl (exp);
2185 machine_mode mode;
2186 tree arg;
2187
2188 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2189 return NULL_RTX;
2190
2191 arg = CALL_EXPR_ARG (exp, 0);
2192
2193 switch (DECL_FUNCTION_CODE (fndecl))
2194 {
2195 CASE_FLT_FN (BUILT_IN_SIN):
2196 CASE_FLT_FN (BUILT_IN_COS):
2197 builtin_optab = sincos_optab; break;
2198 default:
2199 gcc_unreachable ();
2200 }
2201
2202 /* Make a suitable register to place result in. */
2203 mode = TYPE_MODE (TREE_TYPE (exp));
2204
2205   /* Check if the sincos insn is available, otherwise fall back
2206 to sin or cos insn. */
2207 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2208 switch (DECL_FUNCTION_CODE (fndecl))
2209 {
2210 CASE_FLT_FN (BUILT_IN_SIN):
2211 builtin_optab = sin_optab; break;
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 builtin_optab = cos_optab; break;
2214 default:
2215 gcc_unreachable ();
2216 }
2217
2218 /* Before working hard, check whether the instruction is available. */
2219 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2220 {
2221 rtx result = gen_reg_rtx (mode);
2222
2223 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2224 need to expand the argument again. This way, we will not perform
2225 	 side-effects more than once.  */
2226 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227
2228 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2229
2230 start_sequence ();
2231
2232 /* Compute into RESULT.
2233 Set RESULT to wherever the result comes back. */
2234 if (builtin_optab == sincos_optab)
2235 {
2236 int ok;
2237
2238 switch (DECL_FUNCTION_CODE (fndecl))
2239 {
2240 CASE_FLT_FN (BUILT_IN_SIN):
2241 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2242 break;
2243 CASE_FLT_FN (BUILT_IN_COS):
2244 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2245 break;
2246 default:
2247 gcc_unreachable ();
2248 }
2249 gcc_assert (ok);
2250 }
2251 else
2252 result = expand_unop (mode, builtin_optab, op0, result, 0);
2253
2254 if (result != 0)
2255 {
2256 /* Output the entire sequence. */
2257 insns = get_insns ();
2258 end_sequence ();
2259 emit_insn (insns);
2260 return result;
2261 }
2262
2263 /* If we were unable to expand via the builtin, stop the sequence
2264 (without outputting the insns) and call to the library function
2265 with the stabilized argument list. */
2266 end_sequence ();
2267 }
2268
2269 return expand_call (exp, target, target == const0_rtx);
2270 }
2271
2272 /* Given an interclass math builtin decl FNDECL and its argument ARG
2273 return an RTL instruction code that implements the functionality.
2274 If that isn't possible or available return CODE_FOR_nothing. */
2275
2276 static enum insn_code
2277 interclass_mathfn_icode (tree arg, tree fndecl)
2278 {
2279 bool errno_set = false;
2280 optab builtin_optab = unknown_optab;
2281 machine_mode mode;
2282
2283 switch (DECL_FUNCTION_CODE (fndecl))
2284 {
2285 CASE_FLT_FN (BUILT_IN_ILOGB):
2286 errno_set = true; builtin_optab = ilogb_optab; break;
2287 CASE_FLT_FN (BUILT_IN_ISINF):
2288 builtin_optab = isinf_optab; break;
2289 case BUILT_IN_ISNORMAL:
2290 case BUILT_IN_ISFINITE:
2291 CASE_FLT_FN (BUILT_IN_FINITE):
2292 case BUILT_IN_FINITED32:
2293 case BUILT_IN_FINITED64:
2294 case BUILT_IN_FINITED128:
2295 case BUILT_IN_ISINFD32:
2296 case BUILT_IN_ISINFD64:
2297 case BUILT_IN_ISINFD128:
2298 /* These builtins have no optabs (yet). */
2299 break;
2300 default:
2301 gcc_unreachable ();
2302 }
2303
2304 /* There's no easy way to detect the case we need to set EDOM. */
2305 if (flag_errno_math && errno_set)
2306 return CODE_FOR_nothing;
2307
2308 /* Optab mode depends on the mode of the input argument. */
2309 mode = TYPE_MODE (TREE_TYPE (arg));
2310
2311 if (builtin_optab)
2312 return optab_handler (builtin_optab, mode);
2313 return CODE_FOR_nothing;
2314 }
2315
2316 /* Expand a call to one of the builtin math functions that operate on
2317 floating point argument and output an integer result (ilogb, isinf,
2318 isnan, etc).
2319 Return 0 if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET. */
2322
2323 static rtx
2324 expand_builtin_interclass_mathfn (tree exp, rtx target)
2325 {
2326 enum insn_code icode = CODE_FOR_nothing;
2327 rtx op0;
2328 tree fndecl = get_callee_fndecl (exp);
2329 machine_mode mode;
2330 tree arg;
2331
2332 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2333 return NULL_RTX;
2334
2335 arg = CALL_EXPR_ARG (exp, 0);
2336 icode = interclass_mathfn_icode (arg, fndecl);
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2338
2339 if (icode != CODE_FOR_nothing)
2340 {
2341 struct expand_operand ops[1];
2342 rtx_insn *last = get_last_insn ();
2343 tree orig_arg = arg;
2344
2345 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2346 need to expand the argument again. This way, we will not perform
2347 	 side-effects more than once.  */
2348 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2349
2350 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2351
2352 if (mode != GET_MODE (op0))
2353 op0 = convert_to_mode (mode, op0, 0);
2354
2355 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2356 if (maybe_legitimize_operands (icode, 0, 1, ops)
2357 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2358 return ops[0].value;
2359
2360 delete_insns_since (last);
2361 CALL_EXPR_ARG (exp, 0) = orig_arg;
2362 }
2363
2364 return NULL_RTX;
2365 }
2366
2367 /* Expand a call to the builtin sincos math function.
2368 Return NULL_RTX if a normal call should be emitted rather than expanding the
2369 function in-line. EXP is the expression that is a call to the builtin
2370 function. */
2371
2372 static rtx
2373 expand_builtin_sincos (tree exp)
2374 {
2375 rtx op0, op1, op2, target1, target2;
2376 machine_mode mode;
2377 tree arg, sinp, cosp;
2378 int result;
2379 location_t loc = EXPR_LOCATION (exp);
2380 tree alias_type, alias_off;
2381
2382 if (!validate_arglist (exp, REAL_TYPE,
2383 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2384 return NULL_RTX;
2385
2386 arg = CALL_EXPR_ARG (exp, 0);
2387 sinp = CALL_EXPR_ARG (exp, 1);
2388 cosp = CALL_EXPR_ARG (exp, 2);
2389
2390 /* Make a suitable register to place result in. */
2391 mode = TYPE_MODE (TREE_TYPE (arg));
2392
2393 /* Check if sincos insn is available, otherwise emit the call. */
2394 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2395 return NULL_RTX;
2396
2397 target1 = gen_reg_rtx (mode);
2398 target2 = gen_reg_rtx (mode);
2399
2400 op0 = expand_normal (arg);
2401 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2402 alias_off = build_int_cst (alias_type, 0);
2403 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2404 sinp, alias_off));
2405 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2406 cosp, alias_off));
2407
2408 /* Compute into target1 and target2.
2409 Set TARGET to wherever the result comes back. */
2410 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2411 gcc_assert (result);
2412
2413 /* Move target1 and target2 to the memory locations indicated
2414 by op1 and op2. */
2415 emit_move_insn (op1, target1);
2416 emit_move_insn (op2, target2);
2417
2418 return const0_rtx;
2419 }
2420
2421 /* Expand a call to the internal cexpi builtin via the sincos math function.
2422 EXP is the expression that is a call to the builtin function; if convenient,
2423 the result should be placed in TARGET. */
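
/* In effect, when no sincos optab exists but the C library provides
   sincos, __builtin_cexpi (x) is expanded as

     sincos (x, &s, &c);   followed by building the complex value c + s*I

   with s and c held in stack temporaries; otherwise a call to the
   corresponding cexp function is used.  */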
2424
2425 static rtx
2426 expand_builtin_cexpi (tree exp, rtx target)
2427 {
2428 tree fndecl = get_callee_fndecl (exp);
2429 tree arg, type;
2430 machine_mode mode;
2431 rtx op0, op1, op2;
2432 location_t loc = EXPR_LOCATION (exp);
2433
2434 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2435 return NULL_RTX;
2436
2437 arg = CALL_EXPR_ARG (exp, 0);
2438 type = TREE_TYPE (arg);
2439 mode = TYPE_MODE (TREE_TYPE (arg));
2440
2441   /* Try expanding via a sincos optab, falling back to emitting a libcall
2442      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2443      is only generated from sincos or cexp, or when either is available.  */
2444 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2445 {
2446 op1 = gen_reg_rtx (mode);
2447 op2 = gen_reg_rtx (mode);
2448
2449 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2450
2451 /* Compute into op1 and op2. */
2452 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2453 }
2454 else if (targetm.libc_has_function (function_sincos))
2455 {
2456 tree call, fn = NULL_TREE;
2457 tree top1, top2;
2458 rtx op1a, op2a;
2459
2460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2461 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2463 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2465 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2466 else
2467 gcc_unreachable ();
2468
2469 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2470 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2471 op1a = copy_addr_to_reg (XEXP (op1, 0));
2472 op2a = copy_addr_to_reg (XEXP (op2, 0));
2473 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2474 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2475
2476 /* Make sure not to fold the sincos call again. */
2477 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2478 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2479 call, 3, arg, top1, top2));
2480 }
2481 else
2482 {
2483 tree call, fn = NULL_TREE, narg;
2484 tree ctype = build_complex_type (type);
2485
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2492 else
2493 gcc_unreachable ();
2494
2495 /* If we don't have a decl for cexp create one. This is the
2496 friendliest fallback if the user calls __builtin_cexpi
2497 without full target C99 function support. */
2498 if (fn == NULL_TREE)
2499 {
2500 tree fntype;
2501 const char *name = NULL;
2502
2503 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2504 name = "cexpf";
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2506 name = "cexp";
2507 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2508 name = "cexpl";
2509
2510 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2511 fn = build_fn_decl (name, fntype);
2512 }
2513
2514 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2515 build_real (type, dconst0), arg);
2516
2517 /* Make sure not to fold the cexp call again. */
2518 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2519 return expand_expr (build_call_nary (ctype, call, 1, narg),
2520 target, VOIDmode, EXPAND_NORMAL);
2521 }
2522
2523 /* Now build the proper return type. */
2524 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2525 make_tree (TREE_TYPE (arg), op2),
2526 make_tree (TREE_TYPE (arg), op1)),
2527 target, VOIDmode, EXPAND_NORMAL);
2528 }
2529
2530 /* Conveniently construct a function call expression. FNDECL names the
2531 function to be called, N is the number of arguments, and the "..."
2532    parameters are the argument expressions.  Unlike build_call_expr
2533 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2534
2535 static tree
2536 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2537 {
2538 va_list ap;
2539 tree fntype = TREE_TYPE (fndecl);
2540 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2541
2542 va_start (ap, n);
2543 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2544 va_end (ap);
2545 SET_EXPR_LOCATION (fn, loc);
2546 return fn;
2547 }
2548
2549 /* Expand a call to one of the builtin rounding functions gcc defines
2550 as an extension (lfloor and lceil). As these are gcc extensions we
2551 do not need to worry about setting errno to EDOM.
2552    If expanding via the optab fails, lower the expression to (int)(floor(x)).
2553 EXP is the expression that is a call to the builtin function;
2554 if convenient, the result should be placed in TARGET. */
2555
2556 static rtx
2557 expand_builtin_int_roundingfn (tree exp, rtx target)
2558 {
2559 convert_optab builtin_optab;
2560 rtx op0, tmp;
2561 rtx_insn *insns;
2562 tree fndecl = get_callee_fndecl (exp);
2563 enum built_in_function fallback_fn;
2564 tree fallback_fndecl;
2565 machine_mode mode;
2566 tree arg;
2567
2568 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2569 gcc_unreachable ();
2570
2571 arg = CALL_EXPR_ARG (exp, 0);
2572
2573 switch (DECL_FUNCTION_CODE (fndecl))
2574 {
2575 CASE_FLT_FN (BUILT_IN_ICEIL):
2576 CASE_FLT_FN (BUILT_IN_LCEIL):
2577 CASE_FLT_FN (BUILT_IN_LLCEIL):
2578 builtin_optab = lceil_optab;
2579 fallback_fn = BUILT_IN_CEIL;
2580 break;
2581
2582 CASE_FLT_FN (BUILT_IN_IFLOOR):
2583 CASE_FLT_FN (BUILT_IN_LFLOOR):
2584 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2585 builtin_optab = lfloor_optab;
2586 fallback_fn = BUILT_IN_FLOOR;
2587 break;
2588
2589 default:
2590 gcc_unreachable ();
2591 }
2592
2593 /* Make a suitable register to place result in. */
2594 mode = TYPE_MODE (TREE_TYPE (exp));
2595
2596 target = gen_reg_rtx (mode);
2597
2598 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2599 need to expand the argument again. This way, we will not perform
2600      side-effects more than once.  */
2601 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2602
2603 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2604
2605 start_sequence ();
2606
2607 /* Compute into TARGET. */
2608 if (expand_sfix_optab (target, op0, builtin_optab))
2609 {
2610 /* Output the entire sequence. */
2611 insns = get_insns ();
2612 end_sequence ();
2613 emit_insn (insns);
2614 return target;
2615 }
2616
2617 /* If we were unable to expand via the builtin, stop the sequence
2618 (without outputting the insns). */
2619 end_sequence ();
2620
2621 /* Fall back to floating point rounding optab. */
2622 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2623
2624 /* For non-C99 targets we may end up without a fallback fndecl here
2625 if the user called __builtin_lfloor directly. In this case emit
2626 a call to the floor/ceil variants nevertheless. This should result
2627      in the best user experience on targets without full C99 support.  */
2628 if (fallback_fndecl == NULL_TREE)
2629 {
2630 tree fntype;
2631 const char *name = NULL;
2632
2633 switch (DECL_FUNCTION_CODE (fndecl))
2634 {
2635 case BUILT_IN_ICEIL:
2636 case BUILT_IN_LCEIL:
2637 case BUILT_IN_LLCEIL:
2638 name = "ceil";
2639 break;
2640 case BUILT_IN_ICEILF:
2641 case BUILT_IN_LCEILF:
2642 case BUILT_IN_LLCEILF:
2643 name = "ceilf";
2644 break;
2645 case BUILT_IN_ICEILL:
2646 case BUILT_IN_LCEILL:
2647 case BUILT_IN_LLCEILL:
2648 name = "ceill";
2649 break;
2650 case BUILT_IN_IFLOOR:
2651 case BUILT_IN_LFLOOR:
2652 case BUILT_IN_LLFLOOR:
2653 name = "floor";
2654 break;
2655 case BUILT_IN_IFLOORF:
2656 case BUILT_IN_LFLOORF:
2657 case BUILT_IN_LLFLOORF:
2658 name = "floorf";
2659 break;
2660 case BUILT_IN_IFLOORL:
2661 case BUILT_IN_LFLOORL:
2662 case BUILT_IN_LLFLOORL:
2663 name = "floorl";
2664 break;
2665 default:
2666 gcc_unreachable ();
2667 }
2668
2669 fntype = build_function_type_list (TREE_TYPE (arg),
2670 TREE_TYPE (arg), NULL_TREE);
2671 fallback_fndecl = build_fn_decl (name, fntype);
2672 }
2673
2674 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2675
2676 tmp = expand_normal (exp);
2677 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2678
2679 /* Truncate the result of floating point optab to integer
2680 via expand_fix (). */
2681 target = gen_reg_rtx (mode);
2682 expand_fix (target, tmp, 0);
2683
2684 return target;
2685 }
2686
2687 /* Expand a call to one of the builtin math functions doing integer
2688 conversion (lrint).
2689 Return 0 if a normal call should be emitted rather than expanding the
2690 function in-line. EXP is the expression that is a call to the builtin
2691 function; if convenient, the result should be placed in TARGET. */
2692
2693 static rtx
2694 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2695 {
2696 convert_optab builtin_optab;
2697 rtx op0;
2698 rtx_insn *insns;
2699 tree fndecl = get_callee_fndecl (exp);
2700 tree arg;
2701 machine_mode mode;
2702 enum built_in_function fallback_fn = BUILT_IN_NONE;
2703
2704 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2705 gcc_unreachable ();
2706
2707 arg = CALL_EXPR_ARG (exp, 0);
2708
2709 switch (DECL_FUNCTION_CODE (fndecl))
2710 {
2711 CASE_FLT_FN (BUILT_IN_IRINT):
2712 fallback_fn = BUILT_IN_LRINT;
2713 gcc_fallthrough ();
2714 CASE_FLT_FN (BUILT_IN_LRINT):
2715 CASE_FLT_FN (BUILT_IN_LLRINT):
2716 builtin_optab = lrint_optab;
2717 break;
2718
2719 CASE_FLT_FN (BUILT_IN_IROUND):
2720 fallback_fn = BUILT_IN_LROUND;
2721 gcc_fallthrough ();
2722 CASE_FLT_FN (BUILT_IN_LROUND):
2723 CASE_FLT_FN (BUILT_IN_LLROUND):
2724 builtin_optab = lround_optab;
2725 break;
2726
2727 default:
2728 gcc_unreachable ();
2729 }
2730
2731 /* There's no easy way to detect the case we need to set EDOM. */
2732 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2733 return NULL_RTX;
2734
2735 /* Make a suitable register to place result in. */
2736 mode = TYPE_MODE (TREE_TYPE (exp));
2737
2738 /* There's no easy way to detect the case we need to set EDOM. */
2739 if (!flag_errno_math)
2740 {
2741 rtx result = gen_reg_rtx (mode);
2742
2743 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2744 need to expand the argument again. This way, we will not perform
2745 	 side-effects more than once.  */
2746 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2747
2748 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2749
2750 start_sequence ();
2751
2752 if (expand_sfix_optab (result, op0, builtin_optab))
2753 {
2754 /* Output the entire sequence. */
2755 insns = get_insns ();
2756 end_sequence ();
2757 emit_insn (insns);
2758 return result;
2759 }
2760
2761 /* If we were unable to expand via the builtin, stop the sequence
2762 (without outputting the insns) and call to the library function
2763 with the stabilized argument list. */
2764 end_sequence ();
2765 }
2766
2767 if (fallback_fn != BUILT_IN_NONE)
2768 {
2769 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2770 targets, (int) round (x) should never be transformed into
2771 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2772 a call to lround in the hope that the target provides at least some
2773 	 C99 functions.  This should result in the best user experience on
2774 	 targets without full C99 support.  */
2775 tree fallback_fndecl = mathfn_built_in_1
2776 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2777
2778 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2779 fallback_fndecl, 1, arg);
2780
2781 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2782 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2783 return convert_to_mode (mode, target, 0);
2784 }
2785
2786 return expand_call (exp, target, target == const0_rtx);
2787 }
2788
2789 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2790 a normal call should be emitted rather than expanding the function
2791 in-line. EXP is the expression that is a call to the builtin
2792 function; if convenient, the result should be placed in TARGET. */
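
/* The expansion below always emits a libcall through powi_optab; for a
   double argument this is normally the libgcc routine __powidf2, so
   __builtin_powi (x, n) becomes, in effect, __powidf2 (x, n).  */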
2793
2794 static rtx
2795 expand_builtin_powi (tree exp, rtx target)
2796 {
2797 tree arg0, arg1;
2798 rtx op0, op1;
2799 machine_mode mode;
2800 machine_mode mode2;
2801
2802 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2803 return NULL_RTX;
2804
2805 arg0 = CALL_EXPR_ARG (exp, 0);
2806 arg1 = CALL_EXPR_ARG (exp, 1);
2807 mode = TYPE_MODE (TREE_TYPE (exp));
2808
2809 /* Emit a libcall to libgcc. */
2810
2811 /* Mode of the 2nd argument must match that of an int. */
2812 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2813
2814 if (target == NULL_RTX)
2815 target = gen_reg_rtx (mode);
2816
2817 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2818 if (GET_MODE (op0) != mode)
2819 op0 = convert_to_mode (mode, op0, 0);
2820 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2821 if (GET_MODE (op1) != mode2)
2822 op1 = convert_to_mode (mode2, op1, 0);
2823
2824 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2825 target, LCT_CONST, mode,
2826 op0, mode, op1, mode2);
2827
2828 return target;
2829 }
2830
2831 /* Expand expression EXP which is a call to the strlen builtin. Return
2832 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2833 try to get the result in TARGET, if convenient. */
2834
2835 static rtx
2836 expand_builtin_strlen (tree exp, rtx target,
2837 machine_mode target_mode)
2838 {
2839 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2840 return NULL_RTX;
2841
2842 struct expand_operand ops[4];
2843 rtx pat;
2844 tree len;
2845 tree src = CALL_EXPR_ARG (exp, 0);
2846 rtx src_reg;
2847 rtx_insn *before_strlen;
2848 machine_mode insn_mode;
2849 enum insn_code icode = CODE_FOR_nothing;
2850 unsigned int align;
2851
2852 /* If the length can be computed at compile-time, return it. */
2853 len = c_strlen (src, 0);
2854 if (len)
2855 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2856
2857   /* If the length can be computed at compile-time and is a constant
2858 integer, but there are side-effects in src, evaluate
2859 src for side-effects, then return len.
2860 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2861 can be optimized into: i++; x = 3; */
2862 len = c_strlen (src, 1);
2863 if (len && TREE_CODE (len) == INTEGER_CST)
2864 {
2865 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2866 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2867 }
2868
2869 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2870
2871 /* If SRC is not a pointer type, don't do this operation inline. */
2872 if (align == 0)
2873 return NULL_RTX;
2874
2875 /* Bail out if we can't compute strlen in the right mode. */
2876 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2877 {
2878 icode = optab_handler (strlen_optab, insn_mode);
2879 if (icode != CODE_FOR_nothing)
2880 break;
2881 }
2882 if (insn_mode == VOIDmode)
2883 return NULL_RTX;
2884
2885 /* Make a place to hold the source address. We will not expand
2886 the actual source until we are sure that the expansion will
2887 not fail -- there are trees that cannot be expanded twice. */
2888 src_reg = gen_reg_rtx (Pmode);
2889
2890 /* Mark the beginning of the strlen sequence so we can emit the
2891 source operand later. */
2892 before_strlen = get_last_insn ();
2893
2894 create_output_operand (&ops[0], target, insn_mode);
2895 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2896 create_integer_operand (&ops[2], 0);
2897 create_integer_operand (&ops[3], align);
2898 if (!maybe_expand_insn (icode, 4, ops))
2899 return NULL_RTX;
2900
2901 /* Check to see if the argument was declared attribute nonstring
2902 and if so, issue a warning since at this point it's not known
2903 to be nul-terminated. */
2904 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2905
2906 /* Now that we are assured of success, expand the source. */
2907 start_sequence ();
2908 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2909 if (pat != src_reg)
2910 {
2911 #ifdef POINTERS_EXTEND_UNSIGNED
2912 if (GET_MODE (pat) != Pmode)
2913 pat = convert_to_mode (Pmode, pat,
2914 POINTERS_EXTEND_UNSIGNED);
2915 #endif
2916 emit_move_insn (src_reg, pat);
2917 }
2918 pat = get_insns ();
2919 end_sequence ();
2920
2921 if (before_strlen)
2922 emit_insn_after (pat, before_strlen);
2923 else
2924 emit_insn_before (pat, get_insns ());
2925
2926 /* Return the value in the proper mode for this function. */
2927 if (GET_MODE (ops[0].value) == target_mode)
2928 target = ops[0].value;
2929 else if (target != 0)
2930 convert_move (target, ops[0].value, 0);
2931 else
2932 target = convert_to_mode (target_mode, ops[0].value, 0);
2933
2934 return target;
2935 }
2936
2937 /* Expand call EXP to the strnlen built-in, returning the result
2938 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2939
2940 static rtx
2941 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2942 {
2943 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2944 return NULL_RTX;
2945
2946 tree src = CALL_EXPR_ARG (exp, 0);
2947 tree bound = CALL_EXPR_ARG (exp, 1);
2948
2949 if (!bound)
2950 return NULL_RTX;
2951
2952 location_t loc = UNKNOWN_LOCATION;
2953 if (EXPR_HAS_LOCATION (exp))
2954 loc = EXPR_LOCATION (exp);
2955
2956 tree maxobjsize = max_object_size ();
2957 tree func = get_callee_fndecl (exp);
2958
2959 tree len = c_strlen (src, 0);
2960
2961 if (TREE_CODE (bound) == INTEGER_CST)
2962 {
2963 if (!TREE_NO_WARNING (exp)
2964 && tree_int_cst_lt (maxobjsize, bound)
2965 && warning_at (loc, OPT_Wstringop_overflow_,
2966 "%K%qD specified bound %E "
2967 "exceeds maximum object size %E",
2968 exp, func, bound, maxobjsize))
2969 TREE_NO_WARNING (exp) = true;
2970
2971 if (!len || TREE_CODE (len) != INTEGER_CST)
2972 return NULL_RTX;
2973
2974 len = fold_convert_loc (loc, size_type_node, len);
2975 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2976 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2977 }
2978
2979 if (TREE_CODE (bound) != SSA_NAME)
2980 return NULL_RTX;
2981
2982 wide_int min, max;
2983 enum value_range_type rng = get_range_info (bound, &min, &max);
2984 if (rng != VR_RANGE)
2985 return NULL_RTX;
2986
2987 if (!TREE_NO_WARNING (exp)
2988 && wi::ltu_p (wi::to_wide (maxobjsize), min)
2989 && warning_at (loc, OPT_Wstringop_overflow_,
2990 "%K%qD specified bound [%wu, %wu] "
2991 "exceeds maximum object size %E",
2992 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
2993 TREE_NO_WARNING (exp) = true;
2994
2995 if (!len || TREE_CODE (len) != INTEGER_CST)
2996 return NULL_RTX;
2997
2998 if (wi::gtu_p (min, wi::to_wide (len)))
2999 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3000
3001 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3002 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3003 }
3004
3005 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3006 bytes from constant string DATA + OFFSET and return it as target
3007 constant. */
3008
3009 static rtx
3010 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3011 scalar_int_mode mode)
3012 {
3013 const char *str = (const char *) data;
3014
3015 gcc_assert (offset >= 0
3016 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3017 <= strlen (str) + 1));
3018
3019 return c_readstr (str + offset, mode);
3020 }
3021
3022 /* LEN specifies the length of the block for the memcpy/memset operation.
3023    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3024    In some cases we can make a very likely guess on the max size, which we
3025    then set into PROBABLE_MAX_SIZE.  */
3026
3027 static void
3028 determine_block_size (tree len, rtx len_rtx,
3029 unsigned HOST_WIDE_INT *min_size,
3030 unsigned HOST_WIDE_INT *max_size,
3031 unsigned HOST_WIDE_INT *probable_max_size)
3032 {
3033 if (CONST_INT_P (len_rtx))
3034 {
3035 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3036 return;
3037 }
3038 else
3039 {
3040 wide_int min, max;
3041 enum value_range_type range_type = VR_UNDEFINED;
3042
3043 /* Determine bounds from the type. */
3044 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3045 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3046 else
3047 *min_size = 0;
3048 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3049 *probable_max_size = *max_size
3050 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3051 else
3052 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3053
3054 if (TREE_CODE (len) == SSA_NAME)
3055 range_type = get_range_info (len, &min, &max);
3056 if (range_type == VR_RANGE)
3057 {
3058 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3059 *min_size = min.to_uhwi ();
3060 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3061 *probable_max_size = *max_size = max.to_uhwi ();
3062 }
3063 else if (range_type == VR_ANTI_RANGE)
3064 {
3065 	  /* Anti range 0...N lets us determine the minimal size to be N+1.  */
3066 if (min == 0)
3067 {
3068 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3069 *min_size = max.to_uhwi () + 1;
3070 }
3071 /* Code like
3072
3073 int n;
3074 if (n < 100)
3075 memcpy (a, b, n)
3076
3077 	     produces an anti range allowing negative values of N.  We can
3078 	     still use this information to guess that N is not negative.
3079 */
3080 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3081 *probable_max_size = min.to_uhwi () - 1;
3082 }
3083 }
3084 gcc_checking_assert (*max_size <=
3085 (unsigned HOST_WIDE_INT)
3086 GET_MODE_MASK (GET_MODE (len_rtx)));
3087 }
3088
3089 /* Try to verify that the sizes and lengths of the arguments to a string
3090 manipulation function given by EXP are within valid bounds and that
3091 the operation does not lead to buffer overflow or read past the end.
3092 Arguments other than EXP may be null. When non-null, the arguments
3093 have the following meaning:
3094 DST is the destination of a copy call or NULL otherwise.
3095 SRC is the source of a copy call or NULL otherwise.
3096 DSTWRITE is the number of bytes written into the destination obtained
3097 from the user-supplied size argument to the function (such as in
3098    memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3099 MAXREAD is the user-supplied bound on the length of the source sequence
3100    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3101 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3102 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3103 expression EXP is a string function call (as opposed to a memory call
3104 like memcpy). As an exception, SRCSTR can also be an integer denoting
3105 the precomputed size of the source string or object (for functions like
3106 memcpy).
3107 DSTSIZE is the size of the destination object specified by the last
3108 argument to the _chk builtins, typically resulting from the expansion
3109 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3110    DSTSIZE)).
3111
3112 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3113 SIZE_MAX.
3114
3115 If the call is successfully verified as safe return true, otherwise
3116 return false. */
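
/* As a hypothetical example of a rejected call:

     char d[3];
     strcpy (d, "abcde");

   reaches this routine with DSTSIZE == 3 while the copy writes 6 bytes
   (including the terminating nul), so it is diagnosed with
   -Wstringop-overflow= and false is returned.  */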
3117
3118 static bool
3119 check_access (tree exp, tree, tree, tree dstwrite,
3120 tree maxread, tree srcstr, tree dstsize)
3121 {
3122 int opt = OPT_Wstringop_overflow_;
3123
3124 /* The size of the largest object is half the address space, or
3125 PTRDIFF_MAX. (This is way too permissive.) */
3126 tree maxobjsize = max_object_size ();
3127
3128 /* Either the length of the source string for string functions or
3129 the size of the source object for raw memory functions. */
3130 tree slen = NULL_TREE;
3131
3132 tree range[2] = { NULL_TREE, NULL_TREE };
3133
3134 /* Set to true when the exact number of bytes written by a string
3135 function like strcpy is not known and the only thing that is
3136 known is that it must be at least one (for the terminating nul). */
3137 bool at_least_one = false;
3138 if (srcstr)
3139 {
3140 /* SRCSTR is normally a pointer to string but as a special case
3141 it can be an integer denoting the length of a string. */
3142 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3143 {
3144 /* Try to determine the range of lengths the source string
3145 refers to. If it can be determined and is less than
3146 the upper bound given by MAXREAD add one to it for
3147 the terminating nul. Otherwise, set it to one for
3148 the same reason, or to MAXREAD as appropriate. */
3149 get_range_strlen (srcstr, range);
3150 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3151 {
3152 if (maxread && tree_int_cst_le (maxread, range[0]))
3153 range[0] = range[1] = maxread;
3154 else
3155 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3156 range[0], size_one_node);
3157
3158 if (maxread && tree_int_cst_le (maxread, range[1]))
3159 range[1] = maxread;
3160 else if (!integer_all_onesp (range[1]))
3161 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3162 range[1], size_one_node);
3163
3164 slen = range[0];
3165 }
3166 else
3167 {
3168 at_least_one = true;
3169 slen = size_one_node;
3170 }
3171 }
3172 else
3173 slen = srcstr;
3174 }
3175
3176 if (!dstwrite && !maxread)
3177 {
3178 /* When the only available piece of data is the object size
3179 there is nothing to do. */
3180 if (!slen)
3181 return true;
3182
3183 /* Otherwise, when the length of the source sequence is known
3184 (as with strlen), set DSTWRITE to it. */
3185 if (!range[0])
3186 dstwrite = slen;
3187 }
3188
3189 if (!dstsize)
3190 dstsize = maxobjsize;
3191
3192 if (dstwrite)
3193 get_size_range (dstwrite, range);
3194
3195 /* This can happen at -O0. */
3196 if (range[0] && TREE_CODE (range[0]) != INTEGER_CST)
3197 return false;
3198
3199 tree func = get_callee_fndecl (exp);
3200
3201 /* First check the number of bytes to be written against the maximum
3202 object size. */
3203 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3204 {
3205 if (TREE_NO_WARNING (exp))
3206 return false;
3207
3208 location_t loc = tree_nonartificial_location (exp);
3209 loc = expansion_point_location_if_in_system_header (loc);
3210
3211 bool warned;
3212 if (range[0] == range[1])
3213 warned = warning_at (loc, opt,
3214 "%K%qD specified size %E "
3215 "exceeds maximum object size %E",
3216 exp, func, range[0], maxobjsize);
3217 else
3218 warned = warning_at (loc, opt,
3219 "%K%qD specified size between %E and %E "
3220 "exceeds maximum object size %E",
3221 exp, func,
3222 range[0], range[1], maxobjsize);
3223 if (warned)
3224 TREE_NO_WARNING (exp) = true;
3225
3226 return false;
3227 }
3228
3229 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3230 constant, and in range of unsigned HOST_WIDE_INT. */
3231 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3232
3233 /* Next check the number of bytes to be written against the destination
3234 object size. */
3235 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3236 {
3237 if (range[0]
3238 && ((tree_fits_uhwi_p (dstsize)
3239 && tree_int_cst_lt (dstsize, range[0]))
3240 || (tree_fits_uhwi_p (dstwrite)
3241 && tree_int_cst_lt (dstwrite, range[0]))))
3242 {
3243 if (TREE_NO_WARNING (exp))
3244 return false;
3245
3246 location_t loc = tree_nonartificial_location (exp);
3247 loc = expansion_point_location_if_in_system_header (loc);
3248
3249 if (dstwrite == slen && at_least_one)
3250 {
3251 /* This is a call to strcpy with a destination of 0 size
3252 and a source of unknown length. The call will write
3253 at least one byte past the end of the destination. */
3254 warning_at (loc, opt,
3255 "%K%qD writing %E or more bytes into a region "
3256 "of size %E overflows the destination",
3257 exp, func, range[0], dstsize);
3258 }
3259 else if (tree_int_cst_equal (range[0], range[1]))
3260 warning_n (loc, opt, tree_to_uhwi (range[0]),
3261 "%K%qD writing %E byte into a region "
3262 "of size %E overflows the destination",
3263 "%K%qD writing %E bytes into a region "
3264 "of size %E overflows the destination",
3265 exp, func, range[0], dstsize);
3266 else if (tree_int_cst_sign_bit (range[1]))
3267 {
3268 /* Avoid printing the upper bound if it's invalid. */
3269 warning_at (loc, opt,
3270 "%K%qD writing %E or more bytes into a region "
3271 "of size %E overflows the destination",
3272 exp, func, range[0], dstsize);
3273 }
3274 else
3275 warning_at (loc, opt,
3276 "%K%qD writing between %E and %E bytes into "
3277 "a region of size %E overflows the destination",
3278 exp, func, range[0], range[1],
3279 dstsize);
3280
3281 /* Return error when an overflow has been detected. */
3282 return false;
3283 }
3284 }
3285
3286 /* Check the maximum length of the source sequence against the size
3287 of the destination object if known, or against the maximum size
3288 of an object. */
3289 if (maxread)
3290 {
3291 get_size_range (maxread, range);
3292
3293 /* Use the lower end for MAXREAD from now on. */
3294 if (range[0])
3295 maxread = range[0];
3296
3297 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3298 {
3299 location_t loc = tree_nonartificial_location (exp);
3300 loc = expansion_point_location_if_in_system_header (loc);
3301
3302 if (tree_int_cst_lt (maxobjsize, range[0]))
3303 {
3304 if (TREE_NO_WARNING (exp))
3305 return false;
3306
3307 /* Warn about crazy big sizes first since that's more
3308 likely to be meaningful than saying that the bound
3309 is greater than the object size if both are big. */
3310 if (range[0] == range[1])
3311 warning_at (loc, opt,
3312 "%K%qD specified bound %E "
3313 "exceeds maximum object size %E",
3314 exp, func,
3315 range[0], maxobjsize);
3316 else
3317 warning_at (loc, opt,
3318 "%K%qD specified bound between %E and %E "
3319 "exceeds maximum object size %E",
3320 exp, func,
3321 range[0], range[1], maxobjsize);
3322
3323 return false;
3324 }
3325
3326 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3327 {
3328 if (TREE_NO_WARNING (exp))
3329 return false;
3330
3331 if (tree_int_cst_equal (range[0], range[1]))
3332 warning_at (loc, opt,
3333 "%K%qD specified bound %E "
3334 "exceeds destination size %E",
3335 exp, func,
3336 range[0], dstsize);
3337 else
3338 warning_at (loc, opt,
3339 "%K%qD specified bound between %E and %E "
3340 "exceeds destination size %E",
3341 exp, func,
3342 range[0], range[1], dstsize);
3343 return false;
3344 }
3345 }
3346 }
3347
3348 /* Check for reading past the end of SRC. */
3349 if (slen
3350 && slen == srcstr
3351 && dstwrite && range[0]
3352 && tree_int_cst_lt (slen, range[0]))
3353 {
3354 if (TREE_NO_WARNING (exp))
3355 return false;
3356
3357 location_t loc = tree_nonartificial_location (exp);
3358
3359 if (tree_int_cst_equal (range[0], range[1]))
3360 warning_n (loc, opt, tree_to_uhwi (range[0]),
3361 "%K%qD reading %E byte from a region of size %E",
3362 "%K%qD reading %E bytes from a region of size %E",
3363 exp, func, range[0], slen);
3364 else if (tree_int_cst_sign_bit (range[1]))
3365 {
3366 /* Avoid printing the upper bound if it's invalid. */
3367 warning_at (loc, opt,
3368 "%K%qD reading %E or more bytes from a region "
3369 "of size %E",
3370 exp, func, range[0], slen);
3371 }
3372 else
3373 warning_at (loc, opt,
3374 "%K%qD reading between %E and %E bytes from a region "
3375 "of size %E",
3376 exp, func, range[0], range[1], slen);
3377 return false;
3378 }
3379
3380 return true;
3381 }
3382
3383 /* Helper to compute the size of the object referenced by the DEST
3384 expression which must have pointer type, using Object Size type
3385 OSTYPE (only the least significant 2 bits are used). Return
3386 an estimate of the size of the object if successful or NULL when
3387 the size cannot be determined. When the referenced object involves
3388 a non-constant offset in some range the returned value represents
3389 the largest size given the smallest non-negative offset in the
3390 range. The function is intended for diagnostics and should not
3391 be used to influence code generation or optimization. */
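/* Illustrative sketch (hypothetical variables, not from the sources):
   for a pointer formed with a variable offset into a known object, e.g.

     char a[8];
     char *p = a + i;   // i known to be in the range [2, 5]

   compute_builtin_object_size fails, and the POINTER_PLUS_EXPR handling
   below falls back to the lower bound of the offset range, reporting
   8 - 2 = 6 remaining bytes.  */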
3392
3393 tree
3394 compute_objsize (tree dest, int ostype)
3395 {
3396 unsigned HOST_WIDE_INT size;
3397
3398 /* Only the two least significant bits are meaningful. */
3399 ostype &= 3;
3400
3401 if (compute_builtin_object_size (dest, ostype, &size))
3402 return build_int_cst (sizetype, size);
3403
3404 if (TREE_CODE (dest) == SSA_NAME)
3405 {
3406 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3407 if (!is_gimple_assign (stmt))
3408 return NULL_TREE;
3409
3410 dest = gimple_assign_rhs1 (stmt);
3411
3412 tree_code code = gimple_assign_rhs_code (stmt);
3413 if (code == POINTER_PLUS_EXPR)
3414 {
3415 /* compute_builtin_object_size fails for addresses with
3416 non-constant offsets. Try to determine the range of
3417 such an offset here and use it to adjust the constant
3418 size. */
3419 tree off = gimple_assign_rhs2 (stmt);
3420 if (TREE_CODE (off) == INTEGER_CST)
3421 {
3422 if (tree size = compute_objsize (dest, ostype))
3423 {
3424 wide_int wioff = wi::to_wide (off);
3425 wide_int wisiz = wi::to_wide (size);
3426
3427 /* Ignore negative offsets for now. For others,
3428 use the lower bound as the most optimistic
3429 estimate of the (remaining) size. */
3430 if (wi::sign_mask (wioff))
3431 ;
3432 else if (wi::ltu_p (wioff, wisiz))
3433 return wide_int_to_tree (TREE_TYPE (size),
3434 wi::sub (wisiz, wioff));
3435 else
3436 return size_zero_node;
3437 }
3438 }
3439 else if (TREE_CODE (off) == SSA_NAME
3440 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3441 {
3442 wide_int min, max;
3443 enum value_range_type rng = get_range_info (off, &min, &max);
3444
3445 if (rng == VR_RANGE)
3446 {
3447 if (tree size = compute_objsize (dest, ostype))
3448 {
3449 wide_int wisiz = wi::to_wide (size);
3450
3451 /* Ignore negative offsets for now. For others,
3452 use the lower bound as the most optimistic
3453 estimate of the (remaining) size. */
3454 if (wi::sign_mask (min))
3455 ;
3456 else if (wi::ltu_p (min, wisiz))
3457 return wide_int_to_tree (TREE_TYPE (size),
3458 wi::sub (wisiz, min));
3459 else
3460 return size_zero_node;
3461 }
3462 }
3463 }
3464 }
3465 else if (code != ADDR_EXPR)
3466 return NULL_TREE;
3467 }
3468
3469 /* Unless computing the largest size (for memcpy and other raw memory
3470 functions), try to determine the size of the object from its type. */
3471 if (!ostype)
3472 return NULL_TREE;
3473
3474 if (TREE_CODE (dest) != ADDR_EXPR)
3475 return NULL_TREE;
3476
3477 tree type = TREE_TYPE (dest);
3478 if (TREE_CODE (type) == POINTER_TYPE)
3479 type = TREE_TYPE (type);
3480
3481 type = TYPE_MAIN_VARIANT (type);
3482
3483 if (TREE_CODE (type) == ARRAY_TYPE
3484 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3485 {
3486 /* Return the constant size unless it's zero (that's a zero-length
3487 array likely at the end of a struct). */
3488 tree size = TYPE_SIZE_UNIT (type);
3489 if (size && TREE_CODE (size) == INTEGER_CST
3490 && !integer_zerop (size))
3491 return size;
3492 }
3493
3494 return NULL_TREE;
3495 }
3496
3497 /* Helper to determine and check the sizes of the source and the destination
3498 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3499 call expression, DEST is the destination argument, SRC is the source
3500 argument or null, and LEN is the number of bytes. Use Object Size type-0
3501 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3502 (no overflow or invalid sizes), false otherwise. */
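/* Hypothetical example of the type-0 (whole object) semantics used here,
   illustrative only:

     struct S { char a[4]; char b[4]; } s;
     memcpy (s.a, src, 8);   // type-0 size of s.a is 8: no warning

   Raw-memory functions are commonly used to access adjacent members, so
   the smaller type-1 subobject size (4 here) is deliberately not used.  */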
3503
3504 static bool
3505 check_memop_access (tree exp, tree dest, tree src, tree size)
3506 {
3507 /* For functions like memset and memcpy that operate on raw memory
3508 try to determine the size of the largest source and destination
3509 object using type-0 Object Size regardless of the object size
3510 type specified by the option. */
3511 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3512 tree dstsize = compute_objsize (dest, 0);
3513
3514 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3515 srcsize, dstsize);
3516 }
3517
3518 /* Validate memchr arguments without performing any expansion.
3519 Return NULL_RTX. */
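/* For instance (hypothetical, illustrative only):

     char buf[4];
     memchr (buf, 0, 8);   // length 8 exceeds the size of buf

   is diagnosed by the check below; the call itself is still expanded as
   an ordinary library call since NULL_RTX is always returned.  */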
3520
3521 static rtx
3522 expand_builtin_memchr (tree exp, rtx)
3523 {
3524 if (!validate_arglist (exp,
3525 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3527
3528 tree arg1 = CALL_EXPR_ARG (exp, 0);
3529 tree len = CALL_EXPR_ARG (exp, 2);
3530
3531 /* Diagnose calls where the specified length exceeds the size
3532 of the object. */
3533 if (warn_stringop_overflow)
3534 {
3535 tree size = compute_objsize (arg1, 0);
3536 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3537 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3538 }
3539
3540 return NULL_RTX;
3541 }
3542
3543 /* Expand a call EXP to the memcpy builtin.
3544 Return NULL_RTX if we failed; the caller should emit a normal call,
3545 otherwise try to get the result in TARGET, if convenient (and in
3546 mode MODE if that's convenient). */
3547
3548 static rtx
3549 expand_builtin_memcpy (tree exp, rtx target)
3550 {
3551 if (!validate_arglist (exp,
3552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
3554
3555 tree dest = CALL_EXPR_ARG (exp, 0);
3556 tree src = CALL_EXPR_ARG (exp, 1);
3557 tree len = CALL_EXPR_ARG (exp, 2);
3558
3559 check_memop_access (exp, dest, src, len);
3560
3561 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3562 /*endp=*/ 0);
3563 }
3564
3565 /* Check a call EXP to the memmove built-in for validity.
3566 Return NULL_RTX on both success and failure. */
3567
3568 static rtx
3569 expand_builtin_memmove (tree exp, rtx)
3570 {
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3574
3575 tree dest = CALL_EXPR_ARG (exp, 0);
3576 tree src = CALL_EXPR_ARG (exp, 1);
3577 tree len = CALL_EXPR_ARG (exp, 2);
3578
3579 check_memop_access (exp, dest, src, len);
3580
3581 return NULL_RTX;
3582 }
3583
3584 /* Expand a call EXP to the mempcpy builtin.
3585 Return NULL_RTX if we failed; the caller should emit a normal call,
3586 otherwise try to get the result in TARGET, if convenient (and in
3587 mode MODE if that's convenient). If ENDP is 0 return the
3588 destination pointer, if ENDP is 1 return the end pointer ala
3589 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3590 stpcpy. */
3591
3592 static rtx
3593 expand_builtin_mempcpy (tree exp, rtx target)
3594 {
3595 if (!validate_arglist (exp,
3596 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3597 return NULL_RTX;
3598
3599 tree dest = CALL_EXPR_ARG (exp, 0);
3600 tree src = CALL_EXPR_ARG (exp, 1);
3601 tree len = CALL_EXPR_ARG (exp, 2);
3602
3603 /* Policy does not generally allow using compute_objsize (which
3604 is used internally by check_memop_access) to change code generation
3605 or drive optimization decisions.
3606
3607 In this instance it is safe because the code we generate has
3608 the same semantics regardless of the return value of
3609 check_memop_access. Exactly the same amount of data is copied
3610 and the return value is exactly the same in both cases.
3611
3612 Furthermore, check_memop_access always uses mode 0 for the call to
3613 compute_objsize, so the imprecise nature of compute_objsize is
3614 avoided. */
3615
3616 /* Avoid expanding mempcpy into memcpy when the call is determined
3617 to overflow the buffer. This also prevents the same overflow
3618 from being diagnosed again when expanding memcpy. */
3619 if (!check_memop_access (exp, dest, src, len))
3620 return NULL_RTX;
3621
3622 return expand_builtin_mempcpy_args (dest, src, len,
3623 target, exp, /*endp=*/ 1);
3624 }
3625
3626 /* Helper function to do the actual work for expand of memory copy family
3627 functions (memcpy, mempcpy, stpcpy). The expansion should assign LEN bytes
3628 of memory from SRC to DEST and assign to TARGET if convenient.
3629 If ENDP is 0 return the
3630 destination pointer, if ENDP is 1 return the end pointer ala
3631 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3632 stpcpy. */
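/* Sketch of the ENDP convention with illustrative values: for a copy of
   N bytes into DST,
     ENDP == 0  returns DST          (memcpy-style),
     ENDP == 1  returns DST + N      (mempcpy-style),
     ENDP == 2  returns DST + N - 1  (stpcpy-style, the last byte written).  */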
3633
3634 static rtx
3635 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3636 rtx target, tree exp, int endp)
3637 {
3638 const char *src_str;
3639 unsigned int src_align = get_pointer_alignment (src);
3640 unsigned int dest_align = get_pointer_alignment (dest);
3641 rtx dest_mem, src_mem, dest_addr, len_rtx;
3642 HOST_WIDE_INT expected_size = -1;
3643 unsigned int expected_align = 0;
3644 unsigned HOST_WIDE_INT min_size;
3645 unsigned HOST_WIDE_INT max_size;
3646 unsigned HOST_WIDE_INT probable_max_size;
3647
3648 /* If DEST is not a pointer type, call the normal function. */
3649 if (dest_align == 0)
3650 return NULL_RTX;
3651
3652 /* If SRC is not a pointer type, don't do this
3653 operation in-line. */
3654 if (src_align == 0)
3655 return NULL_RTX;
3656
3657 if (currently_expanding_gimple_stmt)
3658 stringop_block_profile (currently_expanding_gimple_stmt,
3659 &expected_align, &expected_size);
3660
3661 if (expected_align < dest_align)
3662 expected_align = dest_align;
3663 dest_mem = get_memory_rtx (dest, len);
3664 set_mem_align (dest_mem, dest_align);
3665 len_rtx = expand_normal (len);
3666 determine_block_size (len, len_rtx, &min_size, &max_size,
3667 &probable_max_size);
3668 src_str = c_getstr (src);
3669
3670 /* If SRC is a string constant and block move would be done
3671 by pieces, we can avoid loading the string from memory
3672 and need only store the computed constants. */
3673 if (src_str
3674 && CONST_INT_P (len_rtx)
3675 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3676 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3677 CONST_CAST (char *, src_str),
3678 dest_align, false))
3679 {
3680 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3681 builtin_memcpy_read_str,
3682 CONST_CAST (char *, src_str),
3683 dest_align, false, endp);
3684 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3685 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3686 return dest_mem;
3687 }
3688
3689 src_mem = get_memory_rtx (src, len);
3690 set_mem_align (src_mem, src_align);
3691
3692 /* Copy word part most expediently. */
3693 enum block_op_methods method = BLOCK_OP_NORMAL;
3694 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3695 method = BLOCK_OP_TAILCALL;
3696 if (endp == 1 && target != const0_rtx)
3697 method = BLOCK_OP_NO_LIBCALL_RET;
3698 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3699 expected_align, expected_size,
3700 min_size, max_size, probable_max_size);
3701 if (dest_addr == pc_rtx)
3702 return NULL_RTX;
3703
3704 if (dest_addr == 0)
3705 {
3706 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3707 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3708 }
3709
3710 if (endp && target != const0_rtx)
3711 {
3712 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3713 /* For stpcpy, return a pointer to the last byte written. */
3714 if (endp == 2)
3715 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3716 }
3717
3718 return dest_addr;
3719 }
3720
3721 static rtx
3722 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3723 rtx target, tree orig_exp, int endp)
3724 {
3725 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3726 endp);
3727 }
3728
3729 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3730 we failed, the caller should emit a normal call, otherwise try to
3731 get the result in TARGET, if convenient. If ENDP is 0 return the
3732 destination pointer, if ENDP is 1 return the end pointer ala
3733 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3734 stpcpy. */
3735
3736 static rtx
3737 expand_movstr (tree dest, tree src, rtx target, int endp)
3738 {
3739 struct expand_operand ops[3];
3740 rtx dest_mem;
3741 rtx src_mem;
3742
3743 if (!targetm.have_movstr ())
3744 return NULL_RTX;
3745
3746 dest_mem = get_memory_rtx (dest, NULL);
3747 src_mem = get_memory_rtx (src, NULL);
3748 if (!endp)
3749 {
3750 target = force_reg (Pmode, XEXP (dest_mem, 0));
3751 dest_mem = replace_equiv_address (dest_mem, target);
3752 }
3753
3754 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3755 create_fixed_operand (&ops[1], dest_mem);
3756 create_fixed_operand (&ops[2], src_mem);
3757 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3758 return NULL_RTX;
3759
3760 if (endp && target != const0_rtx)
3761 {
3762 target = ops[0].value;
3763 /* movstr is supposed to set end to the address of the NUL
3764 terminator. If the caller requested a mempcpy-like return value,
3765 adjust it. */
3766 if (endp == 1)
3767 {
3768 rtx tem = plus_constant (GET_MODE (target),
3769 gen_lowpart (GET_MODE (target), target), 1);
3770 emit_move_insn (target, force_operand (tem, NULL_RTX));
3771 }
3772 }
3773 return target;
3774 }
3775
3776 /* Do some very basic size validation of a call to the strcat builtin
3777 given by EXP. Return NULL_RTX to have the built-in expand to a call
3778 to the library function. */
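/* Hypothetical example of the only case this can catch (illustrative):

     char d[4];
     strcat (d, "abcdefgh");   // the source alone cannot fit in d

   Since the length of the string already in D is unknown here, only a
   source longer than the destination object is diagnosed.  */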
3779
3780 static rtx
3781 expand_builtin_strcat (tree exp, rtx)
3782 {
3783 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3784 || !warn_stringop_overflow)
3785 return NULL_RTX;
3786
3787 tree dest = CALL_EXPR_ARG (exp, 0);
3788 tree src = CALL_EXPR_ARG (exp, 1);
3789
3790 /* There is no way here to determine the length of the string in
3791 the destination to which the SRC string is being appended, so
3792 just diagnose cases when the source string is longer than
3793 the destination object. */
3794
3795 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3796
3797 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3798 destsize);
3799
3800 return NULL_RTX;
3801 }
3802
3803 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3804 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3805 try to get the result in TARGET, if convenient (and in mode MODE if that's
3806 convenient). */
3807
3808 static rtx
3809 expand_builtin_strcpy (tree exp, rtx target)
3810 {
3811 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3812 return NULL_RTX;
3813
3814 tree dest = CALL_EXPR_ARG (exp, 0);
3815 tree src = CALL_EXPR_ARG (exp, 1);
3816
3817 if (warn_stringop_overflow)
3818 {
3819 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3820 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3821 src, destsize);
3822 }
3823
3824 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3825 {
3826 /* Check to see if the argument was declared attribute nonstring
3827 and if so, issue a warning since at this point it's not known
3828 to be nul-terminated. */
3829 tree fndecl = get_callee_fndecl (exp);
3830 maybe_warn_nonstring_arg (fndecl, exp);
3831 return ret;
3832 }
3833
3834 return NULL_RTX;
3835 }
3836
3837 /* Helper function to do the actual work for expand_builtin_strcpy. The
3838 arguments to the builtin_strcpy call DEST and SRC are broken out
3839 so that this can also be called without constructing an actual CALL_EXPR.
3840 The other arguments and return value are the same as for
3841 expand_builtin_strcpy. */
3842
3843 static rtx
3844 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3845 {
3846 return expand_movstr (dest, src, target, /*endp=*/0);
3847 }
3848
3849 /* Expand a call EXP to the stpcpy builtin.
3850 Return NULL_RTX if we failed; the caller should emit a normal call,
3851 otherwise try to get the result in TARGET, if convenient (and in
3852 mode MODE if that's convenient). */
3853
3854 static rtx
3855 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3856 {
3857 tree dst, src;
3858 location_t loc = EXPR_LOCATION (exp);
3859
3860 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3861 return NULL_RTX;
3862
3863 dst = CALL_EXPR_ARG (exp, 0);
3864 src = CALL_EXPR_ARG (exp, 1);
3865
3866 if (warn_stringop_overflow)
3867 {
3868 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3869 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3870 src, destsize);
3871 }
3872
3873 /* If return value is ignored, transform stpcpy into strcpy. */
3874 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3875 {
3876 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3877 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3878 return expand_expr (result, target, mode, EXPAND_NORMAL);
3879 }
3880 else
3881 {
3882 tree len, lenp1;
3883 rtx ret;
3884
3885 /* Ensure we get an actual string whose length can be evaluated at
3886 compile-time, not an expression containing a string. This is
3887 because the latter will potentially produce pessimized code
3888 when used to produce the return value. */
3889 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3890 return expand_movstr (dst, src, target, /*endp=*/2);
3891
3892 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3893 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3894 target, exp, /*endp=*/2);
3895
3896 if (ret)
3897 return ret;
3898
3899 if (TREE_CODE (len) == INTEGER_CST)
3900 {
3901 rtx len_rtx = expand_normal (len);
3902
3903 if (CONST_INT_P (len_rtx))
3904 {
3905 ret = expand_builtin_strcpy_args (dst, src, target);
3906
3907 if (ret)
3908 {
3909 if (! target)
3910 {
3911 if (mode != VOIDmode)
3912 target = gen_reg_rtx (mode);
3913 else
3914 target = gen_reg_rtx (GET_MODE (ret));
3915 }
3916 if (GET_MODE (target) != GET_MODE (ret))
3917 ret = gen_lowpart (GET_MODE (target), ret);
3918
3919 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3920 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3921 gcc_assert (ret);
3922
3923 return target;
3924 }
3925 }
3926 }
3927
3928 return expand_movstr (dst, src, target, /*endp=*/2);
3929 }
3930 }
3931
3932 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3933 arguments while being careful to avoid duplicate warnings (which could
3934 be issued if the expander were to expand the call, resulting in it
3935 being emitted in expand_call ()). */
3936
3937 static rtx
3938 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3939 {
3940 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3941 {
3942 /* The call has been successfully expanded. Check for nonstring
3943 arguments and issue warnings as appropriate. */
3944 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3945 return ret;
3946 }
3947
3948 return NULL_RTX;
3949 }
3950
3951 /* Check a call EXP to the stpncpy built-in for validity.
3952 Return NULL_RTX on both success and failure. */
3953
3954 static rtx
3955 expand_builtin_stpncpy (tree exp, rtx)
3956 {
3957 if (!validate_arglist (exp,
3958 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3959 || !warn_stringop_overflow)
3960 return NULL_RTX;
3961
3962 /* The source and destination of the call. */
3963 tree dest = CALL_EXPR_ARG (exp, 0);
3964 tree src = CALL_EXPR_ARG (exp, 1);
3965
3966 /* The exact number of bytes to write (not the maximum). */
3967 tree len = CALL_EXPR_ARG (exp, 2);
3968
3969 /* The size of the destination object. */
3970 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3971
3972 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3973
3974 return NULL_RTX;
3975 }
3976
3977 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3978 bytes from constant string DATA + OFFSET and return it as target
3979 constant. */
3980
3981 rtx
3982 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3983 scalar_int_mode mode)
3984 {
3985 const char *str = (const char *) data;
3986
3987 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3988 return const0_rtx;
3989
3990 return c_readstr (str + offset, mode);
3991 }
3992
3993 /* Helper to check the sizes of sequences and the destination of calls
3994 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3995 success (no overflow or invalid sizes), false otherwise. */
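/* A common mistake this helper flags, as a hypothetical example:

     char d[8];
     strncat (d, s, sizeof d);   // bound equals the destination size

   Because strncat appends a terminating nul in addition to at most the
   bound's worth of characters, such a bound can never be safe.  */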
3996
3997 static bool
3998 check_strncat_sizes (tree exp, tree objsize)
3999 {
4000 tree dest = CALL_EXPR_ARG (exp, 0);
4001 tree src = CALL_EXPR_ARG (exp, 1);
4002 tree maxread = CALL_EXPR_ARG (exp, 2);
4003
4004 /* Try to determine the range of lengths that the source expression
4005 refers to. */
4006 tree lenrange[2];
4007 get_range_strlen (src, lenrange);
4008
4009 /* Try to verify that the destination is big enough for the shortest
4010 string. */
4011
4012 if (!objsize && warn_stringop_overflow)
4013 {
4014 /* If it hasn't been provided by __strncat_chk, try to determine
4015 the size of the destination object into which the source is
4016 being copied. */
4017 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4018 }
4019
4020 /* Add one for the terminating nul. */
4021 tree srclen = (lenrange[0]
4022 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4023 size_one_node)
4024 : NULL_TREE);
4025
4026 /* The strncat function copies at most MAXREAD bytes and always appends
4027 the terminating nul so the specified upper bound should never be equal
4028 to (or greater than) the size of the destination. */
4029 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4030 && tree_int_cst_equal (objsize, maxread))
4031 {
4032 location_t loc = tree_nonartificial_location (exp);
4033 loc = expansion_point_location_if_in_system_header (loc);
4034
4035 warning_at (loc, OPT_Wstringop_overflow_,
4036 "%K%qD specified bound %E equals destination size",
4037 exp, get_callee_fndecl (exp), maxread);
4038
4039 return false;
4040 }
4041
4042 if (!srclen
4043 || (maxread && tree_fits_uhwi_p (maxread)
4044 && tree_fits_uhwi_p (srclen)
4045 && tree_int_cst_lt (maxread, srclen)))
4046 srclen = maxread;
4047
4048 /* The number of bytes to write is LEN but check_access will also
4049 check SRCLEN if LEN's value isn't known. */
4050 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4051 objsize);
4052 }
4053
4054 /* Similar to expand_builtin_strcat, do some very basic size validation
4055 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4056 the built-in expand to a call to the library function. */
4057
4058 static rtx
4059 expand_builtin_strncat (tree exp, rtx)
4060 {
4061 if (!validate_arglist (exp,
4062 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4063 || !warn_stringop_overflow)
4064 return NULL_RTX;
4065
4066 tree dest = CALL_EXPR_ARG (exp, 0);
4067 tree src = CALL_EXPR_ARG (exp, 1);
4068 /* The upper bound on the number of bytes to write. */
4069 tree maxread = CALL_EXPR_ARG (exp, 2);
4070 /* The length of the source sequence. */
4071 tree slen = c_strlen (src, 1);
4072
4073 /* Try to determine the range of lengths that the source expression
4074 refers to. */
4075 tree lenrange[2];
4076 if (slen)
4077 lenrange[0] = lenrange[1] = slen;
4078 else
4079 get_range_strlen (src, lenrange);
4080
4081 /* Try to verify that the destination is big enough for the shortest
4082 string. First try to determine the size of the destination object
4083 into which the source is being copied. */
4084 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4085
4086 /* Add one for the terminating nul. */
4087 tree srclen = (lenrange[0]
4088 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4089 size_one_node)
4090 : NULL_TREE);
4091
4092 /* The strncat function copies at most MAXREAD bytes and always appends
4093 the terminating nul so the specified upper bound should never be equal
4094 to (or greater than) the size of the destination. */
4095 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4096 && tree_int_cst_equal (destsize, maxread))
4097 {
4098 location_t loc = tree_nonartificial_location (exp);
4099 loc = expansion_point_location_if_in_system_header (loc);
4100
4101 warning_at (loc, OPT_Wstringop_overflow_,
4102 "%K%qD specified bound %E equals destination size",
4103 exp, get_callee_fndecl (exp), maxread);
4104
4105 return NULL_RTX;
4106 }
4107
4108 if (!srclen
4109 || (maxread && tree_fits_uhwi_p (maxread)
4110 && tree_fits_uhwi_p (srclen)
4111 && tree_int_cst_lt (maxread, srclen)))
4112 srclen = maxread;
4113
4114 /* The number of bytes to write is SRCLEN. */
4115 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4116
4117 return NULL_RTX;
4118 }
4119
4120 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4121 NULL_RTX if we failed; the caller should emit a normal call. */
4122
4123 static rtx
4124 expand_builtin_strncpy (tree exp, rtx target)
4125 {
4126 location_t loc = EXPR_LOCATION (exp);
4127
4128 if (validate_arglist (exp,
4129 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4130 {
4131 tree dest = CALL_EXPR_ARG (exp, 0);
4132 tree src = CALL_EXPR_ARG (exp, 1);
4133 /* The number of bytes to write (not the maximum). */
4134 tree len = CALL_EXPR_ARG (exp, 2);
4135 /* The length of the source sequence. */
4136 tree slen = c_strlen (src, 1);
4137
4138 if (warn_stringop_overflow)
4139 {
4140 tree destsize = compute_objsize (dest,
4141 warn_stringop_overflow - 1);
4142
4143 /* The number of bytes to write is LEN but check_access will also
4144 check SLEN if LEN's value isn't known. */
4145 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4146 destsize);
4147 }
4148
4149 /* We must be passed a constant len and src parameter. */
4150 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4151 return NULL_RTX;
4152
4153 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4154
4155 /* We're required to pad with trailing zeros if the requested
4156 len is greater than strlen(s2)+1. In that case try to
4157 use store_by_pieces; if it fails, punt. */
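/* E.g. (illustrative only) strncpy (d, "ab", 8) must store the two
   source characters followed by six zero bytes; when the source is a
   constant string the store_by_pieces path below emits all eight bytes
   directly, with builtin_strncpy_read_str supplying the zero padding.  */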
4158 if (tree_int_cst_lt (slen, len))
4159 {
4160 unsigned int dest_align = get_pointer_alignment (dest);
4161 const char *p = c_getstr (src);
4162 rtx dest_mem;
4163
4164 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4165 || !can_store_by_pieces (tree_to_uhwi (len),
4166 builtin_strncpy_read_str,
4167 CONST_CAST (char *, p),
4168 dest_align, false))
4169 return NULL_RTX;
4170
4171 dest_mem = get_memory_rtx (dest, len);
4172 store_by_pieces (dest_mem, tree_to_uhwi (len),
4173 builtin_strncpy_read_str,
4174 CONST_CAST (char *, p), dest_align, false, 0);
4175 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4176 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4177 return dest_mem;
4178 }
4179 }
4180 return NULL_RTX;
4181 }
4182
4183 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4184 bytes from constant string DATA + OFFSET and return it as target
4185 constant. */
4186
4187 rtx
4188 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4189 scalar_int_mode mode)
4190 {
4191 const char *c = (const char *) data;
4192 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4193
4194 memset (p, *c, GET_MODE_SIZE (mode));
4195
4196 return c_readstr (p, mode);
4197 }
4198
4199 /* Callback routine for store_by_pieces. Return the RTL of a register
4200 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4201 char value given in the RTL register data. For example, if mode is
4202 4 bytes wide, return the RTL for 0x01010101*data. */
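/* The same byte-replication trick written in plain C, an illustrative
   sketch only (assumes a 32-bit unsigned int; the function below builds
   the equivalent RTL for an arbitrary integer mode):

     unsigned int
     replicate_byte (unsigned char v)
     {
       return v * 0x01010101u;   // 0xab -> 0xabababab
     }
*/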
4203
4204 static rtx
4205 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4206 scalar_int_mode mode)
4207 {
4208 rtx target, coeff;
4209 size_t size;
4210 char *p;
4211
4212 size = GET_MODE_SIZE (mode);
4213 if (size == 1)
4214 return (rtx) data;
4215
4216 p = XALLOCAVEC (char, size);
4217 memset (p, 1, size);
4218 coeff = c_readstr (p, mode);
4219
4220 target = convert_to_mode (mode, (rtx) data, 1);
4221 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4222 return force_reg (mode, target);
4223 }
4224
4225 /* Expand expression EXP, which is a call to the memset builtin. Return
4226 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4227 try to get the result in TARGET, if convenient (and in mode MODE if that's
4228 convenient). */
4229
4230 static rtx
4231 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4232 {
4233 if (!validate_arglist (exp,
4234 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4235 return NULL_RTX;
4236
4237 tree dest = CALL_EXPR_ARG (exp, 0);
4238 tree val = CALL_EXPR_ARG (exp, 1);
4239 tree len = CALL_EXPR_ARG (exp, 2);
4240
4241 check_memop_access (exp, dest, NULL_TREE, len);
4242
4243 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4244 }
4245
4246 /* Helper function to do the actual work for expand_builtin_memset. The
4247 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4248 so that this can also be called without constructing an actual CALL_EXPR.
4249 The other arguments and return value are the same as for
4250 expand_builtin_memset. */
4251
4252 static rtx
4253 expand_builtin_memset_args (tree dest, tree val, tree len,
4254 rtx target, machine_mode mode, tree orig_exp)
4255 {
4256 tree fndecl, fn;
4257 enum built_in_function fcode;
4258 machine_mode val_mode;
4259 char c;
4260 unsigned int dest_align;
4261 rtx dest_mem, dest_addr, len_rtx;
4262 HOST_WIDE_INT expected_size = -1;
4263 unsigned int expected_align = 0;
4264 unsigned HOST_WIDE_INT min_size;
4265 unsigned HOST_WIDE_INT max_size;
4266 unsigned HOST_WIDE_INT probable_max_size;
4267
4268 dest_align = get_pointer_alignment (dest);
4269
4270 /* If DEST is not a pointer type, don't do this operation in-line. */
4271 if (dest_align == 0)
4272 return NULL_RTX;
4273
4274 if (currently_expanding_gimple_stmt)
4275 stringop_block_profile (currently_expanding_gimple_stmt,
4276 &expected_align, &expected_size);
4277
4278 if (expected_align < dest_align)
4279 expected_align = dest_align;
4280
4281 /* If the LEN parameter is zero, return DEST. */
4282 if (integer_zerop (len))
4283 {
4284 /* Evaluate and ignore VAL in case it has side-effects. */
4285 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4286 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4287 }
4288
4289 /* Stabilize the arguments in case we fail. */
4290 dest = builtin_save_expr (dest);
4291 val = builtin_save_expr (val);
4292 len = builtin_save_expr (len);
4293
4294 len_rtx = expand_normal (len);
4295 determine_block_size (len, len_rtx, &min_size, &max_size,
4296 &probable_max_size);
4297 dest_mem = get_memory_rtx (dest, len);
4298 val_mode = TYPE_MODE (unsigned_char_type_node);
4299
4300 if (TREE_CODE (val) != INTEGER_CST)
4301 {
4302 rtx val_rtx;
4303
4304 val_rtx = expand_normal (val);
4305 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4306
4307 /* Assume that we can memset by pieces if we can store
4308 * the coefficients by pieces (in the required modes).
4309 * We can't pass builtin_memset_gen_str as that emits RTL. */
4310 c = 1;
4311 if (tree_fits_uhwi_p (len)
4312 && can_store_by_pieces (tree_to_uhwi (len),
4313 builtin_memset_read_str, &c, dest_align,
4314 true))
4315 {
4316 val_rtx = force_reg (val_mode, val_rtx);
4317 store_by_pieces (dest_mem, tree_to_uhwi (len),
4318 builtin_memset_gen_str, val_rtx, dest_align,
4319 true, 0);
4320 }
4321 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4322 dest_align, expected_align,
4323 expected_size, min_size, max_size,
4324 probable_max_size))
4325 goto do_libcall;
4326
4327 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4328 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4329 return dest_mem;
4330 }
4331
4332 if (target_char_cast (val, &c))
4333 goto do_libcall;
4334
4335 if (c)
4336 {
4337 if (tree_fits_uhwi_p (len)
4338 && can_store_by_pieces (tree_to_uhwi (len),
4339 builtin_memset_read_str, &c, dest_align,
4340 true))
4341 store_by_pieces (dest_mem, tree_to_uhwi (len),
4342 builtin_memset_read_str, &c, dest_align, true, 0);
4343 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4344 gen_int_mode (c, val_mode),
4345 dest_align, expected_align,
4346 expected_size, min_size, max_size,
4347 probable_max_size))
4348 goto do_libcall;
4349
4350 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4351 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4352 return dest_mem;
4353 }
4354
4355 set_mem_align (dest_mem, dest_align);
4356 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4357 CALL_EXPR_TAILCALL (orig_exp)
4358 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4359 expected_align, expected_size,
4360 min_size, max_size,
4361 probable_max_size);
4362
4363 if (dest_addr == 0)
4364 {
4365 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4366 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4367 }
4368
4369 return dest_addr;
4370
4371 do_libcall:
4372 fndecl = get_callee_fndecl (orig_exp);
4373 fcode = DECL_FUNCTION_CODE (fndecl);
4374 if (fcode == BUILT_IN_MEMSET)
4375 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4376 dest, val, len);
4377 else if (fcode == BUILT_IN_BZERO)
4378 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4379 dest, len);
4380 else
4381 gcc_unreachable ();
4382 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4383 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4384 return expand_call (fn, target, target == const0_rtx);
4385 }
4386
4387 /* Expand expression EXP, which is a call to the bzero builtin. Return
4388 NULL_RTX if we failed; the caller should emit a normal call. */
4389
4390 static rtx
4391 expand_builtin_bzero (tree exp)
4392 {
4393 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4394 return NULL_RTX;
4395
4396 tree dest = CALL_EXPR_ARG (exp, 0);
4397 tree size = CALL_EXPR_ARG (exp, 1);
4398
4399 check_memop_access (exp, dest, NULL_TREE, size);
4400
4401 /* New argument list transforming bzero(ptr x, int y) to
4402 memset(ptr x, int 0, size_t y). This is done this way
4403 so that if it isn't expanded inline, we fall back to
4404 calling bzero instead of memset. */
4405
4406 location_t loc = EXPR_LOCATION (exp);
4407
4408 return expand_builtin_memset_args (dest, integer_zero_node,
4409 fold_convert_loc (loc,
4410 size_type_node, size),
4411 const0_rtx, VOIDmode, exp);
4412 }
4413
4414 /* Try to expand cmpstr operation ICODE with the given operands.
4415 Return the result rtx on success, otherwise return null. */
4416
4417 static rtx
4418 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4419 HOST_WIDE_INT align)
4420 {
4421 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4422
4423 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4424 target = NULL_RTX;
4425
4426 struct expand_operand ops[4];
4427 create_output_operand (&ops[0], target, insn_mode);
4428 create_fixed_operand (&ops[1], arg1_rtx);
4429 create_fixed_operand (&ops[2], arg2_rtx);
4430 create_integer_operand (&ops[3], align);
4431 if (maybe_expand_insn (icode, 4, ops))
4432 return ops[0].value;
4433 return NULL_RTX;
4434 }
4435
4436 /* Expand expression EXP, which is a call to the memcmp built-in function.
4437 Return NULL_RTX if we failed and the caller should emit a normal call,
4438 otherwise try to get the result in TARGET, if convenient.
4439 RESULT_EQ is true if we can relax the returned value to be either zero
4440 or nonzero, without caring about the sign. */
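/* When only equality matters, as in a hypothetical caller such as

     if (memcmp (a, b, n) == 0)
       ...

   RESULT_EQ is true and the expansion may return any nonzero value on a
   mismatch instead of the ordered negative/zero/positive result.  */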
4441
4442 static rtx
4443 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4444 {
4445 if (!validate_arglist (exp,
4446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4447 return NULL_RTX;
4448
4449 tree arg1 = CALL_EXPR_ARG (exp, 0);
4450 tree arg2 = CALL_EXPR_ARG (exp, 1);
4451 tree len = CALL_EXPR_ARG (exp, 2);
4452 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4453 bool no_overflow = true;
4454
4455 /* Diagnose calls where the specified length exceeds the size of either
4456 object. */
4457 tree size = compute_objsize (arg1, 0);
4458 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4459 len, /*maxread=*/NULL_TREE, size,
4460 /*objsize=*/NULL_TREE);
4461 if (no_overflow)
4462 {
4463 size = compute_objsize (arg2, 0);
4464 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4465 len, /*maxread=*/NULL_TREE, size,
4466 /*objsize=*/NULL_TREE);
4467 }
4468
4469 /* Due to the performance benefit, always inline the calls first
4470 when result_eq is false. */
4471 rtx result = NULL_RTX;
4472
4473 if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
4474 {
4475 result = inline_expand_builtin_string_cmp (exp, target, true);
4476 if (result)
4477 return result;
4478 }
4479
4480 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4481 location_t loc = EXPR_LOCATION (exp);
4482
4483 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4484 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4485
4486 /* If we don't have POINTER_TYPE, call the function. */
4487 if (arg1_align == 0 || arg2_align == 0)
4488 return NULL_RTX;
4489
4490 rtx arg1_rtx = get_memory_rtx (arg1, len);
4491 rtx arg2_rtx = get_memory_rtx (arg2, len);
4492 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4493
4494 /* Set MEM_SIZE as appropriate. */
4495 if (CONST_INT_P (len_rtx))
4496 {
4497 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4498 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4499 }
4500
4501 by_pieces_constfn constfn = NULL;
4502
4503 const char *src_str = c_getstr (arg2);
4504 if (result_eq && src_str == NULL)
4505 {
4506 src_str = c_getstr (arg1);
4507 if (src_str != NULL)
4508 std::swap (arg1_rtx, arg2_rtx);
4509 }
4510
4511 /* If SRC is a string constant and block move would be done
4512 by pieces, we can avoid loading the string from memory
4513 and need only store the computed constants. */
4514 if (src_str
4515 && CONST_INT_P (len_rtx)
4516 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4517 constfn = builtin_memcpy_read_str;
4518
4519 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4520 TREE_TYPE (len), target,
4521 result_eq, constfn,
4522 CONST_CAST (char *, src_str));
4523
4524 if (result)
4525 {
4526 /* Return the value in the proper mode for this function. */
4527 if (GET_MODE (result) == mode)
4528 return result;
4529
4530 if (target != 0)
4531 {
4532 convert_move (target, result, 0);
4533 return target;
4534 }
4535
4536 return convert_to_mode (mode, result, 0);
4537 }
4538
4539 return NULL_RTX;
4540 }
4541
4542 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4543 if we failed; the caller should emit a normal call, otherwise try to get
4544 the result in TARGET, if convenient. */
4545
4546 static rtx
4547 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4548 {
4549 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4550 return NULL_RTX;
4551
4552 /* Due to the performance benefit, always inline the calls first. */
4553 rtx result = NULL_RTX;
4554 result = inline_expand_builtin_string_cmp (exp, target, false);
4555 if (result)
4556 return result;
4557
4558 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4559 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4560 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4561 return NULL_RTX;
4562
4563 tree arg1 = CALL_EXPR_ARG (exp, 0);
4564 tree arg2 = CALL_EXPR_ARG (exp, 1);
4565
4566 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4567 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4568
4569 /* If we don't have POINTER_TYPE, call the function. */
4570 if (arg1_align == 0 || arg2_align == 0)
4571 return NULL_RTX;
4572
4573 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4574 arg1 = builtin_save_expr (arg1);
4575 arg2 = builtin_save_expr (arg2);
4576
4577 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4578 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4579
4580 /* Try to call cmpstrsi. */
4581 if (cmpstr_icode != CODE_FOR_nothing)
4582 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4583 MIN (arg1_align, arg2_align));
4584
4585 /* Try to determine at least one length and call cmpstrnsi. */
4586 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4587 {
4588 tree len;
4589 rtx arg3_rtx;
4590
4591 tree len1 = c_strlen (arg1, 1);
4592 tree len2 = c_strlen (arg2, 1);
4593
4594 if (len1)
4595 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4596 if (len2)
4597 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4598
4599 /* If we don't have a constant length for the first, use the length
4600 of the second, if we know it. We don't require a constant for
4601 this case; some cost analysis could be done if both are available
4602 but neither is constant. For now, assume they're equally cheap,
4603 unless one has side effects. If both strings have constant lengths,
4604 use the smaller. */
4605
4606 if (!len1)
4607 len = len2;
4608 else if (!len2)
4609 len = len1;
4610 else if (TREE_SIDE_EFFECTS (len1))
4611 len = len2;
4612 else if (TREE_SIDE_EFFECTS (len2))
4613 len = len1;
4614 else if (TREE_CODE (len1) != INTEGER_CST)
4615 len = len2;
4616 else if (TREE_CODE (len2) != INTEGER_CST)
4617 len = len1;
4618 else if (tree_int_cst_lt (len1, len2))
4619 len = len1;
4620 else
4621 len = len2;
4622
4623 /* If both arguments have side effects, we cannot optimize. */
4624 if (len && !TREE_SIDE_EFFECTS (len))
4625 {
4626 arg3_rtx = expand_normal (len);
4627 result = expand_cmpstrn_or_cmpmem
4628 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4629 arg3_rtx, MIN (arg1_align, arg2_align));
4630 }
4631 }
4632
4633 tree fndecl = get_callee_fndecl (exp);
4634 if (result)
4635 {
4636 /* Check to see if the argument was declared attribute nonstring
4637 and if so, issue a warning since at this point it's not known
4638 to be nul-terminated. */
4639 maybe_warn_nonstring_arg (fndecl, exp);
4640
4641 /* Return the value in the proper mode for this function. */
4642 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4643 if (GET_MODE (result) == mode)
4644 return result;
4645 if (target == 0)
4646 return convert_to_mode (mode, result, 0);
4647 convert_move (target, result, 0);
4648 return target;
4649 }
4650
4651 /* Expand the library call ourselves using a stabilized argument
4652 list to avoid re-evaluating the function's arguments twice. */
4653 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4654 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4655 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4656 return expand_call (fn, target, target == const0_rtx);
4657 }
4658
4659 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4660 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4661 the result in TARGET, if convenient. */
4662
4663 static rtx
4664 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4665 ATTRIBUTE_UNUSED machine_mode mode)
4666 {
4667 if (!validate_arglist (exp,
4668 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4670
4671 /* Due to the performance benefit, always inline the calls first. */
4672 rtx result = NULL_RTX;
4673 result = inline_expand_builtin_string_cmp (exp, target, false);
4674 if (result)
4675 return result;
4676
4677 /* If c_strlen can determine an expression for one of the string
4678 lengths, and it doesn't have side effects, then emit cmpstrnsi
4679 using length MIN(strlen(string)+1, arg3). */
4680 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4681 if (cmpstrn_icode == CODE_FOR_nothing)
4682 return NULL_RTX;
4683
4684 tree len;
4685
4686 tree arg1 = CALL_EXPR_ARG (exp, 0);
4687 tree arg2 = CALL_EXPR_ARG (exp, 1);
4688 tree arg3 = CALL_EXPR_ARG (exp, 2);
4689
4690 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4691 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4692
4693 tree len1 = c_strlen (arg1, 1);
4694 tree len2 = c_strlen (arg2, 1);
4695
4696 location_t loc = EXPR_LOCATION (exp);
4697
4698 if (len1)
4699 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4700 if (len2)
4701 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4702
4703 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4704
4705 /* If we don't have a constant length for the first, use the length
4706 of the second, if we know it. If neither string is constant length,
4707 use the given length argument. We don't require a constant for
4708 this case; some cost analysis could be done if both are available
4709 but neither is constant. For now, assume they're equally cheap,
4710 unless one has side effects. If both strings have constant lengths,
4711 use the smaller. */
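/* Illustrative example with hypothetical operands: for

     strncmp ("ab", s, 100)

   LEN1 is 3 (strlen + 1), LEN2 is unknown, so LEN becomes
   MIN (3, 100) = 3 below and cmpstrnsi only has to examine the first
   three bytes.  */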
4712
4713 if (!len1 && !len2)
4714 len = len3;
4715 else if (!len1)
4716 len = len2;
4717 else if (!len2)
4718 len = len1;
4719 else if (TREE_SIDE_EFFECTS (len1))
4720 len = len2;
4721 else if (TREE_SIDE_EFFECTS (len2))
4722 len = len1;
4723 else if (TREE_CODE (len1) != INTEGER_CST)
4724 len = len2;
4725 else if (TREE_CODE (len2) != INTEGER_CST)
4726 len = len1;
4727 else if (tree_int_cst_lt (len1, len2))
4728 len = len1;
4729 else
4730 len = len2;
4731
4732 /* If we are not using the given length, we must incorporate it here.
4733 The actual new length parameter will be MIN(len,arg3) in this case. */
4734 if (len != len3)
4735 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4736 rtx arg1_rtx = get_memory_rtx (arg1, len);
4737 rtx arg2_rtx = get_memory_rtx (arg2, len);
4738 rtx arg3_rtx = expand_normal (len);
4739 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4740 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4741 MIN (arg1_align, arg2_align));
4742
4743 tree fndecl = get_callee_fndecl (exp);
4744 if (result)
4745 {
4746 /* Check to see if the argument was declared attribute nonstring
4747 and if so, issue a warning since at this point it's not known
4748 to be nul-terminated. */
4749 maybe_warn_nonstring_arg (fndecl, exp);
4750
4751 /* Return the value in the proper mode for this function. */
4752 mode = TYPE_MODE (TREE_TYPE (exp));
4753 if (GET_MODE (result) == mode)
4754 return result;
4755 if (target == 0)
4756 return convert_to_mode (mode, result, 0);
4757 convert_move (target, result, 0);
4758 return target;
4759 }
4760
4761 /* Expand the library call ourselves using a stabilized argument
4762 list to avoid re-evaluating the function's arguments twice. */
4763 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4764 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4765 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4766 return expand_call (fn, target, target == const0_rtx);
4767 }
4768
4769 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4770 if that's convenient. */
4771
4772 rtx
4773 expand_builtin_saveregs (void)
4774 {
4775 rtx val;
4776 rtx_insn *seq;
4777
4778 /* Don't do __builtin_saveregs more than once in a function.
4779 Save the result of the first call and reuse it. */
4780 if (saveregs_value != 0)
4781 return saveregs_value;
4782
4783 /* When this function is called, it means that registers must be
4784 saved on entry to this function. So we migrate the call to the
4785 first insn of this function. */
4786
4787 start_sequence ();
4788
4789 /* Do whatever the machine needs done in this case. */
4790 val = targetm.calls.expand_builtin_saveregs ();
4791
4792 seq = get_insns ();
4793 end_sequence ();
4794
4795 saveregs_value = val;
4796
4797 /* Put the insns after the NOTE that starts the function. If this
4798 is inside a start_sequence, make the outer-level insn chain current, so
4799 the code is placed at the start of the function. */
4800 push_topmost_sequence ();
4801 emit_insn_after (seq, entry_of_function ());
4802 pop_topmost_sequence ();
4803
4804 return val;
4805 }
4806
4807 /* Expand a call to __builtin_next_arg. */
4808
4809 static rtx
4810 expand_builtin_next_arg (void)
4811 {
4812 /* Checking arguments is already done in fold_builtin_next_arg
4813 that must be called before this function. */
4814 return expand_binop (ptr_mode, add_optab,
4815 crtl->args.internal_arg_pointer,
4816 crtl->args.arg_offset_rtx,
4817 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4818 }
4819
4820 /* Make it easier for the backends by protecting the valist argument
4821 from multiple evaluations. */
4822
4823 static tree
4824 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4825 {
4826 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4827
4828 /* The current way of determining the type of valist is completely
4829 bogus. We should have the information on the va builtin instead. */
4830 if (!vatype)
4831 vatype = targetm.fn_abi_va_list (cfun->decl);
4832
4833 if (TREE_CODE (vatype) == ARRAY_TYPE)
4834 {
4835 if (TREE_SIDE_EFFECTS (valist))
4836 valist = save_expr (valist);
4837
4838 /* For this case, the backends will be expecting a pointer to
4839 vatype, but it's possible we've actually been given an array
4840 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4841 So fix it. */
4842 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4843 {
4844 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4845 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4846 }
4847 }
4848 else
4849 {
4850 tree pt = build_pointer_type (vatype);
4851
4852 if (! needs_lvalue)
4853 {
4854 if (! TREE_SIDE_EFFECTS (valist))
4855 return valist;
4856
4857 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4858 TREE_SIDE_EFFECTS (valist) = 1;
4859 }
4860
4861 if (TREE_SIDE_EFFECTS (valist))
4862 valist = save_expr (valist);
4863 valist = fold_build2_loc (loc, MEM_REF,
4864 vatype, valist, build_int_cst (pt, 0));
4865 }
4866
4867 return valist;
4868 }
4869
4870 /* The "standard" definition of va_list is void*. */
4871
4872 tree
4873 std_build_builtin_va_list (void)
4874 {
4875 return ptr_type_node;
4876 }
4877
4878 /* The "standard" abi va_list is va_list_type_node. */
4879
4880 tree
4881 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4882 {
4883 return va_list_type_node;
4884 }
4885
4886 /* The "standard" type of va_list is va_list_type_node. */
4887
4888 tree
4889 std_canonical_va_list_type (tree type)
4890 {
4891 tree wtype, htype;
4892
4893 wtype = va_list_type_node;
4894 htype = type;
4895
4896 if (TREE_CODE (wtype) == ARRAY_TYPE)
4897 {
4898 /* If va_list is an array type, the argument may have decayed
4899 to a pointer type, e.g. by being passed to another function.
4900 In that case, unwrap both types so that we can compare the
4901 underlying records. */
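/* For example, on targets whose va_list is an array of one structure
   (such as the x86_64 SysV ABI), a parameter declared as

     void f (va_list ap);   // hypothetical callee

   gives AP a pointer type due to array-to-pointer decay, which is why
   both WTYPE and HTYPE are unwrapped before the comparison below.  */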
4902 if (TREE_CODE (htype) == ARRAY_TYPE
4903 || POINTER_TYPE_P (htype))
4904 {
4905 wtype = TREE_TYPE (wtype);
4906 htype = TREE_TYPE (htype);
4907 }
4908 }
4909 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4910 return va_list_type_node;
4911
4912 return NULL_TREE;
4913 }
4914
4915 /* The "standard" implementation of va_start: just assign `nextarg' to
4916 the variable. */
4917
4918 void
4919 std_expand_builtin_va_start (tree valist, rtx nextarg)
4920 {
4921 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4922 convert_move (va_r, nextarg, 0);
4923 }
4924
4925 /* Expand EXP, a call to __builtin_va_start. */
4926
4927 static rtx
4928 expand_builtin_va_start (tree exp)
4929 {
4930 rtx nextarg;
4931 tree valist;
4932 location_t loc = EXPR_LOCATION (exp);
4933
4934 if (call_expr_nargs (exp) < 2)
4935 {
4936 error_at (loc, "too few arguments to function %<va_start%>");
4937 return const0_rtx;
4938 }
4939
4940 if (fold_builtin_next_arg (exp, true))
4941 return const0_rtx;
4942
4943 nextarg = expand_builtin_next_arg ();
4944 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4945
4946 if (targetm.expand_builtin_va_start)
4947 targetm.expand_builtin_va_start (valist, nextarg);
4948 else
4949 std_expand_builtin_va_start (valist, nextarg);
4950
4951 return const0_rtx;
4952 }
4953
4954 /* Expand EXP, a call to __builtin_va_end. */
4955
4956 static rtx
4957 expand_builtin_va_end (tree exp)
4958 {
4959 tree valist = CALL_EXPR_ARG (exp, 0);
4960
4961 /* Evaluate for side effects, if needed. I hate macros that don't
4962 do that. */
4963 if (TREE_SIDE_EFFECTS (valist))
4964 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4965
4966 return const0_rtx;
4967 }
4968
4969 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4970 builtin rather than just as an assignment in stdarg.h because of the
4971 nastiness of array-type va_list types. */
4972
4973 static rtx
4974 expand_builtin_va_copy (tree exp)
4975 {
4976 tree dst, src, t;
4977 location_t loc = EXPR_LOCATION (exp);
4978
4979 dst = CALL_EXPR_ARG (exp, 0);
4980 src = CALL_EXPR_ARG (exp, 1);
4981
4982 dst = stabilize_va_list_loc (loc, dst, 1);
4983 src = stabilize_va_list_loc (loc, src, 0);
4984
4985 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4986
4987 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4988 {
4989 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4990 TREE_SIDE_EFFECTS (t) = 1;
4991 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4992 }
4993 else
4994 {
4995 rtx dstb, srcb, size;
4996
4997 /* Evaluate to pointers. */
4998 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4999 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5000 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5001 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5002
5003 dstb = convert_memory_address (Pmode, dstb);
5004 srcb = convert_memory_address (Pmode, srcb);
5005
5006 /* "Dereference" to BLKmode memories. */
5007 dstb = gen_rtx_MEM (BLKmode, dstb);
5008 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5009 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5010 srcb = gen_rtx_MEM (BLKmode, srcb);
5011 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5012 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5013
5014 /* Copy. */
5015 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5016 }
5017
5018 return const0_rtx;
5019 }
5020
5021 /* Expand a call to one of the builtin functions __builtin_frame_address or
5022 __builtin_return_address. */
5023
5024 static rtx
5025 expand_builtin_frame_address (tree fndecl, tree exp)
5026 {
5027 /* The argument must be a nonnegative integer constant.
5028 It counts the number of frames to scan up the stack.
5029 The value is either the frame pointer value or the return
5030 address saved in that frame. */
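/* For instance, __builtin_return_address (0) yields the return address
   of the current function, and __builtin_frame_address (1) the frame
   address of its caller; as warned below, any nonzero count cannot be
   verified to be safe.  */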
5031 if (call_expr_nargs (exp) == 0)
5032 /* Warning about missing arg was already issued. */
5033 return const0_rtx;
5034 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5035 {
5036 error ("invalid argument to %qD", fndecl);
5037 return const0_rtx;
5038 }
5039 else
5040 {
5041 /* Number of frames to scan up the stack. */
5042 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5043
5044 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5045
5046 /* Some ports cannot access arbitrary stack frames. */
5047 if (tem == NULL)
5048 {
5049 warning (0, "unsupported argument to %qD", fndecl);
5050 return const0_rtx;
5051 }
5052
5053 if (count)
5054 {
5055 /* Warn since no effort is made to ensure that any frame
5056 beyond the current one exists or can be safely reached. */
5057 warning (OPT_Wframe_address, "calling %qD with "
5058 "a nonzero argument is unsafe", fndecl);
5059 }
5060
5061 /* For __builtin_frame_address, return what we've got. */
5062 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5063 return tem;
5064
5065 if (!REG_P (tem)
5066 && ! CONSTANT_P (tem))
5067 tem = copy_addr_to_reg (tem);
5068 return tem;
5069 }
5070 }
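
/* Illustrative sketch (editor's note, not part of GCC): the user-visible
   forms handled here are

	void *fp = __builtin_frame_address (0);
	void *ra = __builtin_return_address (0);
	void *up = __builtin_frame_address (1);

   A zero count is always supported; the nonzero count in the last line
   triggers the -Wframe-address warning above, since there is no guarantee
   that outer frames exist or can be reached safely.  */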
5071
5072 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5073 failed and the caller should emit a normal call. */
5074
5075 static rtx
5076 expand_builtin_alloca (tree exp)
5077 {
5078 rtx op0;
5079 rtx result;
5080 unsigned int align;
5081 tree fndecl = get_callee_fndecl (exp);
5082 HOST_WIDE_INT max_size;
5083 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5084 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5085 bool valid_arglist
5086 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5087 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5088 VOID_TYPE)
5089 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5090 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5091 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5092
5093 if (!valid_arglist)
5094 return NULL_RTX;
5095
5096 if ((alloca_for_var && !warn_vla_limit)
5097 || (!alloca_for_var && !warn_alloca_limit))
5098 {
5099 /* -Walloca-larger-than and -Wvla-larger-than settings override
5100 the more general -Walloc-size-larger-than so unless either of
5101 the former options is specified check the alloca arguments for
5102 overflow. */
5103 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5104 int idx[] = { 0, -1 };
5105 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5106 }
5107
5108 /* Compute the argument. */
5109 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5110
5111 /* Compute the alignment. */
5112 align = (fcode == BUILT_IN_ALLOCA
5113 ? BIGGEST_ALIGNMENT
5114 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5115
5116 /* Compute the maximum size. */
5117 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5118 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5119 : -1);
5120
5121 /* Allocate the desired space. If the allocation stems from the declaration
5122 of a variable-sized object, it cannot accumulate. */
5123 result
5124 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5125 result = convert_memory_address (ptr_mode, result);
5126
5127 return result;
5128 }
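
/* Illustrative sketch (editor's note, not part of GCC): the entry points
   expanded above correspond to source-level uses such as

	char *p = __builtin_alloca (n);
	char *q = __builtin_alloca_with_align (n, 128);

   where the second argument of __builtin_alloca_with_align is the
   requested alignment in bits (128 bits, i.e. 16 bytes, here).  The
   _WITH_ALIGN_AND_MAX variant additionally carries a maximum-size bound
   as its third argument.  */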
5129
5130 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
5131 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5132 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment for
5133 the handle_builtin_stack_restore function. */
5134
5135 static rtx
5136 expand_asan_emit_allocas_unpoison (tree exp)
5137 {
5138 tree arg0 = CALL_EXPR_ARG (exp, 0);
5139 tree arg1 = CALL_EXPR_ARG (exp, 1);
5140 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5141 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5142 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5143 stack_pointer_rtx, NULL_RTX, 0,
5144 OPTAB_LIB_WIDEN);
5145 off = convert_modes (ptr_mode, Pmode, off, 0);
5146 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5147 OPTAB_LIB_WIDEN);
5148 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5149 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5150 top, ptr_mode, bot, ptr_mode);
5151 return ret;
5152 }
5153
5154 /* Expand a call to bswap builtin in EXP.
5155 Return NULL_RTX if a normal call should be emitted rather than expanding the
5156 function in-line. If convenient, the result should be placed in TARGET.
5157 SUBTARGET may be used as the target for computing one of EXP's operands. */
5158
5159 static rtx
5160 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5161 rtx subtarget)
5162 {
5163 tree arg;
5164 rtx op0;
5165
5166 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5167 return NULL_RTX;
5168
5169 arg = CALL_EXPR_ARG (exp, 0);
5170 op0 = expand_expr (arg,
5171 subtarget && GET_MODE (subtarget) == target_mode
5172 ? subtarget : NULL_RTX,
5173 target_mode, EXPAND_NORMAL);
5174 if (GET_MODE (op0) != target_mode)
5175 op0 = convert_to_mode (target_mode, op0, 1);
5176
5177 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5178
5179 gcc_assert (target);
5180
5181 return convert_to_mode (target_mode, target, 1);
5182 }
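
/* Illustrative sketch (editor's note, not part of GCC): a call such as

	unsigned int u = __builtin_bswap32 (0x12345678);

   (which yields 0x78563412) is typically expanded to a byte-swap
   instruction, or an equivalent shift sequence, rather than a library
   call.  */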
5183
5184 /* Expand a call to a unary builtin in EXP.
5185 Return NULL_RTX if a normal call should be emitted rather than expanding the
5186 function in-line. If convenient, the result should be placed in TARGET.
5187 SUBTARGET may be used as the target for computing one of EXP's operands. */
5188
5189 static rtx
5190 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5191 rtx subtarget, optab op_optab)
5192 {
5193 rtx op0;
5194
5195 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5196 return NULL_RTX;
5197
5198 /* Compute the argument. */
5199 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5200 (subtarget
5201 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5202 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5203 VOIDmode, EXPAND_NORMAL);
5204 /* Compute op, into TARGET if possible.
5205 Set TARGET to wherever the result comes back. */
5206 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5207 op_optab, op0, target, op_optab != clrsb_optab);
5208 gcc_assert (target);
5209
5210 return convert_to_mode (target_mode, target, 0);
5211 }
5212
5213 /* Expand a call to __builtin_expect. We just return our argument,
5214 as the builtin_expect semantics should already have been applied by
5215 the tree branch prediction pass. */
5216
5217 static rtx
5218 expand_builtin_expect (tree exp, rtx target)
5219 {
5220 tree arg;
5221
5222 if (call_expr_nargs (exp) < 2)
5223 return const0_rtx;
5224 arg = CALL_EXPR_ARG (exp, 0);
5225
5226 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5227 /* When guessing was done, the hints should be already stripped away. */
5228 gcc_assert (!flag_guess_branch_prob
5229 || optimize == 0 || seen_error ());
5230 return target;
5231 }
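
/* Illustrative sketch (editor's note, not part of GCC): by the time a use
   such as

	if (__builtin_expect (p == NULL, 0))
	  return -1;

   reaches RTL expansion, the probability hint has already been consumed by
   the tree-level branch predictors, so only the first argument remains to
   be expanded.  */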
5232
5233 /* Expand a call to __builtin_assume_aligned. We just return our first
5234 argument, as the builtin_assume_aligned semantics should already have
5235 been applied by CCP. */
5236
5237 static rtx
5238 expand_builtin_assume_aligned (tree exp, rtx target)
5239 {
5240 if (call_expr_nargs (exp) < 2)
5241 return const0_rtx;
5242 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5243 EXPAND_NORMAL);
5244 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5245 && (call_expr_nargs (exp) < 3
5246 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5247 return target;
5248 }
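
/* Illustrative sketch (editor's note, not part of GCC): a source-level use
   looks like

	double *ap = __builtin_assume_aligned (p, 32);

   The alignment fact was already recorded by CCP, so here we merely
   evaluate and return the pointer argument, asserting that the alignment
   (and optional misalignment) operands have no side effects.  */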
5249
5250 void
5251 expand_builtin_trap (void)
5252 {
5253 if (targetm.have_trap ())
5254 {
5255 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5256 /* For trap insns when not accumulating outgoing args force
5257 REG_ARGS_SIZE note to prevent crossjumping of calls with
5258 different args sizes. */
5259 if (!ACCUMULATE_OUTGOING_ARGS)
5260 add_args_size_note (insn, stack_pointer_delta);
5261 }
5262 else
5263 {
5264 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5265 tree call_expr = build_call_expr (fn, 0);
5266 expand_call (call_expr, NULL_RTX, false);
5267 }
5268
5269 emit_barrier ();
5270 }
5271
5272 /* Expand a call to __builtin_unreachable. We do nothing except emit
5273 a barrier saying that control flow will not pass here.
5274
5275 It is the responsibility of the program being compiled to ensure
5276 that control flow never reaches __builtin_unreachable. */
5277 static void
5278 expand_builtin_unreachable (void)
5279 {
5280 emit_barrier ();
5281 }
5282
5283 /* Expand EXP, a call to fabs, fabsf or fabsl.
5284 Return NULL_RTX if a normal call should be emitted rather than expanding
5285 the function inline. If convenient, the result should be placed
5286 in TARGET. SUBTARGET may be used as the target for computing
5287 the operand. */
5288
5289 static rtx
5290 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5291 {
5292 machine_mode mode;
5293 tree arg;
5294 rtx op0;
5295
5296 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5297 return NULL_RTX;
5298
5299 arg = CALL_EXPR_ARG (exp, 0);
5300 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5301 mode = TYPE_MODE (TREE_TYPE (arg));
5302 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5303 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5304 }
5305
5306 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5307 Return NULL if a normal call should be emitted rather than expanding the
5308 function inline. If convenient, the result should be placed in TARGET.
5309 SUBTARGET may be used as the target for computing the operand. */
5310
5311 static rtx
5312 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5313 {
5314 rtx op0, op1;
5315 tree arg;
5316
5317 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5318 return NULL_RTX;
5319
5320 arg = CALL_EXPR_ARG (exp, 0);
5321 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5322
5323 arg = CALL_EXPR_ARG (exp, 1);
5324 op1 = expand_normal (arg);
5325
5326 return expand_copysign (op0, op1, target);
5327 }
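
/* Illustrative sketch (editor's note, not part of GCC): copysign combines
   the magnitude of its first argument with the sign of its second, so

	double a = copysign (3.0, -1.0);
	double b = copysign (-3.0, 0.0);

   yield -3.0 and 3.0 respectively; expand_copysign normally implements
   this with bit operations on the floating-point representation instead
   of a library call.  */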
5328
5329 /* Expand a call to __builtin___clear_cache. */
5330
5331 static rtx
5332 expand_builtin___clear_cache (tree exp)
5333 {
5334 if (!targetm.code_for_clear_cache)
5335 {
5336 #ifdef CLEAR_INSN_CACHE
5337 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5338 does something. Just do the default expansion to a call to
5339 __clear_cache(). */
5340 return NULL_RTX;
5341 #else
5342 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5343 does nothing. There is no need to call it. Do nothing. */
5344 return const0_rtx;
5345 #endif /* CLEAR_INSN_CACHE */
5346 }
5347
5348 /* We have a "clear_cache" insn, and it will handle everything. */
5349 tree begin, end;
5350 rtx begin_rtx, end_rtx;
5351
5352 /* We must not expand to a library call. If we did, any
5353 fallback library function in libgcc that might contain a call to
5354 __builtin___clear_cache() would recurse infinitely. */
5355 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5356 {
5357 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5358 return const0_rtx;
5359 }
5360
5361 if (targetm.have_clear_cache ())
5362 {
5363 struct expand_operand ops[2];
5364
5365 begin = CALL_EXPR_ARG (exp, 0);
5366 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5367
5368 end = CALL_EXPR_ARG (exp, 1);
5369 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5370
5371 create_address_operand (&ops[0], begin_rtx);
5372 create_address_operand (&ops[1], end_rtx);
5373 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5374 return const0_rtx;
5375 }
5376 return const0_rtx;
5377 }
5378
5379 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5380
5381 static rtx
5382 round_trampoline_addr (rtx tramp)
5383 {
5384 rtx temp, addend, mask;
5385
5386 /* If we don't need too much alignment, we'll have been guaranteed
5387 proper alignment by get_trampoline_type. */
5388 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5389 return tramp;
5390
5391 /* Round address up to desired boundary. */
5392 temp = gen_reg_rtx (Pmode);
5393 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5394 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5395
5396 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5397 temp, 0, OPTAB_LIB_WIDEN);
5398 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5399 temp, 0, OPTAB_LIB_WIDEN);
5400
5401 return tramp;
5402 }
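
/* Worked example (editor's note, not part of GCC): the two binops above
   implement the usual round-up sequence

	rounded = (tramp + align - 1) & -align

   with align = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  For a 16-byte
   trampoline alignment, an address of 0x1003 rounds up to 0x1010.  */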
5403
5404 static rtx
5405 expand_builtin_init_trampoline (tree exp, bool onstack)
5406 {
5407 tree t_tramp, t_func, t_chain;
5408 rtx m_tramp, r_tramp, r_chain, tmp;
5409
5410 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5411 POINTER_TYPE, VOID_TYPE))
5412 return NULL_RTX;
5413
5414 t_tramp = CALL_EXPR_ARG (exp, 0);
5415 t_func = CALL_EXPR_ARG (exp, 1);
5416 t_chain = CALL_EXPR_ARG (exp, 2);
5417
5418 r_tramp = expand_normal (t_tramp);
5419 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5420 MEM_NOTRAP_P (m_tramp) = 1;
5421
5422 /* If ONSTACK, the TRAMP argument should be the address of a field
5423 within the local function's FRAME decl. Either way, let's see if
5424 we can fill in the MEM_ATTRs for this memory. */
5425 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5426 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5427
5428 /* Creator of a heap trampoline is responsible for making sure the
5429 address is aligned to at least STACK_BOUNDARY. Normally malloc
5430 will ensure this anyhow. */
5431 tmp = round_trampoline_addr (r_tramp);
5432 if (tmp != r_tramp)
5433 {
5434 m_tramp = change_address (m_tramp, BLKmode, tmp);
5435 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5436 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5437 }
5438
5439 /* The FUNC argument should be the address of the nested function.
5440 Extract the actual function decl to pass to the hook. */
5441 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5442 t_func = TREE_OPERAND (t_func, 0);
5443 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5444
5445 r_chain = expand_normal (t_chain);
5446
5447 /* Generate insns to initialize the trampoline. */
5448 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5449
5450 if (onstack)
5451 {
5452 trampolines_created = 1;
5453
5454 if (targetm.calls.custom_function_descriptors != 0)
5455 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5456 "trampoline generated for nested function %qD", t_func);
5457 }
5458
5459 return const0_rtx;
5460 }
5461
5462 static rtx
5463 expand_builtin_adjust_trampoline (tree exp)
5464 {
5465 rtx tramp;
5466
5467 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5468 return NULL_RTX;
5469
5470 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5471 tramp = round_trampoline_addr (tramp);
5472 if (targetm.calls.trampoline_adjust_address)
5473 tramp = targetm.calls.trampoline_adjust_address (tramp);
5474
5475 return tramp;
5476 }
5477
5478 /* Expand a call to the builtin descriptor initialization routine.
5479 A descriptor is made up of a pair of pointers: the static
5480 chain and the code entry, in that order. */
5481
5482 static rtx
5483 expand_builtin_init_descriptor (tree exp)
5484 {
5485 tree t_descr, t_func, t_chain;
5486 rtx m_descr, r_descr, r_func, r_chain;
5487
5488 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5489 VOID_TYPE))
5490 return NULL_RTX;
5491
5492 t_descr = CALL_EXPR_ARG (exp, 0);
5493 t_func = CALL_EXPR_ARG (exp, 1);
5494 t_chain = CALL_EXPR_ARG (exp, 2);
5495
5496 r_descr = expand_normal (t_descr);
5497 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5498 MEM_NOTRAP_P (m_descr) = 1;
5499
5500 r_func = expand_normal (t_func);
5501 r_chain = expand_normal (t_chain);
5502
5503 /* Generate insns to initialize the descriptor. */
5504 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5505 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5506 POINTER_SIZE / BITS_PER_UNIT), r_func);
5507
5508 return const0_rtx;
5509 }
5510
5511 /* Expand a call to the builtin descriptor adjustment routine. */
5512
5513 static rtx
5514 expand_builtin_adjust_descriptor (tree exp)
5515 {
5516 rtx tramp;
5517
5518 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5519 return NULL_RTX;
5520
5521 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5522
5523 /* Unalign the descriptor to allow runtime identification. */
5524 tramp = plus_constant (ptr_mode, tramp,
5525 targetm.calls.custom_function_descriptors);
5526
5527 return force_operand (tramp, NULL_RTX);
5528 }
5529
5530 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5531 function. The function first checks whether the back end provides
5532 an insn to implement signbit for the respective mode. If not, it
5533 checks whether the floating point format of the value is such that
5534 the sign bit can be extracted. If that is not the case, error out.
5535 EXP is the expression that is a call to the builtin function; if
5536 convenient, the result should be placed in TARGET. */
5537 static rtx
5538 expand_builtin_signbit (tree exp, rtx target)
5539 {
5540 const struct real_format *fmt;
5541 scalar_float_mode fmode;
5542 scalar_int_mode rmode, imode;
5543 tree arg;
5544 int word, bitpos;
5545 enum insn_code icode;
5546 rtx temp;
5547 location_t loc = EXPR_LOCATION (exp);
5548
5549 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5550 return NULL_RTX;
5551
5552 arg = CALL_EXPR_ARG (exp, 0);
5553 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5554 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5555 fmt = REAL_MODE_FORMAT (fmode);
5556
5557 arg = builtin_save_expr (arg);
5558
5559 /* Expand the argument yielding a RTX expression. */
5560 temp = expand_normal (arg);
5561
5562 /* Check if the back end provides an insn that handles signbit for the
5563 argument's mode. */
5564 icode = optab_handler (signbit_optab, fmode);
5565 if (icode != CODE_FOR_nothing)
5566 {
5567 rtx_insn *last = get_last_insn ();
5568 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5569 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5570 return target;
5571 delete_insns_since (last);
5572 }
5573
5574 /* For floating point formats without a sign bit, implement signbit
5575 as "ARG < 0.0". */
5576 bitpos = fmt->signbit_ro;
5577 if (bitpos < 0)
5578 {
5579 /* But we can't do this if the format supports signed zero. */
5580 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5581
5582 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5583 build_real (TREE_TYPE (arg), dconst0));
5584 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5585 }
5586
5587 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5588 {
5589 imode = int_mode_for_mode (fmode).require ();
5590 temp = gen_lowpart (imode, temp);
5591 }
5592 else
5593 {
5594 imode = word_mode;
5595 /* Handle targets with different FP word orders. */
5596 if (FLOAT_WORDS_BIG_ENDIAN)
5597 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5598 else
5599 word = bitpos / BITS_PER_WORD;
5600 temp = operand_subword_force (temp, word, fmode);
5601 bitpos = bitpos % BITS_PER_WORD;
5602 }
5603
5604 /* Force the intermediate word_mode (or narrower) result into a
5605 register. This avoids attempting to create paradoxical SUBREGs
5606 of floating point modes below. */
5607 temp = force_reg (imode, temp);
5608
5609 /* If the bitpos is within the "result mode" lowpart, the operation
5610 can be implemented with a single bitwise AND. Otherwise, we need
5611 a right shift and an AND. */
5612
5613 if (bitpos < GET_MODE_BITSIZE (rmode))
5614 {
5615 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5616
5617 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5618 temp = gen_lowpart (rmode, temp);
5619 temp = expand_binop (rmode, and_optab, temp,
5620 immed_wide_int_const (mask, rmode),
5621 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5622 }
5623 else
5624 {
5625 /* Perform a logical right shift to place the signbit in the least
5626 significant bit, then truncate the result to the desired mode
5627 and mask just this bit. */
5628 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5629 temp = gen_lowpart (rmode, temp);
5630 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5631 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5632 }
5633
5634 return temp;
5635 }
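
/* Illustrative sketch (editor's note, not part of GCC): for IEEE binary64
   on a 64-bit target, a call such as

	int neg = __builtin_signbit (-0.0);

   returns nonzero; since the sign bit position (63) does not fit in the
   32-bit result mode, the else branch above shifts the bit pattern right
   by 63 and masks with 1.  */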
5636
5637 /* Expand fork or exec calls. TARGET is the desired target of the
5638 call. EXP is the call. FN is the
5639 decl of the actual function being called. IGNORE is nonzero if the
5640 value is to be ignored. */
5641
5642 static rtx
5643 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5644 {
5645 tree id, decl;
5646 tree call;
5647
5648 /* If we are not profiling, just call the function. */
5649 if (!profile_arc_flag)
5650 return NULL_RTX;
5651
5652 /* Otherwise call the wrapper. This should be equivalent for the rest of
5653 the compiler, so the code does not diverge, and the wrapper may run the
5654 code necessary for keeping the profiling sane. */
5655
5656 switch (DECL_FUNCTION_CODE (fn))
5657 {
5658 case BUILT_IN_FORK:
5659 id = get_identifier ("__gcov_fork");
5660 break;
5661
5662 case BUILT_IN_EXECL:
5663 id = get_identifier ("__gcov_execl");
5664 break;
5665
5666 case BUILT_IN_EXECV:
5667 id = get_identifier ("__gcov_execv");
5668 break;
5669
5670 case BUILT_IN_EXECLP:
5671 id = get_identifier ("__gcov_execlp");
5672 break;
5673
5674 case BUILT_IN_EXECLE:
5675 id = get_identifier ("__gcov_execle");
5676 break;
5677
5678 case BUILT_IN_EXECVP:
5679 id = get_identifier ("__gcov_execvp");
5680 break;
5681
5682 case BUILT_IN_EXECVE:
5683 id = get_identifier ("__gcov_execve");
5684 break;
5685
5686 default:
5687 gcc_unreachable ();
5688 }
5689
5690 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5691 FUNCTION_DECL, id, TREE_TYPE (fn));
5692 DECL_EXTERNAL (decl) = 1;
5693 TREE_PUBLIC (decl) = 1;
5694 DECL_ARTIFICIAL (decl) = 1;
5695 TREE_NOTHROW (decl) = 1;
5696 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5697 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5698 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5699 return expand_call (call, target, ignore);
5700 }
5701
5702
5703 \f
5704 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5705 the pointer in these functions is void*, the tree optimizers may remove
5706 casts. The mode computed in expand_builtin isn't reliable either, due
5707 to __sync_bool_compare_and_swap.
5708
5709 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5710 group of builtins. This gives us log2 of the mode size. */
5711
5712 static inline machine_mode
5713 get_builtin_sync_mode (int fcode_diff)
5714 {
5715 /* The size is not negotiable, so ask not to get BLKmode in return
5716 if the target indicates that a smaller size would be better. */
5717 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5718 }
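
/* Editor's note, for illustration: FCODE_DIFF is log2 of the access size
   in bytes, so the requested modes are

	0 -> 8-bit, 1 -> 16-bit, 2 -> 32-bit, 3 -> 64-bit, 4 -> 128-bit

   e.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2
   selects the 32-bit integer mode.  */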
5719
5720 /* Expand the memory expression LOC and return the appropriate memory operand
5721 for the builtin_sync operations. */
5722
5723 static rtx
5724 get_builtin_sync_mem (tree loc, machine_mode mode)
5725 {
5726 rtx addr, mem;
5727
5728 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5729 addr = convert_memory_address (Pmode, addr);
5730
5731 /* Note that we explicitly do not want any alias information for this
5732 memory, so that we kill all other live memories. Otherwise we don't
5733 satisfy the full barrier semantics of the intrinsic. */
5734 mem = validize_mem (gen_rtx_MEM (mode, addr));
5735
5736 /* The alignment needs to be at least that of the mode. */
5737 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5738 get_pointer_alignment (loc)));
5739 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5740 MEM_VOLATILE_P (mem) = 1;
5741
5742 return mem;
5743 }
5744
5745 /* Make sure an argument is in the right mode.
5746 EXP is the tree argument.
5747 MODE is the mode it should be in. */
5748
5749 static rtx
5750 expand_expr_force_mode (tree exp, machine_mode mode)
5751 {
5752 rtx val;
5753 machine_mode old_mode;
5754
5755 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5756 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5757 of CONST_INTs, where we know the old_mode only from the call argument. */
5758
5759 old_mode = GET_MODE (val);
5760 if (old_mode == VOIDmode)
5761 old_mode = TYPE_MODE (TREE_TYPE (exp));
5762 val = convert_modes (mode, old_mode, val, 1);
5763 return val;
5764 }
5765
5766
5767 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5768 EXP is the CALL_EXPR. CODE is the rtx code
5769 that corresponds to the arithmetic or logical operation from the name;
5770 an exception here is that NOT actually means NAND. TARGET is an optional
5771 place for us to store the results; AFTER is true if this is the
5772 fetch_and_xxx form. */
5773
5774 static rtx
5775 expand_builtin_sync_operation (machine_mode mode, tree exp,
5776 enum rtx_code code, bool after,
5777 rtx target)
5778 {
5779 rtx val, mem;
5780 location_t loc = EXPR_LOCATION (exp);
5781
5782 if (code == NOT && warn_sync_nand)
5783 {
5784 tree fndecl = get_callee_fndecl (exp);
5785 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5786
5787 static bool warned_f_a_n, warned_n_a_f;
5788
5789 switch (fcode)
5790 {
5791 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5792 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5793 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5794 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5795 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5796 if (warned_f_a_n)
5797 break;
5798
5799 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5800 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5801 warned_f_a_n = true;
5802 break;
5803
5804 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5805 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5806 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5807 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5808 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5809 if (warned_n_a_f)
5810 break;
5811
5812 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5813 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5814 warned_n_a_f = true;
5815 break;
5816
5817 default:
5818 gcc_unreachable ();
5819 }
5820 }
5821
5822 /* Expand the operands. */
5823 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5824 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5825
5826 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5827 after);
5828 }
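
/* Illustrative sketch (editor's note, not part of GCC): the legacy __sync
   builtins handled here look like

	static int counter;
	int old_val = __sync_fetch_and_add (&counter, 1);
	int new_val = __sync_add_and_fetch (&counter, 1);

   Both map onto expand_atomic_fetch_op with full-barrier semantics; AFTER
   selects whether the pre-operation or post-operation value is
   returned.  */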
5829
5830 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5831 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5832 true if this is the boolean form. TARGET is a place for us to store the
5833 results; this is NOT optional if IS_BOOL is true. */
5834
5835 static rtx
5836 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5837 bool is_bool, rtx target)
5838 {
5839 rtx old_val, new_val, mem;
5840 rtx *pbool, *poval;
5841
5842 /* Expand the operands. */
5843 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5844 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5845 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5846
5847 pbool = poval = NULL;
5848 if (target != const0_rtx)
5849 {
5850 if (is_bool)
5851 pbool = &target;
5852 else
5853 poval = &target;
5854 }
5855 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5856 false, MEMMODEL_SYNC_SEQ_CST,
5857 MEMMODEL_SYNC_SEQ_CST))
5858 return NULL_RTX;
5859
5860 return target;
5861 }
5862
5863 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5864 general form is actually an atomic exchange, and some targets only
5865 support a reduced form with the second argument being a constant 1.
5866 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5867 the results. */
5868
5869 static rtx
5870 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5871 rtx target)
5872 {
5873 rtx val, mem;
5874
5875 /* Expand the operands. */
5876 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5877 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5878
5879 return expand_sync_lock_test_and_set (target, mem, val);
5880 }
5881
5882 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5883
5884 static void
5885 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5886 {
5887 rtx mem;
5888
5889 /* Expand the operands. */
5890 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5891
5892 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5893 }
5894
5895 /* Given an integer representing an ``enum memmodel'', verify its
5896 correctness and return the memory model enum. */
5897
5898 static enum memmodel
5899 get_memmodel (tree exp)
5900 {
5901 rtx op;
5902 unsigned HOST_WIDE_INT val;
5903 source_location loc
5904 = expansion_point_location_if_in_system_header (input_location);
5905
5906 /* If the parameter is not a constant, it's a run time value so we'll just
5907 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5908 if (TREE_CODE (exp) != INTEGER_CST)
5909 return MEMMODEL_SEQ_CST;
5910
5911 op = expand_normal (exp);
5912
5913 val = INTVAL (op);
5914 if (targetm.memmodel_check)
5915 val = targetm.memmodel_check (val);
5916 else if (val & ~MEMMODEL_MASK)
5917 {
5918 warning_at (loc, OPT_Winvalid_memory_model,
5919 "unknown architecture specifier in memory model to builtin");
5920 return MEMMODEL_SEQ_CST;
5921 }
5922
5923 /* We should never see an explicit user SYNC memmodel, so >= LAST works. */
5924 if (memmodel_base (val) >= MEMMODEL_LAST)
5925 {
5926 warning_at (loc, OPT_Winvalid_memory_model,
5927 "invalid memory model argument to builtin");
5928 return MEMMODEL_SEQ_CST;
5929 }
5930
5931 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5932 be conservative and promote consume to acquire. */
5933 if (val == MEMMODEL_CONSUME)
5934 val = MEMMODEL_ACQUIRE;
5935
5936 return (enum memmodel) val;
5937 }
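
/* Illustrative sketch (editor's note, not part of GCC): the memory-model
   argument checked above is normally one of the __ATOMIC_* constants, as
   in

	__atomic_store_n (&flag, 1, __ATOMIC_RELEASE);
	int v = __atomic_load_n (&flag, __ATOMIC_CONSUME);

   A non-constant model degrades to SEQ_CST, and per the PR 59448 note
   CONSUME is treated as ACQUIRE.  */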
5938
5939 /* Expand the __atomic_exchange intrinsic:
5940 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5941 EXP is the CALL_EXPR.
5942 TARGET is an optional place for us to store the results. */
5943
5944 static rtx
5945 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5946 {
5947 rtx val, mem;
5948 enum memmodel model;
5949
5950 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5951
5952 if (!flag_inline_atomics)
5953 return NULL_RTX;
5954
5955 /* Expand the operands. */
5956 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5957 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5958
5959 return expand_atomic_exchange (target, mem, val, model);
5960 }
5961
5962 /* Expand the __atomic_compare_exchange intrinsic:
5963 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5964 TYPE desired, BOOL weak,
5965 enum memmodel success,
5966 enum memmodel failure)
5967 EXP is the CALL_EXPR.
5968 TARGET is an optional place for us to store the results. */
5969
5970 static rtx
5971 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5972 rtx target)
5973 {
5974 rtx expect, desired, mem, oldval;
5975 rtx_code_label *label;
5976 enum memmodel success, failure;
5977 tree weak;
5978 bool is_weak;
5979 source_location loc
5980 = expansion_point_location_if_in_system_header (input_location);
5981
5982 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5983 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5984
5985 if (failure > success)
5986 {
5987 warning_at (loc, OPT_Winvalid_memory_model,
5988 "failure memory model cannot be stronger than success "
5989 "memory model for %<__atomic_compare_exchange%>");
5990 success = MEMMODEL_SEQ_CST;
5991 }
5992
5993 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5994 {
5995 warning_at (loc, OPT_Winvalid_memory_model,
5996 "invalid failure memory model for "
5997 "%<__atomic_compare_exchange%>");
5998 failure = MEMMODEL_SEQ_CST;
5999 success = MEMMODEL_SEQ_CST;
6000 }
6001
6002
6003 if (!flag_inline_atomics)
6004 return NULL_RTX;
6005
6006 /* Expand the operands. */
6007 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6008
6009 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6010 expect = convert_memory_address (Pmode, expect);
6011 expect = gen_rtx_MEM (mode, expect);
6012 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6013
6014 weak = CALL_EXPR_ARG (exp, 3);
6015 is_weak = false;
6016 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6017 is_weak = true;
6018
6019 if (target == const0_rtx)
6020 target = NULL;
6021
6022 /* Lest the rtl backend create a race condition with an improper store
6023 to memory, always create a new pseudo for OLDVAL. */
6024 oldval = NULL;
6025
6026 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6027 is_weak, success, failure))
6028 return NULL_RTX;
6029
6030 /* Conditionally store back to EXPECT, lest we create a race condition
6031 with an improper store to memory. */
6032 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6033 the normal case where EXPECT is totally private, i.e. a register. At
6034 which point the store can be unconditional. */
6035 label = gen_label_rtx ();
6036 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6037 GET_MODE (target), 1, label);
6038 emit_move_insn (expect, oldval);
6039 emit_label (label);
6040
6041 return target;
6042 }
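
/* Illustrative sketch (editor's note, not part of GCC; variable names are
   placeholders): the strong form expanded above corresponds to

	int expected = 0;
	int ok = __atomic_compare_exchange_n (&lock_word, &expected, 1,
					      false, __ATOMIC_ACQUIRE,
					      __ATOMIC_RELAXED);

   The fourth argument selects the weak form.  On failure the current
   value is written back through EXPECTED, which is why the conditional
   store to the EXPECT memory is emitted above.  */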
6043
6044 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6045 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6046 call. The weak parameter must be dropped to match the expected parameter
6047 list and the expected argument changed from value to pointer to memory
6048 slot. */
6049
6050 static void
6051 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6052 {
6053 unsigned int z;
6054 vec<tree, va_gc> *vec;
6055
6056 vec_alloc (vec, 5);
6057 vec->quick_push (gimple_call_arg (call, 0));
6058 tree expected = gimple_call_arg (call, 1);
6059 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6060 TREE_TYPE (expected));
6061 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6062 if (expd != x)
6063 emit_move_insn (x, expd);
6064 tree v = make_tree (TREE_TYPE (expected), x);
6065 vec->quick_push (build1 (ADDR_EXPR,
6066 build_pointer_type (TREE_TYPE (expected)), v));
6067 vec->quick_push (gimple_call_arg (call, 2));
6068 /* Skip the boolean weak parameter. */
6069 for (z = 4; z < 6; z++)
6070 vec->quick_push (gimple_call_arg (call, z));
6071 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6072 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6073 gcc_assert (bytes_log2 < 5);
6074 built_in_function fncode
6075 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6076 + bytes_log2);
6077 tree fndecl = builtin_decl_explicit (fncode);
6078 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6079 fndecl);
6080 tree exp = build_call_vec (boolean_type_node, fn, vec);
6081 tree lhs = gimple_call_lhs (call);
6082 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6083 if (lhs)
6084 {
6085 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6086 if (GET_MODE (boolret) != mode)
6087 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6088 x = force_reg (mode, x);
6089 write_complex_part (target, boolret, true);
6090 write_complex_part (target, x, false);
6091 }
6092 }
6093
6094 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6095
6096 void
6097 expand_ifn_atomic_compare_exchange (gcall *call)
6098 {
6099 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6100 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6101 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6102 rtx expect, desired, mem, oldval, boolret;
6103 enum memmodel success, failure;
6104 tree lhs;
6105 bool is_weak;
6106 source_location loc
6107 = expansion_point_location_if_in_system_header (gimple_location (call));
6108
6109 success = get_memmodel (gimple_call_arg (call, 4));
6110 failure = get_memmodel (gimple_call_arg (call, 5));
6111
6112 if (failure > success)
6113 {
6114 warning_at (loc, OPT_Winvalid_memory_model,
6115 "failure memory model cannot be stronger than success "
6116 "memory model for %<__atomic_compare_exchange%>");
6117 success = MEMMODEL_SEQ_CST;
6118 }
6119
6120 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6121 {
6122 warning_at (loc, OPT_Winvalid_memory_model,
6123 "invalid failure memory model for "
6124 "%<__atomic_compare_exchange%>");
6125 failure = MEMMODEL_SEQ_CST;
6126 success = MEMMODEL_SEQ_CST;
6127 }
6128
6129 if (!flag_inline_atomics)
6130 {
6131 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6132 return;
6133 }
6134
6135 /* Expand the operands. */
6136 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6137
6138 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6139 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6140
6141 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6142
6143 boolret = NULL;
6144 oldval = NULL;
6145
6146 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6147 is_weak, success, failure))
6148 {
6149 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6150 return;
6151 }
6152
6153 lhs = gimple_call_lhs (call);
6154 if (lhs)
6155 {
6156 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6157 if (GET_MODE (boolret) != mode)
6158 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6159 write_complex_part (target, boolret, true);
6160 write_complex_part (target, oldval, false);
6161 }
6162 }
6163
6164 /* Expand the __atomic_load intrinsic:
6165 TYPE __atomic_load (TYPE *object, enum memmodel)
6166 EXP is the CALL_EXPR.
6167 TARGET is an optional place for us to store the results. */
6168
6169 static rtx
6170 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6171 {
6172 rtx mem;
6173 enum memmodel model;
6174
6175 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6176 if (is_mm_release (model) || is_mm_acq_rel (model))
6177 {
6178 source_location loc
6179 = expansion_point_location_if_in_system_header (input_location);
6180 warning_at (loc, OPT_Winvalid_memory_model,
6181 "invalid memory model for %<__atomic_load%>");
6182 model = MEMMODEL_SEQ_CST;
6183 }
6184
6185 if (!flag_inline_atomics)
6186 return NULL_RTX;
6187
6188 /* Expand the operand. */
6189 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6190
6191 return expand_atomic_load (target, mem, model);
6192 }
6193
6194
6195 /* Expand the __atomic_store intrinsic:
6196 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6197 EXP is the CALL_EXPR.
6198 TARGET is an optional place for us to store the results. */
6199
6200 static rtx
6201 expand_builtin_atomic_store (machine_mode mode, tree exp)
6202 {
6203 rtx mem, val;
6204 enum memmodel model;
6205
6206 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6207 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6208 || is_mm_release (model)))
6209 {
6210 source_location loc
6211 = expansion_point_location_if_in_system_header (input_location);
6212 warning_at (loc, OPT_Winvalid_memory_model,
6213 "invalid memory model for %<__atomic_store%>");
6214 model = MEMMODEL_SEQ_CST;
6215 }
6216
6217 if (!flag_inline_atomics)
6218 return NULL_RTX;
6219
6220 /* Expand the operands. */
6221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6222 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6223
6224 return expand_atomic_store (mem, val, model, false);
6225 }
6226
6227 /* Expand the __atomic_fetch_XXX intrinsic:
6228 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6229 EXP is the CALL_EXPR.
6230 TARGET is an optional place for us to store the results.
6231 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6232 FETCH_AFTER is true if returning the result of the operation.
6233 FETCH_AFTER is false if returning the value before the operation.
6234 IGNORE is true if the result is not used.
6235 EXT_CALL is the correct builtin for an external call if this cannot be
6236 resolved to an instruction sequence. */
6237
6238 static rtx
6239 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6240 enum rtx_code code, bool fetch_after,
6241 bool ignore, enum built_in_function ext_call)
6242 {
6243 rtx val, mem, ret;
6244 enum memmodel model;
6245 tree fndecl;
6246 tree addr;
6247
6248 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6249
6250 /* Expand the operands. */
6251 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6252 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6253
6254 /* Only try generating instructions if inlining is turned on. */
6255 if (flag_inline_atomics)
6256 {
6257 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6258 if (ret)
6259 return ret;
6260 }
6261
6262 /* Return if a different routine isn't needed for the library call. */
6263 if (ext_call == BUILT_IN_NONE)
6264 return NULL_RTX;
6265
6266 /* Change the call to the specified function. */
6267 fndecl = get_callee_fndecl (exp);
6268 addr = CALL_EXPR_FN (exp);
6269 STRIP_NOPS (addr);
6270
6271 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6272 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6273
6274 /* If we will emit code after the call, the call cannot be a tail call.
6275 If it is emitted as a tail call, a barrier is emitted after it, and
6276 then all trailing code is removed. */
6277 if (!ignore)
6278 CALL_EXPR_TAILCALL (exp) = 0;
6279
6280 /* Expand the call here so we can emit trailing code. */
6281 ret = expand_call (exp, target, ignore);
6282
6283 /* Replace the original function just in case it matters. */
6284 TREE_OPERAND (addr, 0) = fndecl;
6285
6286 /* Then issue the arithmetic correction to return the right result. */
6287 if (!ignore)
6288 {
6289 if (code == NOT)
6290 {
6291 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6292 OPTAB_LIB_WIDEN);
6293 ret = expand_simple_unop (mode, NOT, ret, target, true);
6294 }
6295 else
6296 ret = expand_simple_binop (mode, code, ret, val, target, true,
6297 OPTAB_LIB_WIDEN);
6298 }
6299 return ret;
6300 }
6301
6302 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6303
6304 void
6305 expand_ifn_atomic_bit_test_and (gcall *call)
6306 {
6307 tree ptr = gimple_call_arg (call, 0);
6308 tree bit = gimple_call_arg (call, 1);
6309 tree flag = gimple_call_arg (call, 2);
6310 tree lhs = gimple_call_lhs (call);
6311 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6312 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6313 enum rtx_code code;
6314 optab optab;
6315 struct expand_operand ops[5];
6316
6317 gcc_assert (flag_inline_atomics);
6318
6319 if (gimple_call_num_args (call) == 4)
6320 model = get_memmodel (gimple_call_arg (call, 3));
6321
6322 rtx mem = get_builtin_sync_mem (ptr, mode);
6323 rtx val = expand_expr_force_mode (bit, mode);
6324
6325 switch (gimple_call_internal_fn (call))
6326 {
6327 case IFN_ATOMIC_BIT_TEST_AND_SET:
6328 code = IOR;
6329 optab = atomic_bit_test_and_set_optab;
6330 break;
6331 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6332 code = XOR;
6333 optab = atomic_bit_test_and_complement_optab;
6334 break;
6335 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6336 code = AND;
6337 optab = atomic_bit_test_and_reset_optab;
6338 break;
6339 default:
6340 gcc_unreachable ();
6341 }
6342
6343 if (lhs == NULL_TREE)
6344 {
6345 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6346 val, NULL_RTX, true, OPTAB_DIRECT);
6347 if (code == AND)
6348 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6349 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6350 return;
6351 }
6352
6353 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6354 enum insn_code icode = direct_optab_handler (optab, mode);
6355 gcc_assert (icode != CODE_FOR_nothing);
6356 create_output_operand (&ops[0], target, mode);
6357 create_fixed_operand (&ops[1], mem);
6358 create_convert_operand_to (&ops[2], val, mode, true);
6359 create_integer_operand (&ops[3], model);
6360 create_integer_operand (&ops[4], integer_onep (flag));
6361 if (maybe_expand_insn (icode, 5, ops))
6362 return;
6363
6364 rtx bitval = val;
6365 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6366 val, NULL_RTX, true, OPTAB_DIRECT);
6367 rtx maskval = val;
6368 if (code == AND)
6369 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6370 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6371 code, model, false);
6372 if (integer_onep (flag))
6373 {
6374 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6375 NULL_RTX, true, OPTAB_DIRECT);
6376 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6377 true, OPTAB_DIRECT);
6378 }
6379 else
6380 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6381 OPTAB_DIRECT);
6382 if (result != target)
6383 emit_move_insn (target, result);
6384 }
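
/* Illustrative sketch (editor's note, not part of GCC): this internal
   function is produced when earlier passes recognize a pattern such as

	unsigned int mask = 1u << bit;
	unsigned int old = __atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST);
	int was_set = (old & mask) != 0;

   and the target provides an atomic bit-test-and-set pattern; the tail of
   the function above re-creates the fetch-op-plus-mask sequence when the
   optab expansion fails.  */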
6385
6386 /* Expand an atomic clear operation.
6387 void _atomic_clear (BOOL *obj, enum memmodel)
6388 EXP is the call expression. */
6389
6390 static rtx
6391 expand_builtin_atomic_clear (tree exp)
6392 {
6393 machine_mode mode;
6394 rtx mem, ret;
6395 enum memmodel model;
6396
6397 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6398 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6399 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6400
6401 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6402 {
6403 source_location loc
6404 = expansion_point_location_if_in_system_header (input_location);
6405 warning_at (loc, OPT_Winvalid_memory_model,
6406 "invalid memory model for %<__atomic_store%>");
6407 model = MEMMODEL_SEQ_CST;
6408 }
6409
6410 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6411 Failing that, a store is issued by __atomic_store. The only way this can
6412 fail is if the bool type is larger than a word size. Unlikely, but
6413 handle it anyway for completeness. Assume a single threaded model since
6414 there is no atomic support in this case, and no barriers are required. */
6415 ret = expand_atomic_store (mem, const0_rtx, model, true);
6416 if (!ret)
6417 emit_move_insn (mem, const0_rtx);
6418 return const0_rtx;
6419 }
6420
6421 /* Expand an atomic test_and_set operation.
6422 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6423 EXP is the call expression. */
6424
6425 static rtx
6426 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6427 {
6428 rtx mem;
6429 enum memmodel model;
6430 machine_mode mode;
6431
6432 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6433 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6434 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6435
6436 return expand_atomic_test_and_set (target, mem, model);
6437 }
6438
6439
6440 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6441 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6442
6443 static tree
6444 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6445 {
6446 int size;
6447 machine_mode mode;
6448 unsigned int mode_align, type_align;
6449
6450 if (TREE_CODE (arg0) != INTEGER_CST)
6451 return NULL_TREE;
6452
6453 /* We need a corresponding integer mode for the access to be lock-free. */
6454 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6455 if (!int_mode_for_size (size, 0).exists (&mode))
6456 return boolean_false_node;
6457
6458 mode_align = GET_MODE_ALIGNMENT (mode);
6459
6460 if (TREE_CODE (arg1) == INTEGER_CST)
6461 {
6462 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6463
6464 /* Either this argument is null, or it's a fake pointer encoding
6465 the alignment of the object. */
6466 val = least_bit_hwi (val);
6467 val *= BITS_PER_UNIT;
6468
6469 if (val == 0 || mode_align < val)
6470 type_align = mode_align;
6471 else
6472 type_align = val;
6473 }
6474 else
6475 {
6476 tree ttype = TREE_TYPE (arg1);
6477
6478 /* This function is usually invoked and folded immediately by the front
6479 end before anything else has a chance to look at it. The pointer
6480 parameter at this point is usually cast to a void *, so check for that
6481 and look past the cast. */
6482 if (CONVERT_EXPR_P (arg1)
6483 && POINTER_TYPE_P (ttype)
6484 && VOID_TYPE_P (TREE_TYPE (ttype))
6485 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6486 arg1 = TREE_OPERAND (arg1, 0);
6487
6488 ttype = TREE_TYPE (arg1);
6489 gcc_assert (POINTER_TYPE_P (ttype));
6490
6491 /* Get the underlying type of the object. */
6492 ttype = TREE_TYPE (ttype);
6493 type_align = TYPE_ALIGN (ttype);
6494 }
6495
6496 /* If the object has smaller alignment, the lock free routines cannot
6497 be used. */
6498 if (type_align < mode_align)
6499 return boolean_false_node;
6500
6501 /* Check if a compare_and_swap pattern exists for the mode which represents
6502 the required size. The pattern is not allowed to fail, so the existence
6503 of the pattern indicates support is present. Also require that an
6504 atomic load exists for the required size. */
6505 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6506 return boolean_true_node;
6507 else
6508 return boolean_false_node;
6509 }
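
/* Illustrative sketch (editor's note, not part of GCC): because this fold
   yields a compile-time constant whenever the size is constant, user code
   on typical targets can write e.g.

	_Static_assert (__atomic_always_lock_free (sizeof (long), 0),
			"long atomics must be lock-free");

   A null second argument means "typical alignment for the size"; passing
   a real object pointer lets that object's alignment be taken into
   account.  */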
6510
6511 /* Return true if the parameters to call EXP represent an object which will
6512 always generate lock free instructions. The first argument represents the
6513 size of the object, and the second parameter is a pointer to the object
6514 itself. If NULL is passed for the object, then the result is based on
6515 typical alignment for an object of the specified size. Otherwise return
6516 false. */
6517
6518 static rtx
6519 expand_builtin_atomic_always_lock_free (tree exp)
6520 {
6521 tree size;
6522 tree arg0 = CALL_EXPR_ARG (exp, 0);
6523 tree arg1 = CALL_EXPR_ARG (exp, 1);
6524
6525 if (TREE_CODE (arg0) != INTEGER_CST)
6526 {
6527 error ("non-constant argument 1 to __atomic_always_lock_free");
6528 return const0_rtx;
6529 }
6530
6531 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6532 if (size == boolean_true_node)
6533 return const1_rtx;
6534 return const0_rtx;
6535 }
6536
6537 /* Return boolean_true_node if it can be determined that an object ARG1 of
6538 size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
6539
6540 static tree
6541 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6542 {
6543 if (!flag_inline_atomics)
6544 return NULL_TREE;
6545
6546 /* If it isn't always lock free, don't generate a result. */
6547 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6548 return boolean_true_node;
6549
6550 return NULL_TREE;
6551 }
6552
6553 /* Return true if the parameters to call EXP represent an object which will
6554 always generate lock free instructions. The first argument represents the
6555 size of the object, and the second parameter is a pointer to the object
6556 itself. If NULL is passed for the object, then the result is based on
6557 typical alignment for an object of the specified size. Otherwise return
6558 NULL_RTX. */
6559
6560 static rtx
6561 expand_builtin_atomic_is_lock_free (tree exp)
6562 {
6563 tree size;
6564 tree arg0 = CALL_EXPR_ARG (exp, 0);
6565 tree arg1 = CALL_EXPR_ARG (exp, 1);
6566
6567 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6568 {
6569 error ("non-integer argument 1 to __atomic_is_lock_free");
6570 return NULL_RTX;
6571 }
6572
6573 if (!flag_inline_atomics)
6574 return NULL_RTX;
6575
6576 /* If the value is known at compile time, return the RTX for it. */
6577 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6578 if (size == boolean_true_node)
6579 return const1_rtx;
6580
6581 return NULL_RTX;
6582 }
6583
6584 /* Expand the __atomic_thread_fence intrinsic:
6585 void __atomic_thread_fence (enum memmodel)
6586 EXP is the CALL_EXPR. */
6587
6588 static void
6589 expand_builtin_atomic_thread_fence (tree exp)
6590 {
6591 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6592 expand_mem_thread_fence (model);
6593 }
6594
6595 /* Expand the __atomic_signal_fence intrinsic:
6596 void __atomic_signal_fence (enum memmodel)
6597 EXP is the CALL_EXPR. */
6598
6599 static void
6600 expand_builtin_atomic_signal_fence (tree exp)
6601 {
6602 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6603 expand_mem_signal_fence (model);
6604 }
6605
6606 /* Expand the __sync_synchronize intrinsic. */
6607
6608 static void
6609 expand_builtin_sync_synchronize (void)
6610 {
6611 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6612 }
6613
6614 static rtx
6615 expand_builtin_thread_pointer (tree exp, rtx target)
6616 {
6617 enum insn_code icode;
6618 if (!validate_arglist (exp, VOID_TYPE))
6619 return const0_rtx;
6620 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6621 if (icode != CODE_FOR_nothing)
6622 {
6623 struct expand_operand op;
6624 /* If the target is not suitable then create a new target. */
6625 if (target == NULL_RTX
6626 || !REG_P (target)
6627 || GET_MODE (target) != Pmode)
6628 target = gen_reg_rtx (Pmode);
6629 create_output_operand (&op, target, Pmode);
6630 expand_insn (icode, 1, &op);
6631 return target;
6632 }
6633 error ("__builtin_thread_pointer is not supported on this target");
6634 return const0_rtx;
6635 }
6636
6637 static void
6638 expand_builtin_set_thread_pointer (tree exp)
6639 {
6640 enum insn_code icode;
6641 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6642 return;
6643 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6644 if (icode != CODE_FOR_nothing)
6645 {
6646 struct expand_operand op;
6647 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6648 Pmode, EXPAND_NORMAL);
6649 create_input_operand (&op, val, Pmode);
6650 expand_insn (icode, 1, &op);
6651 return;
6652 }
6653 error ("__builtin_set_thread_pointer is not supported on this target");
6654 }
6655
6656 \f
6657 /* Emit code to restore the current value of stack. */
6658
6659 static void
6660 expand_stack_restore (tree var)
6661 {
6662 rtx_insn *prev;
6663 rtx sa = expand_normal (var);
6664
6665 sa = convert_memory_address (Pmode, sa);
6666
6667 prev = get_last_insn ();
6668 emit_stack_restore (SAVE_BLOCK, sa);
6669
6670 record_new_stack_level ();
6671
6672 fixup_args_size_notes (prev, get_last_insn (), 0);
6673 }
6674
6675 /* Emit code to save the current value of stack. */
6676
6677 static rtx
6678 expand_stack_save (void)
6679 {
6680 rtx ret = NULL_RTX;
6681
6682 emit_stack_save (SAVE_BLOCK, &ret);
6683 return ret;
6684 }
6685
6686 /* Emit code to get the openacc gang, worker or vector id or size. */
6687
6688 static rtx
6689 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6690 {
6691 const char *name;
6692 rtx fallback_retval;
6693 rtx_insn *(*gen_fn) (rtx, rtx);
6694 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6695 {
6696 case BUILT_IN_GOACC_PARLEVEL_ID:
6697 name = "__builtin_goacc_parlevel_id";
6698 fallback_retval = const0_rtx;
6699 gen_fn = targetm.gen_oacc_dim_pos;
6700 break;
6701 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6702 name = "__builtin_goacc_parlevel_size";
6703 fallback_retval = const1_rtx;
6704 gen_fn = targetm.gen_oacc_dim_size;
6705 break;
6706 default:
6707 gcc_unreachable ();
6708 }
6709
6710 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6711 {
6712 error ("%qs only supported in OpenACC code", name);
6713 return const0_rtx;
6714 }
6715
6716 tree arg = CALL_EXPR_ARG (exp, 0);
6717 if (TREE_CODE (arg) != INTEGER_CST)
6718 {
6719 error ("non-constant argument 0 to %qs", name);
6720 return const0_rtx;
6721 }
6722
6723 int dim = TREE_INT_CST_LOW (arg);
6724 switch (dim)
6725 {
6726 case GOMP_DIM_GANG:
6727 case GOMP_DIM_WORKER:
6728 case GOMP_DIM_VECTOR:
6729 break;
6730 default:
6731 error ("illegal argument 0 to %qs", name);
6732 return const0_rtx;
6733 }
6734
6735 if (ignore)
6736 return target;
6737
6738 if (target == NULL_RTX)
6739 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6740
6741 if (!targetm.have_oacc_dim_size ())
6742 {
6743 emit_move_insn (target, fallback_retval);
6744 return target;
6745 }
6746
6747 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6748 emit_insn (gen_fn (reg, GEN_INT (dim)));
6749 if (reg != target)
6750 emit_move_insn (target, reg);
6751
6752 return target;
6753 }
6754
6755 /* Expand a string compare operation using a sequence of char comparisons
6756 to get rid of the calling overhead, with result going to TARGET if
6757 that's convenient.
6758
6759 VAR_STR is the variable string source;
6760 CONST_STR is the constant string source;
6761 LENGTH is the number of chars to compare;
6762 CONST_STR_N indicates which source string is the constant string;
6763 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6764
6765 Generate code equivalent to the following (assuming CONST_STR_N is 2, i.e., arg2 is the constant string):
6766
6767 target = var_str[0] - const_str[0];
6768 if (target != 0)
6769 goto ne_label;
6770 ...
6771 target = var_str[length - 2] - const_str[length - 2];
6772 if (target != 0)
6773 goto ne_label;
6774 target = var_str[length - 1] - const_str[length - 1];
6775 ne_label:
6776 */
6777
6778 static rtx
6779 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6780 unsigned HOST_WIDE_INT length,
6781 int const_str_n, machine_mode mode,
6782 bool is_memcmp)
6783 {
6784 HOST_WIDE_INT offset = 0;
6785 rtx var_rtx_array
6786 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6787 rtx var_rtx = NULL_RTX;
6788 rtx const_rtx = NULL_RTX;
6789 rtx result = target ? target : gen_reg_rtx (mode);
6790 rtx_code_label *ne_label = gen_label_rtx ();
6791 tree unit_type_node = is_memcmp ? unsigned_char_type_node : char_type_node;
6792 scalar_int_mode unit_mode
6793 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6794
6795 start_sequence ();
6796
6797 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6798 {
6799 var_rtx
6800 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6801 const_rtx = c_readstr (const_str + offset, unit_mode);
6802 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6803 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6804
6805 result = expand_simple_binop (mode, MINUS, op0, op1,
6806 result, is_memcmp ? 1 : 0, OPTAB_WIDEN);
6807 if (i < length - 1)
6808 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6809 mode, true, ne_label);
6810 offset += GET_MODE_SIZE (unit_mode);
6811 }
6812
6813 emit_label (ne_label);
6814 rtx_insn *insns = get_insns ();
6815 end_sequence ();
6816 emit_insn (insns);
6817
6818 return result;
6819 }
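
/* For instance (a sketch, not the exact RTL emitted): a call such as
   memcmp (p, "ab", 2), where the second argument is the constant string
   (CONST_STR_N == 2), is expanded along the lines of

     result = (unsigned char) p[0] - 'a';
     if (result != 0) goto ne_label;
     result = (unsigned char) p[1] - 'b';
   ne_label:

   with the running byte difference accumulated in RESULT.  */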
6820
6821 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
6822 going to TARGET if that's convenient.
6823 If the call cannot be inlined, return NULL_RTX.  */
6824 static rtx
6825 inline_expand_builtin_string_cmp (tree exp, rtx target, bool is_memcmp)
6826 {
6827 tree fndecl = get_callee_fndecl (exp);
6828 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6829 unsigned HOST_WIDE_INT length = 0;
6830 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6831
6832 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6833 || fcode == BUILT_IN_STRNCMP
6834 || fcode == BUILT_IN_MEMCMP);
6835
6836 tree arg1 = CALL_EXPR_ARG (exp, 0);
6837 tree arg2 = CALL_EXPR_ARG (exp, 1);
6838 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6839
6840 unsigned HOST_WIDE_INT len1 = 0;
6841 unsigned HOST_WIDE_INT len2 = 0;
6842 unsigned HOST_WIDE_INT len3 = 0;
6843
6844 const char *src_str1 = c_getstr (arg1, &len1);
6845 const char *src_str2 = c_getstr (arg2, &len2);
6846
6847 /* If neither string is a constant string, the call does not qualify.  */
6848 if (!src_str1 && !src_str2)
6849 return NULL_RTX;
6850
6851 /* For strncmp or memcmp, if the length argument is not a constant, the call does not qualify.  */
6852 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6853 return NULL_RTX;
6854
6855 int const_str_n = 0;
6856 if (!len1)
6857 const_str_n = 2;
6858 else if (!len2)
6859 const_str_n = 1;
6860 else if (len2 > len1)
6861 const_str_n = 1;
6862 else
6863 const_str_n = 2;
6864
6865 gcc_checking_assert (const_str_n > 0);
6866 length = (const_str_n == 1) ? len1 : len2;
6867
6868 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6869 length = len3;
6870
6871 /* If the length of the comparison is larger than the threshold,
6872 do nothing. */
6873 if (length > (unsigned HOST_WIDE_INT)
6874 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6875 return NULL_RTX;
6876
6877 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6878
6879 /* Now, expand the call inline.  */
6880 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6881 (const_str_n == 1) ? src_str1 : src_str2, length,
6882 const_str_n, mode, is_memcmp);
6883 }
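
/* As a hedged illustration, calls of the following shape are candidates for
   the inline expansion above:

     strcmp (s, "hello")        at least one argument is a string literal
     memcmp (buf, "abcd", 4)    constant string and constant length

   and only while the number of characters compared stays within
   PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH).  */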
6884
6885 /* Expand an expression EXP that calls a built-in function,
6886 with result going to TARGET if that's convenient
6887 (and in mode MODE if that's convenient).
6888 SUBTARGET may be used as the target for computing one of EXP's operands.
6889 IGNORE is nonzero if the value is to be ignored. */
6890
6891 rtx
6892 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6893 int ignore)
6894 {
6895 tree fndecl = get_callee_fndecl (exp);
6896 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6897 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6898 int flags;
6899
6900 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6901 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6902
6903 /* When ASan is enabled, we don't want to expand some memory/string
6904 builtins and rely on libsanitizer's hooks. This allows us to avoid
6905 redundant checks and be sure that a possible overflow will be detected
6906 by ASan. */
6907
6908 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6909 return expand_call (exp, target, ignore);
6910
6911 /* When not optimizing, generate calls to library functions for a certain
6912 set of builtins. */
6913 if (!optimize
6914 && !called_as_built_in (fndecl)
6915 && fcode != BUILT_IN_FORK
6916 && fcode != BUILT_IN_EXECL
6917 && fcode != BUILT_IN_EXECV
6918 && fcode != BUILT_IN_EXECLP
6919 && fcode != BUILT_IN_EXECLE
6920 && fcode != BUILT_IN_EXECVP
6921 && fcode != BUILT_IN_EXECVE
6922 && !ALLOCA_FUNCTION_CODE_P (fcode)
6923 && fcode != BUILT_IN_FREE)
6924 return expand_call (exp, target, ignore);
6925
6926 /* The built-in function expanders test for target == const0_rtx
6927 to determine whether the function's result will be ignored. */
6928 if (ignore)
6929 target = const0_rtx;
6930
6931 /* If the result of a pure or const built-in function is ignored, and
6932 none of its arguments are volatile, we can avoid expanding the
6933 built-in call and just evaluate the arguments for side-effects. */
6934 if (target == const0_rtx
6935 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6936 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6937 {
6938 bool volatilep = false;
6939 tree arg;
6940 call_expr_arg_iterator iter;
6941
6942 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6943 if (TREE_THIS_VOLATILE (arg))
6944 {
6945 volatilep = true;
6946 break;
6947 }
6948
6949 if (! volatilep)
6950 {
6951 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6952 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6953 return const0_rtx;
6954 }
6955 }
6956
6957 switch (fcode)
6958 {
6959 CASE_FLT_FN (BUILT_IN_FABS):
6960 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6961 case BUILT_IN_FABSD32:
6962 case BUILT_IN_FABSD64:
6963 case BUILT_IN_FABSD128:
6964 target = expand_builtin_fabs (exp, target, subtarget);
6965 if (target)
6966 return target;
6967 break;
6968
6969 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6970 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6971 target = expand_builtin_copysign (exp, target, subtarget);
6972 if (target)
6973 return target;
6974 break;
6975
6976 /* Just do a normal library call if we were unable to fold
6977 the values. */
6978 CASE_FLT_FN (BUILT_IN_CABS):
6979 break;
6980
6981 CASE_FLT_FN (BUILT_IN_FMA):
6982 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6983 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6984 if (target)
6985 return target;
6986 break;
6987
6988 CASE_FLT_FN (BUILT_IN_ILOGB):
6989 if (! flag_unsafe_math_optimizations)
6990 break;
6991 gcc_fallthrough ();
6992 CASE_FLT_FN (BUILT_IN_ISINF):
6993 CASE_FLT_FN (BUILT_IN_FINITE):
6994 case BUILT_IN_ISFINITE:
6995 case BUILT_IN_ISNORMAL:
6996 target = expand_builtin_interclass_mathfn (exp, target);
6997 if (target)
6998 return target;
6999 break;
7000
7001 CASE_FLT_FN (BUILT_IN_ICEIL):
7002 CASE_FLT_FN (BUILT_IN_LCEIL):
7003 CASE_FLT_FN (BUILT_IN_LLCEIL):
7004 CASE_FLT_FN (BUILT_IN_LFLOOR):
7005 CASE_FLT_FN (BUILT_IN_IFLOOR):
7006 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7007 target = expand_builtin_int_roundingfn (exp, target);
7008 if (target)
7009 return target;
7010 break;
7011
7012 CASE_FLT_FN (BUILT_IN_IRINT):
7013 CASE_FLT_FN (BUILT_IN_LRINT):
7014 CASE_FLT_FN (BUILT_IN_LLRINT):
7015 CASE_FLT_FN (BUILT_IN_IROUND):
7016 CASE_FLT_FN (BUILT_IN_LROUND):
7017 CASE_FLT_FN (BUILT_IN_LLROUND):
7018 target = expand_builtin_int_roundingfn_2 (exp, target);
7019 if (target)
7020 return target;
7021 break;
7022
7023 CASE_FLT_FN (BUILT_IN_POWI):
7024 target = expand_builtin_powi (exp, target);
7025 if (target)
7026 return target;
7027 break;
7028
7029 CASE_FLT_FN (BUILT_IN_CEXPI):
7030 target = expand_builtin_cexpi (exp, target);
7031 gcc_assert (target);
7032 return target;
7033
7034 CASE_FLT_FN (BUILT_IN_SIN):
7035 CASE_FLT_FN (BUILT_IN_COS):
7036 if (! flag_unsafe_math_optimizations)
7037 break;
7038 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7039 if (target)
7040 return target;
7041 break;
7042
7043 CASE_FLT_FN (BUILT_IN_SINCOS):
7044 if (! flag_unsafe_math_optimizations)
7045 break;
7046 target = expand_builtin_sincos (exp);
7047 if (target)
7048 return target;
7049 break;
7050
7051 case BUILT_IN_APPLY_ARGS:
7052 return expand_builtin_apply_args ();
7053
7054 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7055 FUNCTION with a copy of the parameters described by
7056 ARGUMENTS, and ARGSIZE. It returns a block of memory
7057 allocated on the stack into which is stored all the registers
7058 that might possibly be used for returning the result of a
7059 function. ARGUMENTS is the value returned by
7060 __builtin_apply_args. ARGSIZE is the number of bytes of
7061 arguments that must be copied. ??? How should this value be
7062 computed? We'll also need a safe worst case value for varargs
7063 functions. */
7064 case BUILT_IN_APPLY:
7065 if (!validate_arglist (exp, POINTER_TYPE,
7066 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7067 && !validate_arglist (exp, REFERENCE_TYPE,
7068 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7069 return const0_rtx;
7070 else
7071 {
7072 rtx ops[3];
7073
7074 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7075 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7076 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7077
7078 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7079 }
7080
7081 /* __builtin_return (RESULT) causes the function to return the
7082 value described by RESULT. RESULT is the address of the block of
7083 memory returned by __builtin_apply. */
7084 case BUILT_IN_RETURN:
7085 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7086 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7087 return const0_rtx;
7088
7089 case BUILT_IN_SAVEREGS:
7090 return expand_builtin_saveregs ();
7091
7092 case BUILT_IN_VA_ARG_PACK:
7093 /* All valid uses of __builtin_va_arg_pack () are removed during
7094 inlining. */
7095 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7096 return const0_rtx;
7097
7098 case BUILT_IN_VA_ARG_PACK_LEN:
7099 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7100 inlining. */
7101 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7102 return const0_rtx;
7103
7104 /* Return the address of the first anonymous stack arg. */
7105 case BUILT_IN_NEXT_ARG:
7106 if (fold_builtin_next_arg (exp, false))
7107 return const0_rtx;
7108 return expand_builtin_next_arg ();
7109
7110 case BUILT_IN_CLEAR_CACHE:
7111 target = expand_builtin___clear_cache (exp);
7112 if (target)
7113 return target;
7114 break;
7115
7116 case BUILT_IN_CLASSIFY_TYPE:
7117 return expand_builtin_classify_type (exp);
7118
7119 case BUILT_IN_CONSTANT_P:
7120 return const0_rtx;
7121
7122 case BUILT_IN_FRAME_ADDRESS:
7123 case BUILT_IN_RETURN_ADDRESS:
7124 return expand_builtin_frame_address (fndecl, exp);
7125
7126 /* Return the address of the area where the structure is returned,
7127 or 0 otherwise.  */
7128 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7129 if (call_expr_nargs (exp) != 0
7130 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7131 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7132 return const0_rtx;
7133 else
7134 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7135
7136 CASE_BUILT_IN_ALLOCA:
7137 target = expand_builtin_alloca (exp);
7138 if (target)
7139 return target;
7140 break;
7141
7142 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7143 return expand_asan_emit_allocas_unpoison (exp);
7144
7145 case BUILT_IN_STACK_SAVE:
7146 return expand_stack_save ();
7147
7148 case BUILT_IN_STACK_RESTORE:
7149 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7150 return const0_rtx;
7151
7152 case BUILT_IN_BSWAP16:
7153 case BUILT_IN_BSWAP32:
7154 case BUILT_IN_BSWAP64:
7155 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7156 if (target)
7157 return target;
7158 break;
7159
7160 CASE_INT_FN (BUILT_IN_FFS):
7161 target = expand_builtin_unop (target_mode, exp, target,
7162 subtarget, ffs_optab);
7163 if (target)
7164 return target;
7165 break;
7166
7167 CASE_INT_FN (BUILT_IN_CLZ):
7168 target = expand_builtin_unop (target_mode, exp, target,
7169 subtarget, clz_optab);
7170 if (target)
7171 return target;
7172 break;
7173
7174 CASE_INT_FN (BUILT_IN_CTZ):
7175 target = expand_builtin_unop (target_mode, exp, target,
7176 subtarget, ctz_optab);
7177 if (target)
7178 return target;
7179 break;
7180
7181 CASE_INT_FN (BUILT_IN_CLRSB):
7182 target = expand_builtin_unop (target_mode, exp, target,
7183 subtarget, clrsb_optab);
7184 if (target)
7185 return target;
7186 break;
7187
7188 CASE_INT_FN (BUILT_IN_POPCOUNT):
7189 target = expand_builtin_unop (target_mode, exp, target,
7190 subtarget, popcount_optab);
7191 if (target)
7192 return target;
7193 break;
7194
7195 CASE_INT_FN (BUILT_IN_PARITY):
7196 target = expand_builtin_unop (target_mode, exp, target,
7197 subtarget, parity_optab);
7198 if (target)
7199 return target;
7200 break;
7201
7202 case BUILT_IN_STRLEN:
7203 target = expand_builtin_strlen (exp, target, target_mode);
7204 if (target)
7205 return target;
7206 break;
7207
7208 case BUILT_IN_STRNLEN:
7209 target = expand_builtin_strnlen (exp, target, target_mode);
7210 if (target)
7211 return target;
7212 break;
7213
7214 case BUILT_IN_STRCAT:
7215 target = expand_builtin_strcat (exp, target);
7216 if (target)
7217 return target;
7218 break;
7219
7220 case BUILT_IN_STRCPY:
7221 target = expand_builtin_strcpy (exp, target);
7222 if (target)
7223 return target;
7224 break;
7225
7226 case BUILT_IN_STRNCAT:
7227 target = expand_builtin_strncat (exp, target);
7228 if (target)
7229 return target;
7230 break;
7231
7232 case BUILT_IN_STRNCPY:
7233 target = expand_builtin_strncpy (exp, target);
7234 if (target)
7235 return target;
7236 break;
7237
7238 case BUILT_IN_STPCPY:
7239 target = expand_builtin_stpcpy (exp, target, mode);
7240 if (target)
7241 return target;
7242 break;
7243
7244 case BUILT_IN_STPNCPY:
7245 target = expand_builtin_stpncpy (exp, target);
7246 if (target)
7247 return target;
7248 break;
7249
7250 case BUILT_IN_MEMCHR:
7251 target = expand_builtin_memchr (exp, target);
7252 if (target)
7253 return target;
7254 break;
7255
7256 case BUILT_IN_MEMCPY:
7257 target = expand_builtin_memcpy (exp, target);
7258 if (target)
7259 return target;
7260 break;
7261
7262 case BUILT_IN_MEMMOVE:
7263 target = expand_builtin_memmove (exp, target);
7264 if (target)
7265 return target;
7266 break;
7267
7268 case BUILT_IN_MEMPCPY:
7269 target = expand_builtin_mempcpy (exp, target);
7270 if (target)
7271 return target;
7272 break;
7273
7274 case BUILT_IN_MEMSET:
7275 target = expand_builtin_memset (exp, target, mode);
7276 if (target)
7277 return target;
7278 break;
7279
7280 case BUILT_IN_BZERO:
7281 target = expand_builtin_bzero (exp);
7282 if (target)
7283 return target;
7284 break;
7285
7286 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7287 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7288 when changing it to a strcmp call. */
7289 case BUILT_IN_STRCMP_EQ:
7290 target = expand_builtin_memcmp (exp, target, true);
7291 if (target)
7292 return target;
7293
7294 /* Change this call back to a BUILT_IN_STRCMP. */
7295 TREE_OPERAND (exp, 1)
7296 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7297
7298 /* Delete the last parameter. */
7299 unsigned int i;
7300 vec<tree, va_gc> *arg_vec;
7301 vec_alloc (arg_vec, 2);
7302 for (i = 0; i < 2; i++)
7303 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7304 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7305 /* FALLTHROUGH */
7306
7307 case BUILT_IN_STRCMP:
7308 target = expand_builtin_strcmp (exp, target);
7309 if (target)
7310 return target;
7311 break;
7312
7313 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7314 back to a BUILT_IN_STRNCMP. */
7315 case BUILT_IN_STRNCMP_EQ:
7316 target = expand_builtin_memcmp (exp, target, true);
7317 if (target)
7318 return target;
7319
7320 /* Change it back to a BUILT_IN_STRNCMP. */
7321 TREE_OPERAND (exp, 1)
7322 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7323 /* FALLTHROUGH */
7324
7325 case BUILT_IN_STRNCMP:
7326 target = expand_builtin_strncmp (exp, target, mode);
7327 if (target)
7328 return target;
7329 break;
7330
7331 case BUILT_IN_BCMP:
7332 case BUILT_IN_MEMCMP:
7333 case BUILT_IN_MEMCMP_EQ:
7334 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7335 if (target)
7336 return target;
7337 if (fcode == BUILT_IN_MEMCMP_EQ)
7338 {
7339 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7340 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7341 }
7342 break;
7343
7344 case BUILT_IN_SETJMP:
7345 /* This should have been lowered to the builtins below. */
7346 gcc_unreachable ();
7347
7348 case BUILT_IN_SETJMP_SETUP:
7349 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7350 and the receiver label. */
7351 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7352 {
7353 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7354 VOIDmode, EXPAND_NORMAL);
7355 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7356 rtx_insn *label_r = label_rtx (label);
7357
7358 /* This is copied from the handling of non-local gotos. */
7359 expand_builtin_setjmp_setup (buf_addr, label_r);
7360 nonlocal_goto_handler_labels
7361 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7362 nonlocal_goto_handler_labels);
7363 /* ??? Do not let expand_label treat us as such since we would
7364 not want to be both on the list of non-local labels and on
7365 the list of forced labels. */
7366 FORCED_LABEL (label) = 0;
7367 return const0_rtx;
7368 }
7369 break;
7370
7371 case BUILT_IN_SETJMP_RECEIVER:
7372 /* __builtin_setjmp_receiver is passed the receiver label. */
7373 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7374 {
7375 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7376 rtx_insn *label_r = label_rtx (label);
7377
7378 expand_builtin_setjmp_receiver (label_r);
7379 return const0_rtx;
7380 }
7381 break;
7382
7383 /* __builtin_longjmp is passed a pointer to an array of five words.
7384 It's similar to the C library longjmp function but works with
7385 __builtin_setjmp above. */
7386 case BUILT_IN_LONGJMP:
7387 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7388 {
7389 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7390 VOIDmode, EXPAND_NORMAL);
7391 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7392
7393 if (value != const1_rtx)
7394 {
7395 error ("%<__builtin_longjmp%> second argument must be 1");
7396 return const0_rtx;
7397 }
7398
7399 expand_builtin_longjmp (buf_addr, value);
7400 return const0_rtx;
7401 }
7402 break;
7403
7404 case BUILT_IN_NONLOCAL_GOTO:
7405 target = expand_builtin_nonlocal_goto (exp);
7406 if (target)
7407 return target;
7408 break;
7409
7410 /* This updates the setjmp buffer that is its argument with the value
7411 of the current stack pointer. */
7412 case BUILT_IN_UPDATE_SETJMP_BUF:
7413 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7414 {
7415 rtx buf_addr
7416 = expand_normal (CALL_EXPR_ARG (exp, 0));
7417
7418 expand_builtin_update_setjmp_buf (buf_addr);
7419 return const0_rtx;
7420 }
7421 break;
7422
7423 case BUILT_IN_TRAP:
7424 expand_builtin_trap ();
7425 return const0_rtx;
7426
7427 case BUILT_IN_UNREACHABLE:
7428 expand_builtin_unreachable ();
7429 return const0_rtx;
7430
7431 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7432 case BUILT_IN_SIGNBITD32:
7433 case BUILT_IN_SIGNBITD64:
7434 case BUILT_IN_SIGNBITD128:
7435 target = expand_builtin_signbit (exp, target);
7436 if (target)
7437 return target;
7438 break;
7439
7440 /* Various hooks for the DWARF 2 __throw routine. */
7441 case BUILT_IN_UNWIND_INIT:
7442 expand_builtin_unwind_init ();
7443 return const0_rtx;
7444 case BUILT_IN_DWARF_CFA:
7445 return virtual_cfa_rtx;
7446 #ifdef DWARF2_UNWIND_INFO
7447 case BUILT_IN_DWARF_SP_COLUMN:
7448 return expand_builtin_dwarf_sp_column ();
7449 case BUILT_IN_INIT_DWARF_REG_SIZES:
7450 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7451 return const0_rtx;
7452 #endif
7453 case BUILT_IN_FROB_RETURN_ADDR:
7454 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7455 case BUILT_IN_EXTRACT_RETURN_ADDR:
7456 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7457 case BUILT_IN_EH_RETURN:
7458 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7459 CALL_EXPR_ARG (exp, 1));
7460 return const0_rtx;
7461 case BUILT_IN_EH_RETURN_DATA_REGNO:
7462 return expand_builtin_eh_return_data_regno (exp);
7463 case BUILT_IN_EXTEND_POINTER:
7464 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7465 case BUILT_IN_EH_POINTER:
7466 return expand_builtin_eh_pointer (exp);
7467 case BUILT_IN_EH_FILTER:
7468 return expand_builtin_eh_filter (exp);
7469 case BUILT_IN_EH_COPY_VALUES:
7470 return expand_builtin_eh_copy_values (exp);
7471
7472 case BUILT_IN_VA_START:
7473 return expand_builtin_va_start (exp);
7474 case BUILT_IN_VA_END:
7475 return expand_builtin_va_end (exp);
7476 case BUILT_IN_VA_COPY:
7477 return expand_builtin_va_copy (exp);
7478 case BUILT_IN_EXPECT:
7479 return expand_builtin_expect (exp, target);
7480 case BUILT_IN_ASSUME_ALIGNED:
7481 return expand_builtin_assume_aligned (exp, target);
7482 case BUILT_IN_PREFETCH:
7483 expand_builtin_prefetch (exp);
7484 return const0_rtx;
7485
7486 case BUILT_IN_INIT_TRAMPOLINE:
7487 return expand_builtin_init_trampoline (exp, true);
7488 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7489 return expand_builtin_init_trampoline (exp, false);
7490 case BUILT_IN_ADJUST_TRAMPOLINE:
7491 return expand_builtin_adjust_trampoline (exp);
7492
7493 case BUILT_IN_INIT_DESCRIPTOR:
7494 return expand_builtin_init_descriptor (exp);
7495 case BUILT_IN_ADJUST_DESCRIPTOR:
7496 return expand_builtin_adjust_descriptor (exp);
7497
7498 case BUILT_IN_FORK:
7499 case BUILT_IN_EXECL:
7500 case BUILT_IN_EXECV:
7501 case BUILT_IN_EXECLP:
7502 case BUILT_IN_EXECLE:
7503 case BUILT_IN_EXECVP:
7504 case BUILT_IN_EXECVE:
7505 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7506 if (target)
7507 return target;
7508 break;
7509
7510 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7511 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7512 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7513 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7514 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7516 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7517 if (target)
7518 return target;
7519 break;
7520
7521 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7522 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7523 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7524 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7525 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7527 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7528 if (target)
7529 return target;
7530 break;
7531
7532 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7533 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7534 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7535 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7536 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7538 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7539 if (target)
7540 return target;
7541 break;
7542
7543 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7544 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7545 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7546 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7547 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7549 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7550 if (target)
7551 return target;
7552 break;
7553
7554 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7555 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7556 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7557 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7558 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7559 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7560 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7561 if (target)
7562 return target;
7563 break;
7564
7565 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7566 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7567 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7568 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7569 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7571 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7572 if (target)
7573 return target;
7574 break;
7575
7576 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7577 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7578 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7579 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7580 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7581 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7582 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7583 if (target)
7584 return target;
7585 break;
7586
7587 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7588 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7589 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7590 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7591 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7592 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7593 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7594 if (target)
7595 return target;
7596 break;
7597
7598 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7599 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7600 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7601 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7602 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7603 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7604 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7605 if (target)
7606 return target;
7607 break;
7608
7609 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7610 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7611 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7612 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7613 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7614 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7615 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7616 if (target)
7617 return target;
7618 break;
7619
7620 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7621 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7622 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7623 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7624 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7625 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7626 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7627 if (target)
7628 return target;
7629 break;
7630
7631 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7632 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7633 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7634 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7635 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7636 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7637 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7638 if (target)
7639 return target;
7640 break;
7641
7642 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7643 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7644 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7645 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7646 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7647 if (mode == VOIDmode)
7648 mode = TYPE_MODE (boolean_type_node);
7649 if (!target || !register_operand (target, mode))
7650 target = gen_reg_rtx (mode);
7651
7652 mode = get_builtin_sync_mode
7653 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7654 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7655 if (target)
7656 return target;
7657 break;
7658
7659 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7660 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7661 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7662 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7663 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7664 mode = get_builtin_sync_mode
7665 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7666 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7667 if (target)
7668 return target;
7669 break;
7670
7671 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7672 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7673 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7674 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7675 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7676 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7677 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7678 if (target)
7679 return target;
7680 break;
7681
7682 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7683 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7684 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7685 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7686 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7687 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7688 expand_builtin_sync_lock_release (mode, exp);
7689 return const0_rtx;
7690
7691 case BUILT_IN_SYNC_SYNCHRONIZE:
7692 expand_builtin_sync_synchronize ();
7693 return const0_rtx;
7694
7695 case BUILT_IN_ATOMIC_EXCHANGE_1:
7696 case BUILT_IN_ATOMIC_EXCHANGE_2:
7697 case BUILT_IN_ATOMIC_EXCHANGE_4:
7698 case BUILT_IN_ATOMIC_EXCHANGE_8:
7699 case BUILT_IN_ATOMIC_EXCHANGE_16:
7700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7701 target = expand_builtin_atomic_exchange (mode, exp, target);
7702 if (target)
7703 return target;
7704 break;
7705
7706 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7707 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7708 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7709 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7710 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7711 {
7712 unsigned int nargs, z;
7713 vec<tree, va_gc> *vec;
7714
7715 mode =
7716 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7717 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7718 if (target)
7719 return target;
7720
7721 /* If this is turned into an external library call, the weak parameter
7722 must be dropped to match the expected parameter list. */
7723 nargs = call_expr_nargs (exp);
7724 vec_alloc (vec, nargs - 1);
7725 for (z = 0; z < 3; z++)
7726 vec->quick_push (CALL_EXPR_ARG (exp, z));
7727 /* Skip the boolean weak parameter. */
7728 for (z = 4; z < 6; z++)
7729 vec->quick_push (CALL_EXPR_ARG (exp, z));
7730 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7731 break;
7732 }
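
/* A sketch of the rewrite above: when the inline expansion fails, a call

     __atomic_compare_exchange_4 (mem, expected, desired, weak, s_ord, f_ord)

   is rebuilt without the boolean WEAK argument, i.e. as

     __atomic_compare_exchange_4 (mem, expected, desired, s_ord, f_ord)

   so that a subsequent external library call sees the five-argument
   parameter list it expects.  */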
7733
7734 case BUILT_IN_ATOMIC_LOAD_1:
7735 case BUILT_IN_ATOMIC_LOAD_2:
7736 case BUILT_IN_ATOMIC_LOAD_4:
7737 case BUILT_IN_ATOMIC_LOAD_8:
7738 case BUILT_IN_ATOMIC_LOAD_16:
7739 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7740 target = expand_builtin_atomic_load (mode, exp, target);
7741 if (target)
7742 return target;
7743 break;
7744
7745 case BUILT_IN_ATOMIC_STORE_1:
7746 case BUILT_IN_ATOMIC_STORE_2:
7747 case BUILT_IN_ATOMIC_STORE_4:
7748 case BUILT_IN_ATOMIC_STORE_8:
7749 case BUILT_IN_ATOMIC_STORE_16:
7750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7751 target = expand_builtin_atomic_store (mode, exp);
7752 if (target)
7753 return const0_rtx;
7754 break;
7755
7756 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7757 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7758 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7759 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7760 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7761 {
7762 enum built_in_function lib;
7763 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7764 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7765 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7766 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7767 ignore, lib);
7768 if (target)
7769 return target;
7770 break;
7771 }
7772 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7773 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7774 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7775 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7776 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7777 {
7778 enum built_in_function lib;
7779 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7780 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7781 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7782 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7783 ignore, lib);
7784 if (target)
7785 return target;
7786 break;
7787 }
7788 case BUILT_IN_ATOMIC_AND_FETCH_1:
7789 case BUILT_IN_ATOMIC_AND_FETCH_2:
7790 case BUILT_IN_ATOMIC_AND_FETCH_4:
7791 case BUILT_IN_ATOMIC_AND_FETCH_8:
7792 case BUILT_IN_ATOMIC_AND_FETCH_16:
7793 {
7794 enum built_in_function lib;
7795 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7796 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7797 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7798 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7799 ignore, lib);
7800 if (target)
7801 return target;
7802 break;
7803 }
7804 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7805 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7806 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7807 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7808 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7809 {
7810 enum built_in_function lib;
7811 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7812 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7813 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7814 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7815 ignore, lib);
7816 if (target)
7817 return target;
7818 break;
7819 }
7820 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7821 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7822 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7823 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7824 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7825 {
7826 enum built_in_function lib;
7827 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7828 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7829 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7830 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7831 ignore, lib);
7832 if (target)
7833 return target;
7834 break;
7835 }
7836 case BUILT_IN_ATOMIC_OR_FETCH_1:
7837 case BUILT_IN_ATOMIC_OR_FETCH_2:
7838 case BUILT_IN_ATOMIC_OR_FETCH_4:
7839 case BUILT_IN_ATOMIC_OR_FETCH_8:
7840 case BUILT_IN_ATOMIC_OR_FETCH_16:
7841 {
7842 enum built_in_function lib;
7843 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7844 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7845 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7846 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7847 ignore, lib);
7848 if (target)
7849 return target;
7850 break;
7851 }
7852 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7853 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7854 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7855 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7856 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7858 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7859 ignore, BUILT_IN_NONE);
7860 if (target)
7861 return target;
7862 break;
7863
7864 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7865 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7866 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7867 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7868 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7869 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7870 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7871 ignore, BUILT_IN_NONE);
7872 if (target)
7873 return target;
7874 break;
7875
7876 case BUILT_IN_ATOMIC_FETCH_AND_1:
7877 case BUILT_IN_ATOMIC_FETCH_AND_2:
7878 case BUILT_IN_ATOMIC_FETCH_AND_4:
7879 case BUILT_IN_ATOMIC_FETCH_AND_8:
7880 case BUILT_IN_ATOMIC_FETCH_AND_16:
7881 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7882 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7883 ignore, BUILT_IN_NONE);
7884 if (target)
7885 return target;
7886 break;
7887
7888 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7889 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7890 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7891 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7892 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7893 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7894 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7895 ignore, BUILT_IN_NONE);
7896 if (target)
7897 return target;
7898 break;
7899
7900 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7901 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7902 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7903 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7904 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7905 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7906 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7907 ignore, BUILT_IN_NONE);
7908 if (target)
7909 return target;
7910 break;
7911
7912 case BUILT_IN_ATOMIC_FETCH_OR_1:
7913 case BUILT_IN_ATOMIC_FETCH_OR_2:
7914 case BUILT_IN_ATOMIC_FETCH_OR_4:
7915 case BUILT_IN_ATOMIC_FETCH_OR_8:
7916 case BUILT_IN_ATOMIC_FETCH_OR_16:
7917 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7918 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7919 ignore, BUILT_IN_NONE);
7920 if (target)
7921 return target;
7922 break;
7923
7924 case BUILT_IN_ATOMIC_TEST_AND_SET:
7925 return expand_builtin_atomic_test_and_set (exp, target);
7926
7927 case BUILT_IN_ATOMIC_CLEAR:
7928 return expand_builtin_atomic_clear (exp);
7929
7930 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7931 return expand_builtin_atomic_always_lock_free (exp);
7932
7933 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7934 target = expand_builtin_atomic_is_lock_free (exp);
7935 if (target)
7936 return target;
7937 break;
7938
7939 case BUILT_IN_ATOMIC_THREAD_FENCE:
7940 expand_builtin_atomic_thread_fence (exp);
7941 return const0_rtx;
7942
7943 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7944 expand_builtin_atomic_signal_fence (exp);
7945 return const0_rtx;
7946
7947 case BUILT_IN_OBJECT_SIZE:
7948 return expand_builtin_object_size (exp);
7949
7950 case BUILT_IN_MEMCPY_CHK:
7951 case BUILT_IN_MEMPCPY_CHK:
7952 case BUILT_IN_MEMMOVE_CHK:
7953 case BUILT_IN_MEMSET_CHK:
7954 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7955 if (target)
7956 return target;
7957 break;
7958
7959 case BUILT_IN_STRCPY_CHK:
7960 case BUILT_IN_STPCPY_CHK:
7961 case BUILT_IN_STRNCPY_CHK:
7962 case BUILT_IN_STPNCPY_CHK:
7963 case BUILT_IN_STRCAT_CHK:
7964 case BUILT_IN_STRNCAT_CHK:
7965 case BUILT_IN_SNPRINTF_CHK:
7966 case BUILT_IN_VSNPRINTF_CHK:
7967 maybe_emit_chk_warning (exp, fcode);
7968 break;
7969
7970 case BUILT_IN_SPRINTF_CHK:
7971 case BUILT_IN_VSPRINTF_CHK:
7972 maybe_emit_sprintf_chk_warning (exp, fcode);
7973 break;
7974
7975 case BUILT_IN_FREE:
7976 if (warn_free_nonheap_object)
7977 maybe_emit_free_warning (exp);
7978 break;
7979
7980 case BUILT_IN_THREAD_POINTER:
7981 return expand_builtin_thread_pointer (exp, target);
7982
7983 case BUILT_IN_SET_THREAD_POINTER:
7984 expand_builtin_set_thread_pointer (exp);
7985 return const0_rtx;
7986
7987 case BUILT_IN_ACC_ON_DEVICE:
7988 /* Do a library call if we failed to expand the builtin when
7989 folding. */
7990 break;
7991
7992 case BUILT_IN_GOACC_PARLEVEL_ID:
7993 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7994 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7995
7996 default: /* Just do a library call for an unknown builtin.  */
7997 break;
7998 }
7999
8000 /* The switch statement above can drop through to cause the function
8001 to be called normally. */
8002 return expand_call (exp, target, ignore);
8003 }
8004
8005 /* Determine whether a tree node represents a call to a built-in
8006 function. If the tree T is a call to a built-in function with
8007 the right number of arguments of the appropriate types, return
8008 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8009 Otherwise the return value is END_BUILTINS. */
8010
8011 enum built_in_function
8012 builtin_mathfn_code (const_tree t)
8013 {
8014 const_tree fndecl, arg, parmlist;
8015 const_tree argtype, parmtype;
8016 const_call_expr_arg_iterator iter;
8017
8018 if (TREE_CODE (t) != CALL_EXPR)
8019 return END_BUILTINS;
8020
8021 fndecl = get_callee_fndecl (t);
8022 if (fndecl == NULL_TREE
8023 || TREE_CODE (fndecl) != FUNCTION_DECL
8024 || ! DECL_BUILT_IN (fndecl)
8025 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8026 return END_BUILTINS;
8027
8028 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8029 init_const_call_expr_arg_iterator (t, &iter);
8030 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8031 {
8032 /* If a function doesn't take a variable number of arguments,
8033 the last element in the list will have type `void'. */
8034 parmtype = TREE_VALUE (parmlist);
8035 if (VOID_TYPE_P (parmtype))
8036 {
8037 if (more_const_call_expr_args_p (&iter))
8038 return END_BUILTINS;
8039 return DECL_FUNCTION_CODE (fndecl);
8040 }
8041
8042 if (! more_const_call_expr_args_p (&iter))
8043 return END_BUILTINS;
8044
8045 arg = next_const_call_expr_arg (&iter);
8046 argtype = TREE_TYPE (arg);
8047
8048 if (SCALAR_FLOAT_TYPE_P (parmtype))
8049 {
8050 if (! SCALAR_FLOAT_TYPE_P (argtype))
8051 return END_BUILTINS;
8052 }
8053 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8054 {
8055 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8056 return END_BUILTINS;
8057 }
8058 else if (POINTER_TYPE_P (parmtype))
8059 {
8060 if (! POINTER_TYPE_P (argtype))
8061 return END_BUILTINS;
8062 }
8063 else if (INTEGRAL_TYPE_P (parmtype))
8064 {
8065 if (! INTEGRAL_TYPE_P (argtype))
8066 return END_BUILTINS;
8067 }
8068 else
8069 return END_BUILTINS;
8070 }
8071
8072 /* Variable-length argument list. */
8073 return DECL_FUNCTION_CODE (fndecl);
8074 }
8075
8076 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8077 evaluate to a constant. */
8078
8079 static tree
8080 fold_builtin_constant_p (tree arg)
8081 {
8082 /* We return 1 for a numeric type that's known to be a constant
8083 value at compile-time or for an aggregate type that's a
8084 literal constant. */
8085 STRIP_NOPS (arg);
8086
8087 /* If we know this is a constant, return the constant one.  */
8088 if (CONSTANT_CLASS_P (arg)
8089 || (TREE_CODE (arg) == CONSTRUCTOR
8090 && TREE_CONSTANT (arg)))
8091 return integer_one_node;
8092 if (TREE_CODE (arg) == ADDR_EXPR)
8093 {
8094 tree op = TREE_OPERAND (arg, 0);
8095 if (TREE_CODE (op) == STRING_CST
8096 || (TREE_CODE (op) == ARRAY_REF
8097 && integer_zerop (TREE_OPERAND (op, 1))
8098 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8099 return integer_one_node;
8100 }
8101
8102 /* If this expression has side effects, show we don't know it to be a
8103 constant. Likewise if it's a pointer or aggregate type since in
8104 those cases we only want literals, since those are only optimized
8105 when generating RTL, not later.
8106 And finally, if we are compiling an initializer, not code, we
8107 need to return a definite result now; there's not going to be any
8108 more optimization done. */
8109 if (TREE_SIDE_EFFECTS (arg)
8110 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8111 || POINTER_TYPE_P (TREE_TYPE (arg))
8112 || cfun == 0
8113 || folding_initializer
8114 || force_folding_builtin_constant_p)
8115 return integer_zero_node;
8116
8117 return NULL_TREE;
8118 }
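
/* A few illustrative cases of the folding above, assuming we are folding
   inside a function body rather than an initializer:

     __builtin_constant_p (42)      -> 1  (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")   -> 1  (address of a STRING_CST)
     __builtin_constant_p (ptr)     -> 0  (pointer type; only literals count)
     __builtin_constant_p (x + y)   -> NULL_TREE, i.e. deferred in the hope
                                       that later optimization proves it
                                       constant.  */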
8119
8120 /* Create builtin_expect with PRED and EXPECTED as its arguments and
8121 return it as a truthvalue. */
8122
8123 static tree
8124 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8125 tree predictor)
8126 {
8127 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8128
8129 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
8130 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8131 ret_type = TREE_TYPE (TREE_TYPE (fn));
8132 pred_type = TREE_VALUE (arg_types);
8133 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8134
8135 pred = fold_convert_loc (loc, pred_type, pred);
8136 expected = fold_convert_loc (loc, expected_type, expected);
8137 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8138 predictor);
8139
8140 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8141 build_int_cst (ret_type, 0));
8142 }
8143
8144 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
8145 NULL_TREE if no simplification is possible. */
8146
8147 tree
8148 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
8149 {
8150 tree inner, fndecl, inner_arg0;
8151 enum tree_code code;
8152
8153 /* Distribute the expected value over short-circuiting operators.
8154 See through the cast from truthvalue_type_node to long. */
8155 inner_arg0 = arg0;
8156 while (CONVERT_EXPR_P (inner_arg0)
8157 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8158 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8159 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8160
8161 /* If this is a builtin_expect within a builtin_expect keep the
8162 inner one. See through a comparison against a constant. It
8163 might have been added to create a truthvalue.
8164 inner = inner_arg0;
8165
8166 if (COMPARISON_CLASS_P (inner)
8167 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8168 inner = TREE_OPERAND (inner, 0);
8169
8170 if (TREE_CODE (inner) == CALL_EXPR
8171 && (fndecl = get_callee_fndecl (inner))
8172 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8173 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8174 return arg0;
8175
8176 inner = inner_arg0;
8177 code = TREE_CODE (inner);
8178 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8179 {
8180 tree op0 = TREE_OPERAND (inner, 0);
8181 tree op1 = TREE_OPERAND (inner, 1);
8182 arg1 = save_expr (arg1);
8183
8184 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8185 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8186 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8187
8188 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8189 }
8190
8191 /* If the argument isn't invariant then there's nothing else we can do. */
8192 if (!TREE_CONSTANT (inner_arg0))
8193 return NULL_TREE;
8194
8195 /* If we expect that a comparison against the argument will fold to
8196 a constant, return the constant. In practice, this means a true
8197 constant or the address of a non-weak symbol. */
8198 inner = inner_arg0;
8199 STRIP_NOPS (inner);
8200 if (TREE_CODE (inner) == ADDR_EXPR)
8201 {
8202 do
8203 {
8204 inner = TREE_OPERAND (inner, 0);
8205 }
8206 while (TREE_CODE (inner) == COMPONENT_REF
8207 || TREE_CODE (inner) == ARRAY_REF);
8208 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8209 return NULL_TREE;
8210 }
8211
8212 /* Otherwise, ARG0 already has the proper type for the return value. */
8213 return arg0;
8214 }
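
/* As an illustration of the distribution above, an expression such as

     __builtin_expect (a && b, 1)

   is folded into (modulo the conversions back to long)

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each short-circuited operand carries its own expectation.  */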
8215
8216 /* Fold a call to __builtin_classify_type with argument ARG. */
8217
8218 static tree
8219 fold_builtin_classify_type (tree arg)
8220 {
8221 if (arg == 0)
8222 return build_int_cst (integer_type_node, no_type_class);
8223
8224 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8225 }
8226
8227 /* Fold a call to __builtin_strlen with argument ARG. */
8228
8229 static tree
8230 fold_builtin_strlen (location_t loc, tree type, tree arg)
8231 {
8232 if (!validate_arg (arg, POINTER_TYPE))
8233 return NULL_TREE;
8234 else
8235 {
8236 tree len = c_strlen (arg, 0);
8237
8238 if (len)
8239 return fold_convert_loc (loc, type, len);
8240
8241 return NULL_TREE;
8242 }
8243 }
8244
8245 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8246
8247 static tree
8248 fold_builtin_inf (location_t loc, tree type, int warn)
8249 {
8250 REAL_VALUE_TYPE real;
8251
8252 /* __builtin_inff is intended to be usable to define INFINITY on all
8253 targets. If an infinity is not available, INFINITY expands "to a
8254 positive constant of type float that overflows at translation
8255 time", footnote "In this case, using INFINITY will violate the
8256 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8257 Thus we pedwarn to ensure this constraint violation is
8258 diagnosed. */
8259 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8260 pedwarn (loc, 0, "target format does not support infinity");
8261
8262 real_inf (&real);
8263 return build_real (type, real);
8264 }
8265
8266 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8267 NULL_TREE if no simplification can be made. */
8268
8269 static tree
8270 fold_builtin_sincos (location_t loc,
8271 tree arg0, tree arg1, tree arg2)
8272 {
8273 tree type;
8274 tree fndecl, call = NULL_TREE;
8275
8276 if (!validate_arg (arg0, REAL_TYPE)
8277 || !validate_arg (arg1, POINTER_TYPE)
8278 || !validate_arg (arg2, POINTER_TYPE))
8279 return NULL_TREE;
8280
8281 type = TREE_TYPE (arg0);
8282
8283 /* Calculate the result when the argument is a constant. */
8284 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8285 if (fn == END_BUILTINS)
8286 return NULL_TREE;
8287
8288 /* Canonicalize sincos to cexpi. */
8289 if (TREE_CODE (arg0) == REAL_CST)
8290 {
8291 tree complex_type = build_complex_type (type);
8292 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8293 }
8294 if (!call)
8295 {
8296 if (!targetm.libc_has_function (function_c99_math_complex)
8297 || !builtin_decl_implicit_p (fn))
8298 return NULL_TREE;
8299 fndecl = builtin_decl_explicit (fn);
8300 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8301 call = builtin_save_expr (call);
8302 }
8303
8304 tree ptype = build_pointer_type (type);
8305 arg1 = fold_convert (ptype, arg1);
8306 arg2 = fold_convert (ptype, arg2);
8307 return build2 (COMPOUND_EXPR, void_type_node,
8308 build2 (MODIFY_EXPR, void_type_node,
8309 build_fold_indirect_ref_loc (loc, arg1),
8310 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8311 build2 (MODIFY_EXPR, void_type_node,
8312 build_fold_indirect_ref_loc (loc, arg2),
8313 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8314 }
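
/* Roughly, the canonicalization above turns

     sincos (x, &s, &c);

   into the equivalent of

     tmp = cexpi (x);   (or a folded complex constant when x is a REAL_CST)
     s = __imag__ tmp;
     c = __real__ tmp;

   provided the C99 complex math functions are available.  */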
8315
8316 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8317 Return NULL_TREE if no simplification can be made. */
8318
8319 static tree
8320 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8321 {
8322 if (!validate_arg (arg1, POINTER_TYPE)
8323 || !validate_arg (arg2, POINTER_TYPE)
8324 || !validate_arg (len, INTEGER_TYPE))
8325 return NULL_TREE;
8326
8327 /* If the LEN parameter is zero, return zero. */
8328 if (integer_zerop (len))
8329 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8330 arg1, arg2);
8331
8332 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8333 if (operand_equal_p (arg1, arg2, 0))
8334 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8335
8336 /* If the len parameter is one, return an expression corresponding to
8337 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8338 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8339 {
8340 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8341 tree cst_uchar_ptr_node
8342 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8343
8344 tree ind1
8345 = fold_convert_loc (loc, integer_type_node,
8346 build1 (INDIRECT_REF, cst_uchar_node,
8347 fold_convert_loc (loc,
8348 cst_uchar_ptr_node,
8349 arg1)));
8350 tree ind2
8351 = fold_convert_loc (loc, integer_type_node,
8352 build1 (INDIRECT_REF, cst_uchar_node,
8353 fold_convert_loc (loc,
8354 cst_uchar_ptr_node,
8355 arg2)));
8356 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8357 }
8358
8359 return NULL_TREE;
8360 }
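
/* Concretely, the simplifications above give, for example:

     memcmp (p, q, 0)  ->  0    (zero length)
     memcmp (p, p, n)  ->  0    (identical operands)
     memcmp (p, q, 1)  ->  *(const unsigned char *) p
                           - *(const unsigned char *) q

   while still evaluating any side effects of the operands that are
   dropped from the result.  */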
8361
8362 /* Fold a call to builtin isascii with argument ARG. */
8363
8364 static tree
8365 fold_builtin_isascii (location_t loc, tree arg)
8366 {
8367 if (!validate_arg (arg, INTEGER_TYPE))
8368 return NULL_TREE;
8369 else
8370 {
8371 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8372 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8373 build_int_cst (integer_type_node,
8374 ~ (unsigned HOST_WIDE_INT) 0x7f));
8375 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8376 arg, integer_zero_node);
8377 }
8378 }
8379
8380 /* Fold a call to builtin toascii with argument ARG. */
8381
8382 static tree
8383 fold_builtin_toascii (location_t loc, tree arg)
8384 {
8385 if (!validate_arg (arg, INTEGER_TYPE))
8386 return NULL_TREE;
8387
8388 /* Transform toascii(c) -> (c & 0x7f). */
8389 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8390 build_int_cst (integer_type_node, 0x7f));
8391 }
8392
8393 /* Fold a call to builtin isdigit with argument ARG. */
8394
8395 static tree
8396 fold_builtin_isdigit (location_t loc, tree arg)
8397 {
8398 if (!validate_arg (arg, INTEGER_TYPE))
8399 return NULL_TREE;
8400 else
8401 {
8402 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8403 /* According to the C standard, isdigit is unaffected by locale.
8404 However, it definitely is affected by the target character set. */
8405 unsigned HOST_WIDE_INT target_digit0
8406 = lang_hooks.to_target_charset ('0');
8407
8408 if (target_digit0 == 0)
8409 return NULL_TREE;
8410
8411 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8412 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8413 build_int_cst (unsigned_type_node, target_digit0));
8414 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8415 build_int_cst (unsigned_type_node, 9));
8416 }
8417 }
8418
8419 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8420
8421 static tree
8422 fold_builtin_fabs (location_t loc, tree arg, tree type)
8423 {
8424 if (!validate_arg (arg, REAL_TYPE))
8425 return NULL_TREE;
8426
8427 arg = fold_convert_loc (loc, type, arg);
8428 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8429 }
8430
8431 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8432
8433 static tree
8434 fold_builtin_abs (location_t loc, tree arg, tree type)
8435 {
8436 if (!validate_arg (arg, INTEGER_TYPE))
8437 return NULL_TREE;
8438
8439 arg = fold_convert_loc (loc, type, arg);
8440 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8441 }
8442
8443 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8444
8445 static tree
8446 fold_builtin_carg (location_t loc, tree arg, tree type)
8447 {
8448 if (validate_arg (arg, COMPLEX_TYPE)
8449 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8450 {
8451 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8452
8453 if (atan2_fn)
8454 {
8455 tree new_arg = builtin_save_expr (arg);
8456 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8457 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8458 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8459 }
8460 }
8461
8462 return NULL_TREE;
8463 }
8464
8465 /* Fold a call to builtin frexp. We can assume the base is 2.  */
8466
8467 static tree
8468 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8469 {
8470 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8471 return NULL_TREE;
8472
8473 STRIP_NOPS (arg0);
8474
8475 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8476 return NULL_TREE;
8477
8478 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8479
8480 /* Proceed if a valid pointer type was passed in. */
8481 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8482 {
8483 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8484 tree frac, exp;
8485
8486 switch (value->cl)
8487 {
8488 case rvc_zero:
8489 /* For +-0, return (*exp = 0, +-0). */
8490 exp = integer_zero_node;
8491 frac = arg0;
8492 break;
8493 case rvc_nan:
8494 case rvc_inf:
8495 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8496 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8497 case rvc_normal:
8498 {
8499 /* Since the frexp function always expects base 2, and in
8500 GCC normalized significands are already in the range
8501 [0.5, 1.0), we have exactly what frexp wants. */
8502 REAL_VALUE_TYPE frac_rvt = *value;
8503 SET_REAL_EXP (&frac_rvt, 0);
8504 frac = build_real (rettype, frac_rvt);
8505 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8506 }
8507 break;
8508 default:
8509 gcc_unreachable ();
8510 }
8511
8512 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8513 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8514 TREE_SIDE_EFFECTS (arg1) = 1;
8515 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8516 }
8517
8518 return NULL_TREE;
8519 }
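/* As a worked example, the constant call frexp (8.0, &e) is folded here
   to the expression (*e = 4, 0.5), since 8.0 == 0.5 * 2**4 and GCC's
   normalized significands already lie in [0.5, 1.0).  */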
8520
8521 /* Fold a call to builtin modf. */
8522
8523 static tree
8524 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8525 {
8526 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8527 return NULL_TREE;
8528
8529 STRIP_NOPS (arg0);
8530
8531 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8532 return NULL_TREE;
8533
8534 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8535
8536 /* Proceed if a valid pointer type was passed in. */
8537 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8538 {
8539 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8540 REAL_VALUE_TYPE trunc, frac;
8541
8542 switch (value->cl)
8543 {
8544 case rvc_nan:
8545 case rvc_zero:
8546 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8547 trunc = frac = *value;
8548 break;
8549 case rvc_inf:
8550 /* For +-Inf, return (*arg1 = arg0, +-0). */
8551 frac = dconst0;
8552 frac.sign = value->sign;
8553 trunc = *value;
8554 break;
8555 case rvc_normal:
8556 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8557 real_trunc (&trunc, VOIDmode, value);
8558 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8559 /* If the original number was negative and already
8560 integral, then the fractional part is -0.0. */
8561 if (value->sign && frac.cl == rvc_zero)
8562 frac.sign = value->sign;
8563 break;
8564 }
8565
8566 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8567 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8568 build_real (rettype, trunc));
8569 TREE_SIDE_EFFECTS (arg1) = 1;
8570 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8571 build_real (rettype, frac));
8572 }
8573
8574 return NULL_TREE;
8575 }
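/* As a worked example, the constant call modf (2.5, &iptr) is folded to
   (*iptr = 2.0, 0.5), and modf (-3.0, &iptr) to (*iptr = -3.0, -0.0),
   the fractional part keeping the sign of an already integral input.  */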
8576
8577 /* Given a location LOC, an interclass builtin function decl FNDECL
8578 and its single argument ARG, return a folded expression computing
8579 the same, or NULL_TREE if we either couldn't or didn't want to fold
8580 (the latter happens if there's an RTL instruction available). */
8581
8582 static tree
8583 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8584 {
8585 machine_mode mode;
8586
8587 if (!validate_arg (arg, REAL_TYPE))
8588 return NULL_TREE;
8589
8590 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8591 return NULL_TREE;
8592
8593 mode = TYPE_MODE (TREE_TYPE (arg));
8594
8595 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8596
8597 /* If there is no optab, try generic code. */
8598 switch (DECL_FUNCTION_CODE (fndecl))
8599 {
8600 tree result;
8601
8602 CASE_FLT_FN (BUILT_IN_ISINF):
8603 {
8604 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8605 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8606 tree type = TREE_TYPE (arg);
8607 REAL_VALUE_TYPE r;
8608 char buf[128];
8609
8610 if (is_ibm_extended)
8611 {
8612 /* NaN and Inf are encoded in the high-order double value
8613 only. The low-order value is not significant. */
8614 type = double_type_node;
8615 mode = DFmode;
8616 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8617 }
8618 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8619 real_from_string (&r, buf);
8620 result = build_call_expr (isgr_fn, 2,
8621 fold_build1_loc (loc, ABS_EXPR, type, arg),
8622 build_real (type, r));
8623 return result;
8624 }
8625 CASE_FLT_FN (BUILT_IN_FINITE):
8626 case BUILT_IN_ISFINITE:
8627 {
8628 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8629 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8630 tree type = TREE_TYPE (arg);
8631 REAL_VALUE_TYPE r;
8632 char buf[128];
8633
8634 if (is_ibm_extended)
8635 {
8636 /* NaN and Inf are encoded in the high-order double value
8637 only. The low-order value is not significant. */
8638 type = double_type_node;
8639 mode = DFmode;
8640 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8641 }
8642 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8643 real_from_string (&r, buf);
8644 result = build_call_expr (isle_fn, 2,
8645 fold_build1_loc (loc, ABS_EXPR, type, arg),
8646 build_real (type, r));
8647 /*result = fold_build2_loc (loc, UNGT_EXPR,
8648 TREE_TYPE (TREE_TYPE (fndecl)),
8649 fold_build1_loc (loc, ABS_EXPR, type, arg),
8650 build_real (type, r));
8651 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8652 TREE_TYPE (TREE_TYPE (fndecl)),
8653 result);*/
8654 return result;
8655 }
8656 case BUILT_IN_ISNORMAL:
8657 {
8658 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8659 islessequal(fabs(x),DBL_MAX). */
8660 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8661 tree type = TREE_TYPE (arg);
8662 tree orig_arg, max_exp, min_exp;
8663 machine_mode orig_mode = mode;
8664 REAL_VALUE_TYPE rmax, rmin;
8665 char buf[128];
8666
8667 orig_arg = arg = builtin_save_expr (arg);
8668 if (is_ibm_extended)
8669 {
8670 /* Use double to test the normal range of IBM extended
8671 precision. Emin for IBM extended precision is
8672 different to emin for IEEE double, being 53 higher
8673 since the low double exponent is at least 53 lower
8674 than the high double exponent. */
8675 type = double_type_node;
8676 mode = DFmode;
8677 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8678 }
8679 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8680
8681 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8682 real_from_string (&rmax, buf);
8683 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8684 real_from_string (&rmin, buf);
8685 max_exp = build_real (type, rmax);
8686 min_exp = build_real (type, rmin);
8687
8688 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8689 if (is_ibm_extended)
8690 {
8691 /* Testing the high end of the range is done just using
8692 the high double, using the same test as isfinite().
8693 For the subnormal end of the range we first test the
8694 high double, then if its magnitude is equal to the
8695 limit of 0x1p-969, we test whether the low double is
8696 non-zero and opposite sign to the high double. */
8697 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8698 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8699 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8700 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8701 arg, min_exp);
8702 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8703 complex_double_type_node, orig_arg);
8704 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8705 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8706 tree zero = build_real (type, dconst0);
8707 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8708 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8709 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8710 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8711 fold_build3 (COND_EXPR,
8712 integer_type_node,
8713 hilt, logt, lolt));
8714 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8715 eq_min, ok_lo);
8716 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8717 gt_min, eq_min);
8718 }
8719 else
8720 {
8721 tree const isge_fn
8722 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8723 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8724 }
8725 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8726 max_exp, min_exp);
8727 return result;
8728 }
8729 default:
8730 break;
8731 }
8732
8733 return NULL_TREE;
8734 }
8735
8736 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8737 ARG is the argument for the call. */
8738
8739 static tree
8740 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8741 {
8742 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8743
8744 if (!validate_arg (arg, REAL_TYPE))
8745 return NULL_TREE;
8746
8747 switch (builtin_index)
8748 {
8749 case BUILT_IN_ISINF:
8750 if (!HONOR_INFINITIES (arg))
8751 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8752
8753 return NULL_TREE;
8754
8755 case BUILT_IN_ISINF_SIGN:
8756 {
8757 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8758 /* In a boolean context, GCC will fold the inner COND_EXPR to
8759 1. So e.g. "if (isinf_sign(x))" would be folded to just
8760 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8761 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8762 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8763 tree tmp = NULL_TREE;
8764
8765 arg = builtin_save_expr (arg);
8766
8767 if (signbit_fn && isinf_fn)
8768 {
8769 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8770 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8771
8772 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8773 signbit_call, integer_zero_node);
8774 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8775 isinf_call, integer_zero_node);
8776
8777 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8778 integer_minus_one_node, integer_one_node);
8779 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8780 isinf_call, tmp,
8781 integer_zero_node);
8782 }
8783
8784 return tmp;
8785 }
8786
8787 case BUILT_IN_ISFINITE:
8788 if (!HONOR_NANS (arg)
8789 && !HONOR_INFINITIES (arg))
8790 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8791
8792 return NULL_TREE;
8793
8794 case BUILT_IN_ISNAN:
8795 if (!HONOR_NANS (arg))
8796 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8797
8798 {
8799 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8800 if (is_ibm_extended)
8801 {
8802 /* NaN and Inf are encoded in the high-order double value
8803 only. The low-order value is not significant. */
8804 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8805 }
8806 }
8807 arg = builtin_save_expr (arg);
8808 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8809
8810 default:
8811 gcc_unreachable ();
8812 }
8813 }
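/* For example, isnan (x) is folded here to the self-comparison
   x UNORDERED x (the equivalent of __builtin_isunordered (x, x)).
   Under -ffinite-math-only, isnan (x) and isinf (x) fold to 0 and
   isfinite (x) folds to 1, keeping the argument only for its side
   effects.  */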
8814
8815 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8816 This builtin will generate code to return the appropriate floating
8817 point classification depending on the value of the floating point
8818 number passed in. The possible return values must be supplied as
8819 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8820 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8821 one floating point argument which is "type generic". */
8822
8823 static tree
8824 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8825 {
8826 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8827 arg, type, res, tmp;
8828 machine_mode mode;
8829 REAL_VALUE_TYPE r;
8830 char buf[128];
8831
8832 /* Verify the required arguments in the original call. */
8833 if (nargs != 6
8834 || !validate_arg (args[0], INTEGER_TYPE)
8835 || !validate_arg (args[1], INTEGER_TYPE)
8836 || !validate_arg (args[2], INTEGER_TYPE)
8837 || !validate_arg (args[3], INTEGER_TYPE)
8838 || !validate_arg (args[4], INTEGER_TYPE)
8839 || !validate_arg (args[5], REAL_TYPE))
8840 return NULL_TREE;
8841
8842 fp_nan = args[0];
8843 fp_infinite = args[1];
8844 fp_normal = args[2];
8845 fp_subnormal = args[3];
8846 fp_zero = args[4];
8847 arg = args[5];
8848 type = TREE_TYPE (arg);
8849 mode = TYPE_MODE (type);
8850 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8851
8852 /* fpclassify(x) ->
8853 isnan(x) ? FP_NAN :
8854 (fabs(x) == Inf ? FP_INFINITE :
8855 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8856 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8857
8858 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8859 build_real (type, dconst0));
8860 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8861 tmp, fp_zero, fp_subnormal);
8862
8863 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8864 real_from_string (&r, buf);
8865 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8866 arg, build_real (type, r));
8867 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8868
8869 if (HONOR_INFINITIES (mode))
8870 {
8871 real_inf (&r);
8872 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8873 build_real (type, r));
8874 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8875 fp_infinite, res);
8876 }
8877
8878 if (HONOR_NANS (mode))
8879 {
8880 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8881 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8882 }
8883
8884 return res;
8885 }
8886
8887 /* Fold a call to an unordered comparison function such as
8888 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8889 being called and ARG0 and ARG1 are the arguments for the call.
8890 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8891 the opposite of the desired result. UNORDERED_CODE is used
8892 for modes that can hold NaNs and ORDERED_CODE is used for
8893 the rest. */
8894
8895 static tree
8896 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8897 enum tree_code unordered_code,
8898 enum tree_code ordered_code)
8899 {
8900 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8901 enum tree_code code;
8902 tree type0, type1;
8903 enum tree_code code0, code1;
8904 tree cmp_type = NULL_TREE;
8905
8906 type0 = TREE_TYPE (arg0);
8907 type1 = TREE_TYPE (arg1);
8908
8909 code0 = TREE_CODE (type0);
8910 code1 = TREE_CODE (type1);
8911
8912 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8913 /* Choose the wider of two real types. */
8914 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8915 ? type0 : type1;
8916 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8917 cmp_type = type0;
8918 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8919 cmp_type = type1;
8920
8921 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8922 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8923
8924 if (unordered_code == UNORDERED_EXPR)
8925 {
8926 if (!HONOR_NANS (arg0))
8927 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8928 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8929 }
8930
8931 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8932 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8933 fold_build2_loc (loc, code, type, arg0, arg1));
8934 }
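/* For example, __builtin_isgreater (x, y) is folded to !(x u<= y) when
   NaNs must be honored and to the plain !(x <= y) under
   -ffinite-math-only; __builtin_isunordered (x, y) folds to 0 in the
   latter case.  */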
8935
8936 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8937 arithmetic if it can never overflow, or into internal functions that
8938 return both the result of the arithmetic and an overflow flag in
8939 a complex integer result, or some other check for overflow.
8940 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8941 checking part of that. */
8942
8943 static tree
8944 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8945 tree arg0, tree arg1, tree arg2)
8946 {
8947 enum internal_fn ifn = IFN_LAST;
8948 /* The code of the expression corresponding to the type-generic
8949 built-in, or ERROR_MARK for the type-specific ones. */
8950 enum tree_code opcode = ERROR_MARK;
8951 bool ovf_only = false;
8952
8953 switch (fcode)
8954 {
8955 case BUILT_IN_ADD_OVERFLOW_P:
8956 ovf_only = true;
8957 /* FALLTHRU */
8958 case BUILT_IN_ADD_OVERFLOW:
8959 opcode = PLUS_EXPR;
8960 /* FALLTHRU */
8961 case BUILT_IN_SADD_OVERFLOW:
8962 case BUILT_IN_SADDL_OVERFLOW:
8963 case BUILT_IN_SADDLL_OVERFLOW:
8964 case BUILT_IN_UADD_OVERFLOW:
8965 case BUILT_IN_UADDL_OVERFLOW:
8966 case BUILT_IN_UADDLL_OVERFLOW:
8967 ifn = IFN_ADD_OVERFLOW;
8968 break;
8969 case BUILT_IN_SUB_OVERFLOW_P:
8970 ovf_only = true;
8971 /* FALLTHRU */
8972 case BUILT_IN_SUB_OVERFLOW:
8973 opcode = MINUS_EXPR;
8974 /* FALLTHRU */
8975 case BUILT_IN_SSUB_OVERFLOW:
8976 case BUILT_IN_SSUBL_OVERFLOW:
8977 case BUILT_IN_SSUBLL_OVERFLOW:
8978 case BUILT_IN_USUB_OVERFLOW:
8979 case BUILT_IN_USUBL_OVERFLOW:
8980 case BUILT_IN_USUBLL_OVERFLOW:
8981 ifn = IFN_SUB_OVERFLOW;
8982 break;
8983 case BUILT_IN_MUL_OVERFLOW_P:
8984 ovf_only = true;
8985 /* FALLTHRU */
8986 case BUILT_IN_MUL_OVERFLOW:
8987 opcode = MULT_EXPR;
8988 /* FALLTHRU */
8989 case BUILT_IN_SMUL_OVERFLOW:
8990 case BUILT_IN_SMULL_OVERFLOW:
8991 case BUILT_IN_SMULLL_OVERFLOW:
8992 case BUILT_IN_UMUL_OVERFLOW:
8993 case BUILT_IN_UMULL_OVERFLOW:
8994 case BUILT_IN_UMULLL_OVERFLOW:
8995 ifn = IFN_MUL_OVERFLOW;
8996 break;
8997 default:
8998 gcc_unreachable ();
8999 }
9000
9001 /* For the "generic" overloads, the first two arguments can have different
9002 types and the last argument determines the target type to use to check
9003 for overflow. The arguments of the other overloads all have the same
9004 type. */
9005 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9006
9007 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9008 arguments are constant, attempt to fold the built-in call into a constant
9009 expression indicating whether or not it detected an overflow. */
9010 if (ovf_only
9011 && TREE_CODE (arg0) == INTEGER_CST
9012 && TREE_CODE (arg1) == INTEGER_CST)
9013 /* Perform the computation in the target type and check for overflow. */
9014 return omit_one_operand_loc (loc, boolean_type_node,
9015 arith_overflowed_p (opcode, type, arg0, arg1)
9016 ? boolean_true_node : boolean_false_node,
9017 arg2);
9018
9019 tree ctype = build_complex_type (type);
9020 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9021 2, arg0, arg1);
9022 tree tgt = save_expr (call);
9023 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9024 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9025 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9026
9027 if (ovf_only)
9028 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9029
9030 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9031 tree store
9032 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9033 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9034 }
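/* As an illustration, __builtin_add_overflow (a, b, &res) is folded
   roughly into
     tmp = .ADD_OVERFLOW (a, b);
     *res = REALPART_EXPR <tmp>, (_Bool) IMAGPART_EXPR <tmp>;
   while __builtin_add_overflow_p with two constant operands, e.g.
   __builtin_add_overflow_p (INT_MAX, 1, (int) 0), folds directly to
   the constant true.  */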
9035
9036 /* Fold a call to __builtin_FILE to a constant string. */
9037
9038 static inline tree
9039 fold_builtin_FILE (location_t loc)
9040 {
9041 if (const char *fname = LOCATION_FILE (loc))
9042 {
9043 /* The documentation says this builtin is equivalent to the preprocessor
9044 __FILE__ macro so it appears appropriate to use the same file prefix
9045 mappings. */
9046 fname = remap_macro_filename (fname);
9047 return build_string_literal (strlen (fname) + 1, fname);
9048 }
9049
9050 return build_string_literal (1, "");
9051 }
9052
9053 /* Fold a call to __builtin_FUNCTION to a constant string. */
9054
9055 static inline tree
9056 fold_builtin_FUNCTION ()
9057 {
9058 const char *name = "";
9059
9060 if (current_function_decl)
9061 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9062
9063 return build_string_literal (strlen (name) + 1, name);
9064 }
9065
9066 /* Fold a call to __builtin_LINE to an integer constant. */
9067
9068 static inline tree
9069 fold_builtin_LINE (location_t loc, tree type)
9070 {
9071 return build_int_cst (type, LOCATION_LINE (loc));
9072 }
9073
9074 /* Fold a call to built-in function FNDECL with 0 arguments.
9075 This function returns NULL_TREE if no simplification was possible. */
9076
9077 static tree
9078 fold_builtin_0 (location_t loc, tree fndecl)
9079 {
9080 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9081 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9082 switch (fcode)
9083 {
9084 case BUILT_IN_FILE:
9085 return fold_builtin_FILE (loc);
9086
9087 case BUILT_IN_FUNCTION:
9088 return fold_builtin_FUNCTION ();
9089
9090 case BUILT_IN_LINE:
9091 return fold_builtin_LINE (loc, type);
9092
9093 CASE_FLT_FN (BUILT_IN_INF):
9094 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9095 case BUILT_IN_INFD32:
9096 case BUILT_IN_INFD64:
9097 case BUILT_IN_INFD128:
9098 return fold_builtin_inf (loc, type, true);
9099
9100 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9101 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9102 return fold_builtin_inf (loc, type, false);
9103
9104 case BUILT_IN_CLASSIFY_TYPE:
9105 return fold_builtin_classify_type (NULL_TREE);
9106
9107 default:
9108 break;
9109 }
9110 return NULL_TREE;
9111 }
9112
9113 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9114 This function returns NULL_TREE if no simplification was possible. */
9115
9116 static tree
9117 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9118 {
9119 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9120 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9121
9122 if (TREE_CODE (arg0) == ERROR_MARK)
9123 return NULL_TREE;
9124
9125 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9126 return ret;
9127
9128 switch (fcode)
9129 {
9130 case BUILT_IN_CONSTANT_P:
9131 {
9132 tree val = fold_builtin_constant_p (arg0);
9133
9134 /* Gimplification will pull the CALL_EXPR for the builtin out of
9135 an if condition. When not optimizing, we'll not CSE it back.
9136 To avoid regressions such as link errors, return false now. */
9137 if (!val && !optimize)
9138 val = integer_zero_node;
9139
9140 return val;
9141 }
9142
9143 case BUILT_IN_CLASSIFY_TYPE:
9144 return fold_builtin_classify_type (arg0);
9145
9146 case BUILT_IN_STRLEN:
9147 return fold_builtin_strlen (loc, type, arg0);
9148
9149 CASE_FLT_FN (BUILT_IN_FABS):
9150 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9151 case BUILT_IN_FABSD32:
9152 case BUILT_IN_FABSD64:
9153 case BUILT_IN_FABSD128:
9154 return fold_builtin_fabs (loc, arg0, type);
9155
9156 case BUILT_IN_ABS:
9157 case BUILT_IN_LABS:
9158 case BUILT_IN_LLABS:
9159 case BUILT_IN_IMAXABS:
9160 return fold_builtin_abs (loc, arg0, type);
9161
9162 CASE_FLT_FN (BUILT_IN_CONJ):
9163 if (validate_arg (arg0, COMPLEX_TYPE)
9164 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9165 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9166 break;
9167
9168 CASE_FLT_FN (BUILT_IN_CREAL):
9169 if (validate_arg (arg0, COMPLEX_TYPE)
9170 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9171 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9172 break;
9173
9174 CASE_FLT_FN (BUILT_IN_CIMAG):
9175 if (validate_arg (arg0, COMPLEX_TYPE)
9176 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9177 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9178 break;
9179
9180 CASE_FLT_FN (BUILT_IN_CARG):
9181 return fold_builtin_carg (loc, arg0, type);
9182
9183 case BUILT_IN_ISASCII:
9184 return fold_builtin_isascii (loc, arg0);
9185
9186 case BUILT_IN_TOASCII:
9187 return fold_builtin_toascii (loc, arg0);
9188
9189 case BUILT_IN_ISDIGIT:
9190 return fold_builtin_isdigit (loc, arg0);
9191
9192 CASE_FLT_FN (BUILT_IN_FINITE):
9193 case BUILT_IN_FINITED32:
9194 case BUILT_IN_FINITED64:
9195 case BUILT_IN_FINITED128:
9196 case BUILT_IN_ISFINITE:
9197 {
9198 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9199 if (ret)
9200 return ret;
9201 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9202 }
9203
9204 CASE_FLT_FN (BUILT_IN_ISINF):
9205 case BUILT_IN_ISINFD32:
9206 case BUILT_IN_ISINFD64:
9207 case BUILT_IN_ISINFD128:
9208 {
9209 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9210 if (ret)
9211 return ret;
9212 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9213 }
9214
9215 case BUILT_IN_ISNORMAL:
9216 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9217
9218 case BUILT_IN_ISINF_SIGN:
9219 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9220
9221 CASE_FLT_FN (BUILT_IN_ISNAN):
9222 case BUILT_IN_ISNAND32:
9223 case BUILT_IN_ISNAND64:
9224 case BUILT_IN_ISNAND128:
9225 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9226
9227 case BUILT_IN_FREE:
9228 if (integer_zerop (arg0))
9229 return build_empty_stmt (loc);
9230 break;
9231
9232 default:
9233 break;
9234 }
9235
9236 return NULL_TREE;
9237
9238 }
9239
9240 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9241 This function returns NULL_TREE if no simplification was possible. */
9242
9243 static tree
9244 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9245 {
9246 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9247 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9248
9249 if (TREE_CODE (arg0) == ERROR_MARK
9250 || TREE_CODE (arg1) == ERROR_MARK)
9251 return NULL_TREE;
9252
9253 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9254 return ret;
9255
9256 switch (fcode)
9257 {
9258 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9259 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9260 if (validate_arg (arg0, REAL_TYPE)
9261 && validate_arg (arg1, POINTER_TYPE))
9262 return do_mpfr_lgamma_r (arg0, arg1, type);
9263 break;
9264
9265 CASE_FLT_FN (BUILT_IN_FREXP):
9266 return fold_builtin_frexp (loc, arg0, arg1, type);
9267
9268 CASE_FLT_FN (BUILT_IN_MODF):
9269 return fold_builtin_modf (loc, arg0, arg1, type);
9270
9271 case BUILT_IN_STRSPN:
9272 return fold_builtin_strspn (loc, arg0, arg1);
9273
9274 case BUILT_IN_STRCSPN:
9275 return fold_builtin_strcspn (loc, arg0, arg1);
9276
9277 case BUILT_IN_STRPBRK:
9278 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9279
9280 case BUILT_IN_EXPECT:
9281 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9282
9283 case BUILT_IN_ISGREATER:
9284 return fold_builtin_unordered_cmp (loc, fndecl,
9285 arg0, arg1, UNLE_EXPR, LE_EXPR);
9286 case BUILT_IN_ISGREATEREQUAL:
9287 return fold_builtin_unordered_cmp (loc, fndecl,
9288 arg0, arg1, UNLT_EXPR, LT_EXPR);
9289 case BUILT_IN_ISLESS:
9290 return fold_builtin_unordered_cmp (loc, fndecl,
9291 arg0, arg1, UNGE_EXPR, GE_EXPR);
9292 case BUILT_IN_ISLESSEQUAL:
9293 return fold_builtin_unordered_cmp (loc, fndecl,
9294 arg0, arg1, UNGT_EXPR, GT_EXPR);
9295 case BUILT_IN_ISLESSGREATER:
9296 return fold_builtin_unordered_cmp (loc, fndecl,
9297 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9298 case BUILT_IN_ISUNORDERED:
9299 return fold_builtin_unordered_cmp (loc, fndecl,
9300 arg0, arg1, UNORDERED_EXPR,
9301 NOP_EXPR);
9302
9303 /* We do the folding for va_start in the expander. */
9304 case BUILT_IN_VA_START:
9305 break;
9306
9307 case BUILT_IN_OBJECT_SIZE:
9308 return fold_builtin_object_size (arg0, arg1);
9309
9310 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9311 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9312
9313 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9314 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9315
9316 default:
9317 break;
9318 }
9319 return NULL_TREE;
9320 }
9321
9322 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9323 and ARG2.
9324 This function returns NULL_TREE if no simplification was possible. */
9325
9326 static tree
9327 fold_builtin_3 (location_t loc, tree fndecl,
9328 tree arg0, tree arg1, tree arg2)
9329 {
9330 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9331 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9332
9333 if (TREE_CODE (arg0) == ERROR_MARK
9334 || TREE_CODE (arg1) == ERROR_MARK
9335 || TREE_CODE (arg2) == ERROR_MARK)
9336 return NULL_TREE;
9337
9338 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9339 arg0, arg1, arg2))
9340 return ret;
9341
9342 switch (fcode)
9343 {
9344
9345 CASE_FLT_FN (BUILT_IN_SINCOS):
9346 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9347
9348 CASE_FLT_FN (BUILT_IN_REMQUO):
9349 if (validate_arg (arg0, REAL_TYPE)
9350 && validate_arg (arg1, REAL_TYPE)
9351 && validate_arg (arg2, POINTER_TYPE))
9352 return do_mpfr_remquo (arg0, arg1, arg2);
9353 break;
9354
9355 case BUILT_IN_MEMCMP:
9356 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9357
9358 case BUILT_IN_EXPECT:
9359 return fold_builtin_expect (loc, arg0, arg1, arg2);
9360
9361 case BUILT_IN_ADD_OVERFLOW:
9362 case BUILT_IN_SUB_OVERFLOW:
9363 case BUILT_IN_MUL_OVERFLOW:
9364 case BUILT_IN_ADD_OVERFLOW_P:
9365 case BUILT_IN_SUB_OVERFLOW_P:
9366 case BUILT_IN_MUL_OVERFLOW_P:
9367 case BUILT_IN_SADD_OVERFLOW:
9368 case BUILT_IN_SADDL_OVERFLOW:
9369 case BUILT_IN_SADDLL_OVERFLOW:
9370 case BUILT_IN_SSUB_OVERFLOW:
9371 case BUILT_IN_SSUBL_OVERFLOW:
9372 case BUILT_IN_SSUBLL_OVERFLOW:
9373 case BUILT_IN_SMUL_OVERFLOW:
9374 case BUILT_IN_SMULL_OVERFLOW:
9375 case BUILT_IN_SMULLL_OVERFLOW:
9376 case BUILT_IN_UADD_OVERFLOW:
9377 case BUILT_IN_UADDL_OVERFLOW:
9378 case BUILT_IN_UADDLL_OVERFLOW:
9379 case BUILT_IN_USUB_OVERFLOW:
9380 case BUILT_IN_USUBL_OVERFLOW:
9381 case BUILT_IN_USUBLL_OVERFLOW:
9382 case BUILT_IN_UMUL_OVERFLOW:
9383 case BUILT_IN_UMULL_OVERFLOW:
9384 case BUILT_IN_UMULLL_OVERFLOW:
9385 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9386
9387 default:
9388 break;
9389 }
9390 return NULL_TREE;
9391 }
9392
9393 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9394 arguments. IGNORE is true if the result of the
9395 function call is ignored. This function returns NULL_TREE if no
9396 simplification was possible. */
9397
9398 tree
9399 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9400 {
9401 tree ret = NULL_TREE;
9402
9403 switch (nargs)
9404 {
9405 case 0:
9406 ret = fold_builtin_0 (loc, fndecl);
9407 break;
9408 case 1:
9409 ret = fold_builtin_1 (loc, fndecl, args[0]);
9410 break;
9411 case 2:
9412 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9413 break;
9414 case 3:
9415 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9416 break;
9417 default:
9418 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9419 break;
9420 }
9421 if (ret)
9422 {
9423 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9424 SET_EXPR_LOCATION (ret, loc);
9425 return ret;
9426 }
9427 return NULL_TREE;
9428 }
9429
9430 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9431 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9432 of arguments in ARGS to be omitted. OLDNARGS is the number of
9433 elements in ARGS. */
9434
9435 static tree
9436 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9437 int skip, tree fndecl, int n, va_list newargs)
9438 {
9439 int nargs = oldnargs - skip + n;
9440 tree *buffer;
9441
9442 if (n > 0)
9443 {
9444 int i, j;
9445
9446 buffer = XALLOCAVEC (tree, nargs);
9447 for (i = 0; i < n; i++)
9448 buffer[i] = va_arg (newargs, tree);
9449 for (j = skip; j < oldnargs; j++, i++)
9450 buffer[i] = args[j];
9451 }
9452 else
9453 buffer = args + skip;
9454
9455 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9456 }
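/* For instance, with OLDNARGS == 3, SKIP == 1 and N == 2 the rewritten
   call receives the two new arguments followed by the last two of the
   original arguments, four arguments in total.  */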
9457
9458 /* Return true if FNDECL shouldn't be folded right now.
9459 If a built-in function has an inline attribute always_inline
9460 wrapper, defer folding it until after always_inline functions have
9461 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9462 might not be performed. */
9463
9464 bool
9465 avoid_folding_inline_builtin (tree fndecl)
9466 {
9467 return (DECL_DECLARED_INLINE_P (fndecl)
9468 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9469 && cfun
9470 && !cfun->always_inline_functions_inlined
9471 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9472 }
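/* A typical case is glibc's _FORTIFY_SOURCE wrappers, where e.g. an
   always_inline strcpy wrapper calls __builtin___strcpy_chk; folding
   that builtin call before the wrapper itself has been inlined would
   bypass the object-size checking the wrapper exists to provide.  */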
9473
9474 /* A wrapper function for builtin folding that prevents warnings for
9475 "statement without effect" and the like, caused by removing the
9476 call node before the warning is generated. */
9477
9478 tree
9479 fold_call_expr (location_t loc, tree exp, bool ignore)
9480 {
9481 tree ret = NULL_TREE;
9482 tree fndecl = get_callee_fndecl (exp);
9483 if (fndecl
9484 && TREE_CODE (fndecl) == FUNCTION_DECL
9485 && DECL_BUILT_IN (fndecl)
9486 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9487 yet. Defer folding until we see all the arguments
9488 (after inlining). */
9489 && !CALL_EXPR_VA_ARG_PACK (exp))
9490 {
9491 int nargs = call_expr_nargs (exp);
9492
9493 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9494 instead last argument is __builtin_va_arg_pack (). Defer folding
9495 even in that case, until arguments are finalized. */
9496 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9497 {
9498 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9499 if (fndecl2
9500 && TREE_CODE (fndecl2) == FUNCTION_DECL
9501 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9502 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9503 return NULL_TREE;
9504 }
9505
9506 if (avoid_folding_inline_builtin (fndecl))
9507 return NULL_TREE;
9508
9509 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9510 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9511 CALL_EXPR_ARGP (exp), ignore);
9512 else
9513 {
9514 tree *args = CALL_EXPR_ARGP (exp);
9515 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9516 if (ret)
9517 return ret;
9518 }
9519 }
9520 return NULL_TREE;
9521 }
9522
9523 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9524 N arguments are passed in the array ARGARRAY. Return a folded
9525 expression or NULL_TREE if no simplification was possible. */
9526
9527 tree
9528 fold_builtin_call_array (location_t loc, tree,
9529 tree fn,
9530 int n,
9531 tree *argarray)
9532 {
9533 if (TREE_CODE (fn) != ADDR_EXPR)
9534 return NULL_TREE;
9535
9536 tree fndecl = TREE_OPERAND (fn, 0);
9537 if (TREE_CODE (fndecl) == FUNCTION_DECL
9538 && DECL_BUILT_IN (fndecl))
9539 {
9540 /* If last argument is __builtin_va_arg_pack (), arguments to this
9541 function are not finalized yet. Defer folding until they are. */
9542 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9543 {
9544 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9545 if (fndecl2
9546 && TREE_CODE (fndecl2) == FUNCTION_DECL
9547 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9548 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9549 return NULL_TREE;
9550 }
9551 if (avoid_folding_inline_builtin (fndecl))
9552 return NULL_TREE;
9553 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9554 return targetm.fold_builtin (fndecl, n, argarray, false);
9555 else
9556 return fold_builtin_n (loc, fndecl, argarray, n, false);
9557 }
9558
9559 return NULL_TREE;
9560 }
9561
9562 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9563 along with N new arguments specified as the "..." parameters. SKIP
9564 is the number of arguments in EXP to be omitted. This function is used
9565 to do varargs-to-varargs transformations. */
9566
9567 static tree
9568 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9569 {
9570 va_list ap;
9571 tree t;
9572
9573 va_start (ap, n);
9574 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9575 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9576 va_end (ap);
9577
9578 return t;
9579 }
9580
9581 /* Validate a single argument ARG against a tree code CODE representing
9582 a type. Return true when argument is valid. */
9583
9584 static bool
9585 validate_arg (const_tree arg, enum tree_code code)
9586 {
9587 if (!arg)
9588 return false;
9589 else if (code == POINTER_TYPE)
9590 return POINTER_TYPE_P (TREE_TYPE (arg));
9591 else if (code == INTEGER_TYPE)
9592 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9593 return code == TREE_CODE (TREE_TYPE (arg));
9594 }
9595
9596 /* This function validates the types of a function call argument list
9597 against a specified list of tree_codes. If the last specifier is a 0,
9598 that represents an ellipsis, otherwise the last specifier must be a
9599 VOID_TYPE.
9600
9601 This is the GIMPLE version of validate_arglist. Eventually we want to
9602 completely convert builtins.c to work from GIMPLEs and the tree based
9603 validate_arglist will then be removed. */
9604
9605 bool
9606 validate_gimple_arglist (const gcall *call, ...)
9607 {
9608 enum tree_code code;
9609 bool res = false;
9610 va_list ap;
9611 const_tree arg;
9612 size_t i;
9613
9614 va_start (ap, call);
9615 i = 0;
9616
9617 do
9618 {
9619 code = (enum tree_code) va_arg (ap, int);
9620 switch (code)
9621 {
9622 case 0:
9623 /* This signifies an ellipsis; any further arguments are all ok. */
9624 res = true;
9625 goto end;
9626 case VOID_TYPE:
9627 /* This signifies an endlink; if no arguments remain, return
9628 true, otherwise return false. */
9629 res = (i == gimple_call_num_args (call));
9630 goto end;
9631 default:
9632 /* If no parameters remain or the parameter's code does not
9633 match the specified code, return false. Otherwise continue
9634 checking any remaining arguments. */
9635 arg = gimple_call_arg (call, i++);
9636 if (!validate_arg (arg, code))
9637 goto end;
9638 break;
9639 }
9640 }
9641 while (1);
9642
9643 /* We need gotos here since we can only have one VA_CLOSE in a
9644 function. */
9645 end: ;
9646 va_end (ap);
9647
9648 return res;
9649 }
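/* Example use: validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly a pointer argument followed by an integer
   argument; ending the specifier list with 0 instead of VOID_TYPE would
   additionally allow trailing arguments of any type.  */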
9650
9651 /* Default target-specific builtin expander that does nothing. */
9652
9653 rtx
9654 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9655 rtx target ATTRIBUTE_UNUSED,
9656 rtx subtarget ATTRIBUTE_UNUSED,
9657 machine_mode mode ATTRIBUTE_UNUSED,
9658 int ignore ATTRIBUTE_UNUSED)
9659 {
9660 return NULL_RTX;
9661 }
9662
9663 /* Returns true if EXP represents data that would potentially reside
9664 in a readonly section. */
9665
9666 bool
9667 readonly_data_expr (tree exp)
9668 {
9669 STRIP_NOPS (exp);
9670
9671 if (TREE_CODE (exp) != ADDR_EXPR)
9672 return false;
9673
9674 exp = get_base_address (TREE_OPERAND (exp, 0));
9675 if (!exp)
9676 return false;
9677
9678 /* Make sure we call decl_readonly_section only for trees it
9679 can handle (since it returns true for everything it doesn't
9680 understand). */
9681 if (TREE_CODE (exp) == STRING_CST
9682 || TREE_CODE (exp) == CONSTRUCTOR
9683 || (VAR_P (exp) && TREE_STATIC (exp)))
9684 return decl_readonly_section (exp, 0);
9685 else
9686 return false;
9687 }
9688
9689 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9690 to the call, and TYPE is its return type.
9691
9692 Return NULL_TREE if no simplification was possible, otherwise return the
9693 simplified form of the call as a tree.
9694
9695 The simplified form may be a constant or other expression which
9696 computes the same value, but in a more efficient manner (including
9697 calls to other builtin functions).
9698
9699 The call may contain arguments which need to be evaluated, but
9700 which are not useful to determine the result of the call. In
9701 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9702 COMPOUND_EXPR will be an argument which must be evaluated.
9703 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9704 COMPOUND_EXPR in the chain will contain the tree for the simplified
9705 form of the builtin function call. */
9706
9707 static tree
9708 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9709 {
9710 if (!validate_arg (s1, POINTER_TYPE)
9711 || !validate_arg (s2, POINTER_TYPE))
9712 return NULL_TREE;
9713 else
9714 {
9715 tree fn;
9716 const char *p1, *p2;
9717
9718 p2 = c_getstr (s2);
9719 if (p2 == NULL)
9720 return NULL_TREE;
9721
9722 p1 = c_getstr (s1);
9723 if (p1 != NULL)
9724 {
9725 const char *r = strpbrk (p1, p2);
9726 tree tem;
9727
9728 if (r == NULL)
9729 return build_int_cst (TREE_TYPE (s1), 0);
9730
9731 /* Return an offset into the constant string argument. */
9732 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9733 return fold_convert_loc (loc, type, tem);
9734 }
9735
9736 if (p2[0] == '\0')
9737 /* strpbrk(x, "") == NULL.
9738 Evaluate and ignore s1 in case it had side-effects. */
9739 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9740
9741 if (p2[1] != '\0')
9742 return NULL_TREE; /* Really call strpbrk. */
9743
9744 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9745 if (!fn)
9746 return NULL_TREE;
9747
9748 /* New argument list transforming strpbrk(s1, s2) to
9749 strchr(s1, s2[0]). */
9750 return build_call_expr_loc (loc, fn, 2, s1,
9751 build_int_cst (integer_type_node, p2[0]));
9752 }
9753 }
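/* Worked examples of the transformations above: strpbrk (s, "") folds
   to a null pointer (still evaluating S for side effects),
   strpbrk (s, "x") becomes strchr (s, 'x'), and with both arguments
   constant, e.g. strpbrk ("hello", "lo"), the result folds to the
   offset expression "hello" + 2.  */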
9754
9755 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9756 to the call.
9757
9758 Return NULL_TREE if no simplification was possible, otherwise return the
9759 simplified form of the call as a tree.
9760
9761 The simplified form may be a constant or other expression which
9762 computes the same value, but in a more efficient manner (including
9763 calls to other builtin functions).
9764
9765 The call may contain arguments which need to be evaluated, but
9766 which are not useful to determine the result of the call. In
9767 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9768 COMPOUND_EXPR will be an argument which must be evaluated.
9769 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9770 COMPOUND_EXPR in the chain will contain the tree for the simplified
9771 form of the builtin function call. */
9772
9773 static tree
9774 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9775 {
9776 if (!validate_arg (s1, POINTER_TYPE)
9777 || !validate_arg (s2, POINTER_TYPE))
9778 return NULL_TREE;
9779 else
9780 {
9781 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9782
9783 /* If either argument is "", the result is 0. */
9784 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9785 /* Evaluate and ignore both arguments in case either one has
9786 side-effects. */
9787 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9788 s1, s2);
9789 return NULL_TREE;
9790 }
9791 }
9792
9793 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9794 to the call.
9795
9796 Return NULL_TREE if no simplification was possible, otherwise return the
9797 simplified form of the call as a tree.
9798
9799 The simplified form may be a constant or other expression which
9800 computes the same value, but in a more efficient manner (including
9801 calls to other builtin functions).
9802
9803 The call may contain arguments which need to be evaluated, but
9804 which are not useful to determine the result of the call. In
9805 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9806 COMPOUND_EXPR will be an argument which must be evaluated.
9807 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9808 COMPOUND_EXPR in the chain will contain the tree for the simplified
9809 form of the builtin function call. */
9810
9811 static tree
9812 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9813 {
9814 if (!validate_arg (s1, POINTER_TYPE)
9815 || !validate_arg (s2, POINTER_TYPE))
9816 return NULL_TREE;
9817 else
9818 {
9819 /* If the first argument is "", the result is 0. */
9820 const char *p1 = c_getstr (s1);
9821 if (p1 && *p1 == '\0')
9822 {
9823 /* Evaluate and ignore argument s2 in case it has
9824 side-effects. */
9825 return omit_one_operand_loc (loc, size_type_node,
9826 size_zero_node, s2);
9827 }
9828
9829 /* If the second argument is "", return __builtin_strlen(s1). */
9830 const char *p2 = c_getstr (s2);
9831 if (p2 && *p2 == '\0')
9832 {
9833 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9834
9835 /* If the replacement _DECL isn't initialized, don't do the
9836 transformation. */
9837 if (!fn)
9838 return NULL_TREE;
9839
9840 return build_call_expr_loc (loc, fn, 1, s1);
9841 }
9842 return NULL_TREE;
9843 }
9844 }
9845
9846 /* Fold the next_arg or va_start call EXP. Returns true if an error
9847 was produced, false otherwise. This is done so that we don't output
9848 the error or warning twice or three times. */
9849
9850 bool
9851 fold_builtin_next_arg (tree exp, bool va_start_p)
9852 {
9853 tree fntype = TREE_TYPE (current_function_decl);
9854 int nargs = call_expr_nargs (exp);
9855 tree arg;
9856 /* There is a good chance the current input_location points inside the
9857 definition of the va_start macro (perhaps on the token for
9858 builtin) in a system header, so warnings will not be emitted.
9859 Use the location in real source code. */
9860 source_location current_location =
9861 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9862 NULL);
9863
9864 if (!stdarg_p (fntype))
9865 {
9866 error ("%<va_start%> used in function with fixed args");
9867 return true;
9868 }
9869
9870 if (va_start_p)
9871 {
9872 if (nargs != 2)
9873 {
9874 error ("wrong number of arguments to function %<va_start%>");
9875 return true;
9876 }
9877 arg = CALL_EXPR_ARG (exp, 1);
9878 }
9879 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9880 when we checked the arguments and if needed issued a warning. */
9881 else
9882 {
9883 if (nargs == 0)
9884 {
9885 /* Evidently an out of date version of <stdarg.h>; can't validate
9886 va_start's second argument, but can still work as intended. */
9887 warning_at (current_location,
9888 OPT_Wvarargs,
9889 "%<__builtin_next_arg%> called without an argument");
9890 return true;
9891 }
9892 else if (nargs > 1)
9893 {
9894 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9895 return true;
9896 }
9897 arg = CALL_EXPR_ARG (exp, 0);
9898 }
9899
9900 if (TREE_CODE (arg) == SSA_NAME)
9901 arg = SSA_NAME_VAR (arg);
9902
9903 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9904 or __builtin_next_arg (0) the first time we see it, after checking
9905 the arguments and if needed issuing a warning. */
9906 if (!integer_zerop (arg))
9907 {
9908 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9909
9910 /* Strip off all nops for the sake of the comparison. This
9911 is not quite the same as STRIP_NOPS. It does more.
9912 We must also strip off INDIRECT_EXPR for C++ reference
9913 parameters. */
9914 while (CONVERT_EXPR_P (arg)
9915 || TREE_CODE (arg) == INDIRECT_REF)
9916 arg = TREE_OPERAND (arg, 0);
9917 if (arg != last_parm)
9918 {
9919 /* FIXME: Sometimes the tree optimizers hand us an argument that
9920 is not the last one, even though the user did use the last
9921 argument. We just warn here and carry on, so we will
9922 still end up generating wrong code
9923 because of it. */
9924 warning_at (current_location,
9925 OPT_Wvarargs,
9926 "second parameter of %<va_start%> not last named argument");
9927 }
9928
9929 /* Undefined by C99 7.15.1.4p4 (va_start):
9930 "If the parameter parmN is declared with the register storage
9931 class, with a function or array type, or with a type that is
9932 not compatible with the type that results after application of
9933 the default argument promotions, the behavior is undefined."
9934 */
9935 else if (DECL_REGISTER (arg))
9936 {
9937 warning_at (current_location,
9938 OPT_Wvarargs,
9939 "undefined behavior when second parameter of "
9940 "%<va_start%> is declared with %<register%> storage");
9941 }
9942
9943 /* We want to verify the second parameter just once before the tree
9944 optimizers are run and then avoid keeping it in the tree,
9945 as otherwise we could warn even for correct code like:
9946 void foo (int i, ...)
9947 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9948 if (va_start_p)
9949 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9950 else
9951 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9952 }
9953 return false;
9954 }
9955
9956
9957 /* Expand a call EXP to __builtin_object_size. */
9958
9959 static rtx
9960 expand_builtin_object_size (tree exp)
9961 {
9962 tree ost;
9963 int object_size_type;
9964 tree fndecl = get_callee_fndecl (exp);
9965
9966 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9967 {
9968 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9969 exp, fndecl);
9970 expand_builtin_trap ();
9971 return const0_rtx;
9972 }
9973
9974 ost = CALL_EXPR_ARG (exp, 1);
9975 STRIP_NOPS (ost);
9976
9977 if (TREE_CODE (ost) != INTEGER_CST
9978 || tree_int_cst_sgn (ost) < 0
9979 || compare_tree_int (ost, 3) > 0)
9980 {
9981 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9982 exp, fndecl);
9983 expand_builtin_trap ();
9984 return const0_rtx;
9985 }
9986
9987 object_size_type = tree_to_shwi (ost);
9988
9989 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9990 }
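/* When the size could not be determined by earlier passes, the
   conservative defaults above apply: e.g. __builtin_object_size (p, 0)
   expands to (size_t) -1 (maximum unknown), while
   __builtin_object_size (p, 2) expands to 0 (minimum unknown).  */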
9991
9992 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9993 FCODE is the BUILT_IN_* to use.
9994 Return NULL_RTX if we failed; the caller should emit a normal call,
9995 otherwise try to get the result in TARGET, if convenient (and in
9996 mode MODE if that's convenient). */
9997
9998 static rtx
9999 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10000 enum built_in_function fcode)
10001 {
10002 if (!validate_arglist (exp,
10003 POINTER_TYPE,
10004 fcode == BUILT_IN_MEMSET_CHK
10005 ? INTEGER_TYPE : POINTER_TYPE,
10006 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10007 return NULL_RTX;
10008
10009 tree dest = CALL_EXPR_ARG (exp, 0);
10010 tree src = CALL_EXPR_ARG (exp, 1);
10011 tree len = CALL_EXPR_ARG (exp, 2);
10012 tree size = CALL_EXPR_ARG (exp, 3);
10013
10014 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10015 /*str=*/NULL_TREE, size);
10016
10017 if (!tree_fits_uhwi_p (size))
10018 return NULL_RTX;
10019
10020 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10021 {
10022 /* Avoid transforming the checking call to an ordinary one when
10023 an overflow has been detected or when the call couldn't be
10024 validated because the size is not constant. */
10025 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10026 return NULL_RTX;
10027
10028 tree fn = NULL_TREE;
10029 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10030 mem{cpy,pcpy,move,set} is available. */
10031 switch (fcode)
10032 {
10033 case BUILT_IN_MEMCPY_CHK:
10034 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10035 break;
10036 case BUILT_IN_MEMPCPY_CHK:
10037 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10038 break;
10039 case BUILT_IN_MEMMOVE_CHK:
10040 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10041 break;
10042 case BUILT_IN_MEMSET_CHK:
10043 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10044 break;
10045 default:
10046 break;
10047 }
10048
10049 if (! fn)
10050 return NULL_RTX;
10051
10052 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10053 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10054 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10055 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10056 }
10057 else if (fcode == BUILT_IN_MEMSET_CHK)
10058 return NULL_RTX;
10059 else
10060 {
10061 unsigned int dest_align = get_pointer_alignment (dest);
10062
10063 /* If DEST is not a pointer type, call the normal function. */
10064 if (dest_align == 0)
10065 return NULL_RTX;
10066
10067 /* If SRC and DEST are the same (and not volatile), do nothing. */
10068 if (operand_equal_p (src, dest, 0))
10069 {
10070 tree expr;
10071
10072 if (fcode != BUILT_IN_MEMPCPY_CHK)
10073 {
10074 /* Evaluate and ignore LEN in case it has side-effects. */
10075 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10076 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10077 }
10078
10079 expr = fold_build_pointer_plus (dest, len);
10080 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10081 }
10082
10083 /* __memmove_chk special case. */
10084 if (fcode == BUILT_IN_MEMMOVE_CHK)
10085 {
10086 unsigned int src_align = get_pointer_alignment (src);
10087
10088 if (src_align == 0)
10089 return NULL_RTX;
10090
10091 /* If src is categorized for a readonly section we can use
10092 normal __memcpy_chk. */
10093 if (readonly_data_expr (src))
10094 {
10095 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10096 if (!fn)
10097 return NULL_RTX;
10098 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10099 dest, src, len, size);
10100 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10101 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10102 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10103 }
10104 }
10105 return NULL_RTX;
10106 }
10107 }
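/* For example, __builtin___memcpy_chk (d, s, 32, 64), where both the
   length and the destination size are known and in range, is expanded
   here as a plain memcpy (d, s, 32).  With a non-constant length the
   call generally falls back to the __memcpy_chk library routine unless
   the size argument is (size_t) -1.  */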
10108
10109 /* Emit warning if a buffer overflow is detected at compile time. */
10110
10111 static void
10112 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10113 {
10114 /* The source string. */
10115 tree srcstr = NULL_TREE;
10116 /* The size of the destination object. */
10117 tree objsize = NULL_TREE;
10118 /* The string that is being concatenated with (as in __strcat_chk)
10119 or null if it isn't. */
10120 tree catstr = NULL_TREE;
10121 /* The maximum length of the source sequence in a bounded operation
10122 (such as __strncat_chk) or null if the operation isn't bounded
10123 (such as __strcat_chk). */
10124 tree maxread = NULL_TREE;
10125 /* The exact size of the access (such as in __strncpy_chk). */
10126 tree size = NULL_TREE;
10127
10128 switch (fcode)
10129 {
10130 case BUILT_IN_STRCPY_CHK:
10131 case BUILT_IN_STPCPY_CHK:
10132 srcstr = CALL_EXPR_ARG (exp, 1);
10133 objsize = CALL_EXPR_ARG (exp, 2);
10134 break;
10135
10136 case BUILT_IN_STRCAT_CHK:
10137 /* For __strcat_chk the warning will be emitted only if overflowing
10138 by at least strlen (dest) + 1 bytes. */
10139 catstr = CALL_EXPR_ARG (exp, 0);
10140 srcstr = CALL_EXPR_ARG (exp, 1);
10141 objsize = CALL_EXPR_ARG (exp, 2);
10142 break;
10143
10144 case BUILT_IN_STRNCAT_CHK:
10145 catstr = CALL_EXPR_ARG (exp, 0);
10146 srcstr = CALL_EXPR_ARG (exp, 1);
10147 maxread = CALL_EXPR_ARG (exp, 2);
10148 objsize = CALL_EXPR_ARG (exp, 3);
10149 break;
10150
10151 case BUILT_IN_STRNCPY_CHK:
10152 case BUILT_IN_STPNCPY_CHK:
10153 srcstr = CALL_EXPR_ARG (exp, 1);
10154 size = CALL_EXPR_ARG (exp, 2);
10155 objsize = CALL_EXPR_ARG (exp, 3);
10156 break;
10157
10158 case BUILT_IN_SNPRINTF_CHK:
10159 case BUILT_IN_VSNPRINTF_CHK:
10160 maxread = CALL_EXPR_ARG (exp, 1);
10161 objsize = CALL_EXPR_ARG (exp, 3);
10162 break;
10163 default:
10164 gcc_unreachable ();
10165 }
10166
10167 if (catstr && maxread)
10168 {
10169 /* Check __strncat_chk. There is no way to determine the length
10170 of the string to which the source string is being appended so
10171 just warn when the length of the source string is not known. */
10172 check_strncat_sizes (exp, objsize);
10173 return;
10174 }
10175
10176 /* The destination argument is the first one for all built-ins above. */
10177 tree dst = CALL_EXPR_ARG (exp, 0);
10178
10179 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10180 }
10181
10182 /* Emit warning if a buffer overflow is detected at compile time
10183 in __sprintf_chk/__vsprintf_chk calls. */
10184
10185 static void
10186 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10187 {
10188 tree size, len, fmt;
10189 const char *fmt_str;
10190 int nargs = call_expr_nargs (exp);
10191
10192 /* Verify the required arguments in the original call. */
10193
10194 if (nargs < 4)
10195 return;
10196 size = CALL_EXPR_ARG (exp, 2);
10197 fmt = CALL_EXPR_ARG (exp, 3);
10198
10199 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10200 return;
10201
10202 /* Check whether the format is a literal string constant. */
10203 fmt_str = c_getstr (fmt);
10204 if (fmt_str == NULL)
10205 return;
10206
10207 if (!init_target_chars ())
10208 return;
10209
10210 /* If the format doesn't contain % args or %%, we know its size. */
10211 if (strchr (fmt_str, target_percent) == 0)
10212 len = build_int_cstu (size_type_node, strlen (fmt_str));
10213 /* If the format is "%s" and first ... argument is a string literal,
10214 we know it too. */
10215 else if (fcode == BUILT_IN_SPRINTF_CHK
10216 && strcmp (fmt_str, target_percent_s) == 0)
10217 {
10218 tree arg;
10219
10220 if (nargs < 5)
10221 return;
10222 arg = CALL_EXPR_ARG (exp, 4);
10223 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10224 return;
10225
10226 len = c_strlen (arg, 1);
10227 if (!len || ! tree_fits_uhwi_p (len))
10228 return;
10229 }
10230 else
10231 return;
10232
10233 /* Add one for the terminating nul. */
10234 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10235
10236 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10237 /*maxread=*/NULL_TREE, len, size);
10238 }
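/* For instance, __builtin___sprintf_chk (buf, 0, 4, "abcdef") has a
   known output length of 7 including the terminating nul, which exceeds
   the destination size of 4 and therefore triggers the overflow warning
   from check_access above.  */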
10239
10240 /* Emit a warning if free is called with the address of a variable. */
10241
10242 static void
10243 maybe_emit_free_warning (tree exp)
10244 {
10245 tree arg = CALL_EXPR_ARG (exp, 0);
10246
10247 STRIP_NOPS (arg);
10248 if (TREE_CODE (arg) != ADDR_EXPR)
10249 return;
10250
10251 arg = get_base_address (TREE_OPERAND (arg, 0));
10252 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10253 return;
10254
10255 if (SSA_VAR_P (arg))
10256 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10257 "%Kattempt to free a non-heap object %qD", exp, arg);
10258 else
10259 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10260 "%Kattempt to free a non-heap object", exp);
10261 }
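
/* Illustrative sketch (hypothetical user code, not part of this file):

     int x;
     free (&x);   /* attempt to free a non-heap object 'x' [-Wfree-nonheap-object] */

   The address of an auto or static variable is an ADDR_EXPR whose base
   is a VAR_DECL, so the SSA_VAR_P branch above emits the named warning.  */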
10262
10263 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10264 if possible. */
10265
10266 static tree
10267 fold_builtin_object_size (tree ptr, tree ost)
10268 {
10269 unsigned HOST_WIDE_INT bytes;
10270 int object_size_type;
10271
10272 if (!validate_arg (ptr, POINTER_TYPE)
10273 || !validate_arg (ost, INTEGER_TYPE))
10274 return NULL_TREE;
10275
10276 STRIP_NOPS (ost);
10277
10278 if (TREE_CODE (ost) != INTEGER_CST
10279 || tree_int_cst_sgn (ost) < 0
10280 || compare_tree_int (ost, 3) > 0)
10281 return NULL_TREE;
10282
10283 object_size_type = tree_to_shwi (ost);
10284
10285 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10286 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10287 and (size_t) 0 for types 2 and 3. */
10288 if (TREE_SIDE_EFFECTS (ptr))
10289 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10290
10291 if (TREE_CODE (ptr) == ADDR_EXPR)
10292 {
10293 compute_builtin_object_size (ptr, object_size_type, &bytes);
10294 if (wi::fits_to_tree_p (bytes, size_type_node))
10295 return build_int_cstu (size_type_node, bytes);
10296 }
10297 else if (TREE_CODE (ptr) == SSA_NAME)
10298 {
10299 /* If object size is not known yet, delay folding until
10300 later. Maybe subsequent passes will help determine
10301 it. */
10302 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10303 && wi::fits_to_tree_p (bytes, size_type_node))
10304 return build_int_cstu (size_type_node, bytes);
10305 }
10306
10307 return NULL_TREE;
10308 }
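
/* Illustrative sketch (hypothetical user code, not part of this file):
   for some pointer p with a side-effecting argument,

     char buf[16];
     __builtin_object_size (&buf[4], 0);   /* folds to 12 here */
     __builtin_object_size (p++, 0);       /* side effects: folds to (size_t) -1 */
     __builtin_object_size (p++, 2);       /* side effects: folds to 0 */

   SSA_NAME arguments whose size is still unknown are left unfolded so
   that later object-size passes can try again.  */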
10309
10310 /* Builtins with folding operations that operate on "..." arguments
10311 need special handling; we need to store the arguments in a convenient
10312 data structure before attempting any folding. Fortunately there are
10313 only a few builtins that fall into this category. FNDECL is the
10314 function, EXP is the CALL_EXPR for the call. */
10315
10316 static tree
10317 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10318 {
10319 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10320 tree ret = NULL_TREE;
10321
10322 switch (fcode)
10323 {
10324 case BUILT_IN_FPCLASSIFY:
10325 ret = fold_builtin_fpclassify (loc, args, nargs);
10326 break;
10327
10328 default:
10329 break;
10330 }
10331 if (ret)
10332 {
10333 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10334 SET_EXPR_LOCATION (ret, loc);
10335 TREE_NO_WARNING (ret) = 1;
10336 return ret;
10337 }
10338 return NULL_TREE;
10339 }
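
/* Illustrative sketch (hypothetical user code, not part of this file):
   __builtin_fpclassify takes the five classification values followed by
   the operand, so with a constant argument

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 1.0)

   fold_builtin_fpclassify can fold the call to the FP_NORMAL value.  */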
10340
10341 /* Initialize format string characters in the target charset. */
10342
10343 bool
10344 init_target_chars (void)
10345 {
10346 static bool init;
10347 if (!init)
10348 {
10349 target_newline = lang_hooks.to_target_charset ('\n');
10350 target_percent = lang_hooks.to_target_charset ('%');
10351 target_c = lang_hooks.to_target_charset ('c');
10352 target_s = lang_hooks.to_target_charset ('s');
10353 if (target_newline == 0 || target_percent == 0 || target_c == 0
10354 || target_s == 0)
10355 return false;
10356
10357 target_percent_c[0] = target_percent;
10358 target_percent_c[1] = target_c;
10359 target_percent_c[2] = '\0';
10360
10361 target_percent_s[0] = target_percent;
10362 target_percent_s[1] = target_s;
10363 target_percent_s[2] = '\0';
10364
10365 target_percent_s_newline[0] = target_percent;
10366 target_percent_s_newline[1] = target_s;
10367 target_percent_s_newline[2] = target_newline;
10368 target_percent_s_newline[3] = '\0';
10369
10370 init = true;
10371 }
10372 return true;
10373 }
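
/* Illustrative note (a sketch of how the strings above are used, not a
   new interface): format folding compares a user format against these
   target-charset strings rather than against host literals, e.g.

     if (strcmp (fmt_str, target_percent_s) == 0)   /* format is exactly "%s" */

   so the checks stay correct when the host and target character sets
   differ, for instance when cross-compiling to an EBCDIC target.  */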
10374
10375 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10376 and no overflow/underflow occurred. INEXACT is true if M was not
10377 exactly calculated. TYPE is the tree type for the result. This
10378 function assumes that the caller cleared the MPFR flags and then
10379 calculated M, so that any flag set before entering this function
10380 reflects that calculation. Return NULL_TREE if any checks fail. */
10381
10382 static tree
10383 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10384 {
10385 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10386 overflow/underflow occurred. If -frounding-math, proceed iff the
10387 result of calling FUNC was exact. */
10388 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10389 && (!flag_rounding_math || !inexact))
10390 {
10391 REAL_VALUE_TYPE rr;
10392
10393 real_from_mpfr (&rr, m, type, GMP_RNDN);
10394 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10395 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10396 but the mpfr_t is not, then we underflowed in the
10397 conversion. */
10398 if (real_isfinite (&rr)
10399 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10400 {
10401 REAL_VALUE_TYPE rmode;
10402
10403 real_convert (&rmode, TYPE_MODE (type), &rr);
10404 /* Proceed iff the specified mode can hold the value. */
10405 if (real_identical (&rmode, &rr))
10406 return build_real (type, rmode);
10407 }
10408 }
10409 return NULL_TREE;
10410 }
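
/* Illustrative sketch (not a new interface, just the calling pattern the
   helpers below follow): compute into an mpfr_t at the target precision,
   clear the flags first, then let do_mpfr_ckconv decide whether the
   result may be used as a constant:

     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);            /* any mpfr_* operation */
     result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);  */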
10411
10412 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10413 number and no overflow/underflow occurred. INEXACT is true if M
10414 was not exactly calculated. TYPE is the tree type for the result.
10415 This function assumes that the caller cleared the MPFR flags and
10416 then calculated M, so that any flag set before entering this
10417 function reflects that calculation. Return NULL_TREE if any checks
10418 fail; if FORCE_CONVERT is true, bypass the checks. */
10419
10420 static tree
10421 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10422 {
10423 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10424 overflow/underflow occurred. If -frounding-math, proceed iff the
10425 result of calling FUNC was exact. */
10426 if (force_convert
10427 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10428 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10429 && (!flag_rounding_math || !inexact)))
10430 {
10431 REAL_VALUE_TYPE re, im;
10432
10433 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10434 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10435 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10436 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10437 but the mpfr_t is not, then we underflowed in the
10438 conversion. */
10439 if (force_convert
10440 || (real_isfinite (&re) && real_isfinite (&im)
10441 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10442 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10443 {
10444 REAL_VALUE_TYPE re_mode, im_mode;
10445
10446 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10447 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10448 /* Proceed iff the specified mode can hold the value. */
10449 if (force_convert
10450 || (real_identical (&re_mode, &re)
10451 && real_identical (&im_mode, &im)))
10452 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10453 build_real (TREE_TYPE (type), im_mode));
10454 }
10455 }
10456 return NULL_TREE;
10457 }
10458
10459 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10460 the value pointed to by ARG_QUO and return the remainder. The type
10461 is taken from the type of ARG0 and is used to set the precision of
10462 the calculation and the results. */
10463
10464 static tree
10465 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10466 {
10467 tree const type = TREE_TYPE (arg0);
10468 tree result = NULL_TREE;
10469
10470 STRIP_NOPS (arg0);
10471 STRIP_NOPS (arg1);
10472
10473 /* To proceed, MPFR must exactly represent the target floating point
10474 format, which only happens when the target base equals two. */
10475 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10476 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10477 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10478 {
10479 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10480 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10481
10482 if (real_isfinite (ra0) && real_isfinite (ra1))
10483 {
10484 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10485 const int prec = fmt->p;
10486 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10487 tree result_rem;
10488 long integer_quo;
10489 mpfr_t m0, m1;
10490
10491 mpfr_inits2 (prec, m0, m1, NULL);
10492 mpfr_from_real (m0, ra0, GMP_RNDN);
10493 mpfr_from_real (m1, ra1, GMP_RNDN);
10494 mpfr_clear_flags ();
10495 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10496 /* Remquo is independent of the rounding mode, so pass
10497 inexact=0 to do_mpfr_ckconv(). */
10498 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10499 mpfr_clears (m0, m1, NULL);
10500 if (result_rem)
10501 {
10502 /* MPFR calculates quo in the host's long so it may
10503 return more bits in quo than the target int can hold
10504 if sizeof(host long) > sizeof(target int). This can
10505 happen even for native compilers in LP64 mode. In
10506 these cases, reduce the quo value modulo the largest
10507 number that the target int can hold, leaving one bit
10508 for the sign. */
10509 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10510 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10511
10512 /* Dereference the quo pointer argument. */
10513 arg_quo = build_fold_indirect_ref (arg_quo);
10514 /* Proceed iff a valid pointer type was passed in. */
10515 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10516 {
10517 /* Set the value. */
10518 tree result_quo
10519 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10520 build_int_cst (TREE_TYPE (arg_quo),
10521 integer_quo));
10522 TREE_SIDE_EFFECTS (result_quo) = 1;
10523 /* Combine the quo assignment with the rem. */
10524 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10525 result_quo, result_rem));
10526 }
10527 }
10528 }
10529 }
10530 return result;
10531 }
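
/* Illustrative sketch (hypothetical user code, not part of this file):
   with constant arguments,

     int q;
     double r = __builtin_remquo (7.0, 3.0, &q);

   can be folded here: mpfr_remquo yields a remainder of 1.0 and a
   quotient of 2, so the call becomes roughly (*&q = 2, 1.0), a
   COMPOUND_EXPR of the quo assignment and the remainder.  */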
10532
10533 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10534 resulting value as a tree with type TYPE. The mpfr precision is
10535 set to the precision of TYPE. We assume that this mpfr function
10536 returns zero if the result could be calculated exactly within the
10537 requested precision. In addition, the integer pointer represented
10538 by ARG_SG will be dereferenced and set to the appropriate signgam
10539 (-1,1) value. */
10540
10541 static tree
10542 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10543 {
10544 tree result = NULL_TREE;
10545
10546 STRIP_NOPS (arg);
10547
10548 /* To proceed, MPFR must exactly represent the target floating point
10549 format, which only happens when the target base equals two. Also
10550 verify ARG is a constant and that ARG_SG is an int pointer. */
10551 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10552 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10553 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10554 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10555 {
10556 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10557
10558 /* In addition to NaN and Inf, the argument cannot be zero or a
10559 negative integer. */
10560 if (real_isfinite (ra)
10561 && ra->cl != rvc_zero
10562 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10563 {
10564 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10565 const int prec = fmt->p;
10566 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10567 int inexact, sg;
10568 mpfr_t m;
10569 tree result_lg;
10570
10571 mpfr_init2 (m, prec);
10572 mpfr_from_real (m, ra, GMP_RNDN);
10573 mpfr_clear_flags ();
10574 inexact = mpfr_lgamma (m, &sg, m, rnd);
10575 result_lg = do_mpfr_ckconv (m, type, inexact);
10576 mpfr_clear (m);
10577 if (result_lg)
10578 {
10579 tree result_sg;
10580
10581 /* Dereference the arg_sg pointer argument. */
10582 arg_sg = build_fold_indirect_ref (arg_sg);
10583 /* Assign the signgam value into *arg_sg. */
10584 result_sg = fold_build2 (MODIFY_EXPR,
10585 TREE_TYPE (arg_sg), arg_sg,
10586 build_int_cst (TREE_TYPE (arg_sg), sg));
10587 TREE_SIDE_EFFECTS (result_sg) = 1;
10588 /* Combine the signgam assignment with the lgamma result. */
10589 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10590 result_sg, result_lg));
10591 }
10592 }
10593 }
10594
10595 return result;
10596 }
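
/* Illustrative sketch (hypothetical user code, not part of this file):

     int sg;
     double l = __builtin_lgamma_r (0.5, &sg);

   folds here to roughly (sg = 1, 0.5723649...), since gamma(0.5) is
   sqrt(pi) > 0 (hence the sign 1) and lgamma(0.5) = log(sqrt(pi)).  */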
10597
10598 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10599 mpc function FUNC on it and return the resulting value as a tree
10600 with type TYPE. The mpfr precision is set to the precision of
10601 TYPE. We assume that function FUNC returns zero if the result
10602 could be calculated exactly within the requested precision. If
10603 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10604 in the arguments and/or results. */
10605
10606 tree
10607 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10608 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10609 {
10610 tree result = NULL_TREE;
10611
10612 STRIP_NOPS (arg0);
10613 STRIP_NOPS (arg1);
10614
10615 /* To proceed, MPFR must exactly represent the target floating point
10616 format, which only happens when the target base equals two. */
10617 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10618 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10619 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10620 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10621 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10622 {
10623 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10624 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10625 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10626 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10627
10628 if (do_nonfinite
10629 || (real_isfinite (re0) && real_isfinite (im0)
10630 && real_isfinite (re1) && real_isfinite (im1)))
10631 {
10632 const struct real_format *const fmt =
10633 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10634 const int prec = fmt->p;
10635 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10636 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10637 int inexact;
10638 mpc_t m0, m1;
10639
10640 mpc_init2 (m0, prec);
10641 mpc_init2 (m1, prec);
10642 mpfr_from_real (mpc_realref (m0), re0, rnd);
10643 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10644 mpfr_from_real (mpc_realref (m1), re1, rnd);
10645 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10646 mpfr_clear_flags ();
10647 inexact = func (m0, m0, m1, crnd);
10648 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10649 mpc_clear (m0);
10650 mpc_clear (m1);
10651 }
10652 }
10653
10654 return result;
10655 }
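
/* Illustrative sketch (the calling convention, not a new interface): a
   caller folding a two-argument complex builtin passes the matching MPC
   entry point, e.g. something along the lines of

     result = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/0, mpc_pow);

   for a constant cpow call; mpc_pow has the required
   int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t) signature.  */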
10656
10657 /* A wrapper function for builtin folding that prevents warnings for
10658 "statement without effect" and the like, caused by removing the
10659 call node before the warning is generated. */
10660
10661 tree
10662 fold_call_stmt (gcall *stmt, bool ignore)
10663 {
10664 tree ret = NULL_TREE;
10665 tree fndecl = gimple_call_fndecl (stmt);
10666 location_t loc = gimple_location (stmt);
10667 if (fndecl
10668 && TREE_CODE (fndecl) == FUNCTION_DECL
10669 && DECL_BUILT_IN (fndecl)
10670 && !gimple_call_va_arg_pack_p (stmt))
10671 {
10672 int nargs = gimple_call_num_args (stmt);
10673 tree *args = (nargs > 0
10674 ? gimple_call_arg_ptr (stmt, 0)
10675 : &error_mark_node);
10676
10677 if (avoid_folding_inline_builtin (fndecl))
10678 return NULL_TREE;
10679 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10680 {
10681 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10682 }
10683 else
10684 {
10685 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10686 if (ret)
10687 {
10688 /* Propagate location information from original call to
10689 expansion of builtin. Otherwise things like
10690 maybe_emit_chk_warning, that operate on the expansion
10691 of a builtin, will use the wrong location information. */
10692 if (gimple_has_location (stmt))
10693 {
10694 tree realret = ret;
10695 if (TREE_CODE (ret) == NOP_EXPR)
10696 realret = TREE_OPERAND (ret, 0);
10697 if (CAN_HAVE_LOCATION_P (realret)
10698 && !EXPR_HAS_LOCATION (realret))
10699 SET_EXPR_LOCATION (realret, loc);
10700 return realret;
10701 }
10702 return ret;
10703 }
10704 }
10705 }
10706 return NULL_TREE;
10707 }
10708
10709 /* Look up the function in builtin_decl that corresponds to DECL
10710 and set ASMSPEC as its user assembler name. DECL must be a
10711 function decl that declares a builtin. */
10712
10713 void
10714 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10715 {
10716 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10717 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10718 && asmspec != 0);
10719
10720 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10721 set_user_assembler_name (builtin, asmspec);
10722
10723 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10724 && INT_TYPE_SIZE < BITS_PER_WORD)
10725 {
10726 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10727 set_user_assembler_libfunc ("ffs", asmspec);
10728 set_optab_libfunc (ffs_optab, mode, "ffs");
10729 }
10730 }
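
/* Illustrative sketch (hypothetical user code, not part of this file):

     extern int ffs (int) __asm__ ("my_ffs");

   renames the builtin's decl, and the extra handling above also makes
   the "ffs" libfunc and the ffs_optab entry for the int-sized mode
   resolve to the user's assembler name, so any libcall emitted when
   INT_TYPE_SIZE < BITS_PER_WORD ends up calling "my_ffs".  */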
10731
10732 /* Return true if DECL is a builtin that expands to a constant or similarly
10733 simple code. */
10734 bool
10735 is_simple_builtin (tree decl)
10736 {
10737 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10738 switch (DECL_FUNCTION_CODE (decl))
10739 {
10740 /* Builtins that expand to constants. */
10741 case BUILT_IN_CONSTANT_P:
10742 case BUILT_IN_EXPECT:
10743 case BUILT_IN_OBJECT_SIZE:
10744 case BUILT_IN_UNREACHABLE:
10745 /* Simple register moves or loads from stack. */
10746 case BUILT_IN_ASSUME_ALIGNED:
10747 case BUILT_IN_RETURN_ADDRESS:
10748 case BUILT_IN_EXTRACT_RETURN_ADDR:
10749 case BUILT_IN_FROB_RETURN_ADDR:
10750 case BUILT_IN_RETURN:
10751 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10752 case BUILT_IN_FRAME_ADDRESS:
10753 case BUILT_IN_VA_END:
10754 case BUILT_IN_STACK_SAVE:
10755 case BUILT_IN_STACK_RESTORE:
10756 /* Exception state returns or moves registers around. */
10757 case BUILT_IN_EH_FILTER:
10758 case BUILT_IN_EH_POINTER:
10759 case BUILT_IN_EH_COPY_VALUES:
10760 return true;
10761
10762 default:
10763 return false;
10764 }
10765
10766 return false;
10767 }
10768
10769 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10770 is most probably expanded inline into reasonably simple code. This is a
10771 superset of is_simple_builtin. */
10772 bool
10773 is_inexpensive_builtin (tree decl)
10774 {
10775 if (!decl)
10776 return false;
10777 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10778 return true;
10779 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10780 switch (DECL_FUNCTION_CODE (decl))
10781 {
10782 case BUILT_IN_ABS:
10783 CASE_BUILT_IN_ALLOCA:
10784 case BUILT_IN_BSWAP16:
10785 case BUILT_IN_BSWAP32:
10786 case BUILT_IN_BSWAP64:
10787 case BUILT_IN_CLZ:
10788 case BUILT_IN_CLZIMAX:
10789 case BUILT_IN_CLZL:
10790 case BUILT_IN_CLZLL:
10791 case BUILT_IN_CTZ:
10792 case BUILT_IN_CTZIMAX:
10793 case BUILT_IN_CTZL:
10794 case BUILT_IN_CTZLL:
10795 case BUILT_IN_FFS:
10796 case BUILT_IN_FFSIMAX:
10797 case BUILT_IN_FFSL:
10798 case BUILT_IN_FFSLL:
10799 case BUILT_IN_IMAXABS:
10800 case BUILT_IN_FINITE:
10801 case BUILT_IN_FINITEF:
10802 case BUILT_IN_FINITEL:
10803 case BUILT_IN_FINITED32:
10804 case BUILT_IN_FINITED64:
10805 case BUILT_IN_FINITED128:
10806 case BUILT_IN_FPCLASSIFY:
10807 case BUILT_IN_ISFINITE:
10808 case BUILT_IN_ISINF_SIGN:
10809 case BUILT_IN_ISINF:
10810 case BUILT_IN_ISINFF:
10811 case BUILT_IN_ISINFL:
10812 case BUILT_IN_ISINFD32:
10813 case BUILT_IN_ISINFD64:
10814 case BUILT_IN_ISINFD128:
10815 case BUILT_IN_ISNAN:
10816 case BUILT_IN_ISNANF:
10817 case BUILT_IN_ISNANL:
10818 case BUILT_IN_ISNAND32:
10819 case BUILT_IN_ISNAND64:
10820 case BUILT_IN_ISNAND128:
10821 case BUILT_IN_ISNORMAL:
10822 case BUILT_IN_ISGREATER:
10823 case BUILT_IN_ISGREATEREQUAL:
10824 case BUILT_IN_ISLESS:
10825 case BUILT_IN_ISLESSEQUAL:
10826 case BUILT_IN_ISLESSGREATER:
10827 case BUILT_IN_ISUNORDERED:
10828 case BUILT_IN_VA_ARG_PACK:
10829 case BUILT_IN_VA_ARG_PACK_LEN:
10830 case BUILT_IN_VA_COPY:
10831 case BUILT_IN_TRAP:
10832 case BUILT_IN_SAVEREGS:
10833 case BUILT_IN_POPCOUNTL:
10834 case BUILT_IN_POPCOUNTLL:
10835 case BUILT_IN_POPCOUNTIMAX:
10836 case BUILT_IN_POPCOUNT:
10837 case BUILT_IN_PARITYL:
10838 case BUILT_IN_PARITYLL:
10839 case BUILT_IN_PARITYIMAX:
10840 case BUILT_IN_PARITY:
10841 case BUILT_IN_LABS:
10842 case BUILT_IN_LLABS:
10843 case BUILT_IN_PREFETCH:
10844 case BUILT_IN_ACC_ON_DEVICE:
10845 return true;
10846
10847 default:
10848 return is_simple_builtin (decl);
10849 }
10850
10851 return false;
10852 }
10853
10854 /* Return true if T is a constant and the value cast to a target char
10855 can be represented by a host char.
10856 Store the cast char constant in *P if so. */
10857
10858 bool
10859 target_char_cst_p (tree t, char *p)
10860 {
10861 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10862 return false;
10863
10864 *p = (char)tree_to_uhwi (t);
10865 return true;
10866 }
10867
10868 /* Return the maximum object size. */
10869
10870 tree
10871 max_object_size (void)
10872 {
10873 /* To do: Make this a configurable parameter. */
10874 return TYPE_MAX_VALUE (ptrdiff_type_node);
10875 }