[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75
76 struct target_builtins default_target_builtins;
77 #if SWITCHABLE_TARGET
78 struct target_builtins *this_target_builtins = &default_target_builtins;
79 #endif
80
81 /* Define the names of the builtin function types and codes. */
82 const char *const built_in_class_names[BUILT_IN_LAST]
83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84
85 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
86 const char * built_in_names[(int) END_BUILTINS] =
87 {
88 #include "builtins.def"
89 };
90
91 /* Set up an array of builtin_info_type; make sure each element decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info[(int)END_BUILTINS];
94
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p;
97
98 static rtx c_readstr (const char *, scalar_int_mode);
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx inline_expand_builtin_string_cmp (tree, rtx);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
126 static rtx expand_builtin_memchr (tree, rtx);
127 static rtx expand_builtin_memcpy (tree, rtx);
128 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp, int endp);
130 static rtx expand_builtin_memmove (tree, rtx);
131 static rtx expand_builtin_mempcpy (tree, rtx);
132 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
133 static rtx expand_builtin_strcat (tree, rtx);
134 static rtx expand_builtin_strcpy (tree, rtx);
135 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
136 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
137 static rtx expand_builtin_stpncpy (tree, rtx);
138 static rtx expand_builtin_strncat (tree, rtx);
139 static rtx expand_builtin_strncpy (tree, rtx);
140 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
141 static rtx expand_builtin_memset (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
146 static rtx expand_builtin_alloca (tree);
147 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
148 static rtx expand_builtin_frame_address (tree, tree);
149 static tree stabilize_va_list_loc (location_t, tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static rtx expand_builtin_expect_with_probability (tree, rtx);
152 static tree fold_builtin_constant_p (tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (location_t, tree, tree);
155 static tree fold_builtin_inf (location_t, tree, int);
156 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
157 static bool validate_arg (const_tree, enum tree_code code);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
161 static tree fold_builtin_isascii (location_t, tree);
162 static tree fold_builtin_toascii (location_t, tree);
163 static tree fold_builtin_isdigit (location_t, tree);
164 static tree fold_builtin_fabs (location_t, tree, tree);
165 static tree fold_builtin_abs (location_t, tree, tree);
166 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
167 enum tree_code);
168 static tree fold_builtin_0 (location_t, tree);
169 static tree fold_builtin_1 (location_t, tree, tree);
170 static tree fold_builtin_2 (location_t, tree, tree, tree);
171 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
172 static tree fold_builtin_varargs (location_t, tree, tree*, int);
173
174 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
175 static tree fold_builtin_strspn (location_t, tree, tree);
176 static tree fold_builtin_strcspn (location_t, tree, tree);
177
178 static rtx expand_builtin_object_size (tree);
179 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
180 enum built_in_function);
181 static void maybe_emit_chk_warning (tree, enum built_in_function);
182 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_free_warning (tree);
184 static tree fold_builtin_object_size (tree, tree);
185
186 unsigned HOST_WIDE_INT target_newline;
187 unsigned HOST_WIDE_INT target_percent;
188 static unsigned HOST_WIDE_INT target_c;
189 static unsigned HOST_WIDE_INT target_s;
190 char target_percent_c[3];
191 char target_percent_s[3];
192 char target_percent_s_newline[4];
193 static tree do_mpfr_remquo (tree, tree, tree);
194 static tree do_mpfr_lgamma_r (tree, tree, tree);
195 static void expand_builtin_sync_synchronize (void);
196
197 /* Return true if NAME starts with __builtin_, __sync_, or __atomic_. */
198
199 static bool
200 is_builtin_name (const char *name)
201 {
202 if (strncmp (name, "__builtin_", 10) == 0)
203 return true;
204 if (strncmp (name, "__sync_", 7) == 0)
205 return true;
206 if (strncmp (name, "__atomic_", 9) == 0)
207 return true;
208 return false;
209 }
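/* Illustrative examples (editorial note, not part of the original source):
   is_builtin_name ("__builtin_memcpy") and is_builtin_name ("__atomic_load")
   return true, while is_builtin_name ("memcpy") returns false, since only
   the reserved "__builtin_", "__sync_" and "__atomic_" prefixes match.  */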
210
211 /* Return true if NODE should be considered for inline expansion regardless
212 of the optimization level. This means whenever a function is invoked with
213 its "internal" name, which normally contains the prefix "__builtin". */
214
215 bool
216 called_as_built_in (tree node)
217 {
218 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
219 we want the name used to call the function, not the name it
220 will have. */
221 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
222 return is_builtin_name (name);
223 }
224
225 /* Compute values M and N such that M divides (address of EXP - N) and such
226 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
227 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
228 *ALIGNP and any bit-offset to *BITPOSP.
229
230 Note that the address (and thus the alignment) computed here is based
231 on the address to which a symbol resolves, whereas DECL_ALIGN is based
232 on the address at which an object is actually located. These two
233 addresses are not always the same. For example, on ARM targets,
234 the address &foo of a Thumb function foo() has the lowest bit set,
235 whereas foo() itself starts on an even address.
236
237 If ADDR_P is true we are taking the address of the memory reference EXP
238 and thus cannot rely on the access taking place. */
239
240 static bool
241 get_object_alignment_2 (tree exp, unsigned int *alignp,
242 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
243 {
244 poly_int64 bitsize, bitpos;
245 tree offset;
246 machine_mode mode;
247 int unsignedp, reversep, volatilep;
248 unsigned int align = BITS_PER_UNIT;
249 bool known_alignment = false;
250
251 /* Get the innermost object and the constant (bitpos) and possibly
252 variable (offset) offset of the access. */
253 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
254 &unsignedp, &reversep, &volatilep);
255
256 /* Extract alignment information from the innermost object and
257 possibly adjust bitpos and offset. */
258 if (TREE_CODE (exp) == FUNCTION_DECL)
259 {
260 /* Function addresses can encode extra information besides their
261 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
262 allows the low bit to be used as a virtual bit, we know
263 that the address itself must be at least 2-byte aligned. */
264 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
265 align = 2 * BITS_PER_UNIT;
266 }
267 else if (TREE_CODE (exp) == LABEL_DECL)
268 ;
269 else if (TREE_CODE (exp) == CONST_DECL)
270 {
271 /* The alignment of a CONST_DECL is determined by its initializer. */
272 exp = DECL_INITIAL (exp);
273 align = TYPE_ALIGN (TREE_TYPE (exp));
274 if (CONSTANT_CLASS_P (exp))
275 align = targetm.constant_alignment (exp, align);
276
277 known_alignment = true;
278 }
279 else if (DECL_P (exp))
280 {
281 align = DECL_ALIGN (exp);
282 known_alignment = true;
283 }
284 else if (TREE_CODE (exp) == INDIRECT_REF
285 || TREE_CODE (exp) == MEM_REF
286 || TREE_CODE (exp) == TARGET_MEM_REF)
287 {
288 tree addr = TREE_OPERAND (exp, 0);
289 unsigned ptr_align;
290 unsigned HOST_WIDE_INT ptr_bitpos;
291 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
292
293 /* If the address is explicitly aligned, handle that. */
294 if (TREE_CODE (addr) == BIT_AND_EXPR
295 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
296 {
297 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
298 ptr_bitmask *= BITS_PER_UNIT;
299 align = least_bit_hwi (ptr_bitmask);
300 addr = TREE_OPERAND (addr, 0);
301 }
302
303 known_alignment
304 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
305 align = MAX (ptr_align, align);
306
307 /* Re-apply explicit alignment to the bitpos. */
308 ptr_bitpos &= ptr_bitmask;
309
310 /* The alignment of the pointer operand in a TARGET_MEM_REF
311 has to take the variable offset parts into account. */
312 if (TREE_CODE (exp) == TARGET_MEM_REF)
313 {
314 if (TMR_INDEX (exp))
315 {
316 unsigned HOST_WIDE_INT step = 1;
317 if (TMR_STEP (exp))
318 step = TREE_INT_CST_LOW (TMR_STEP (exp));
319 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
320 }
321 if (TMR_INDEX2 (exp))
322 align = BITS_PER_UNIT;
323 known_alignment = false;
324 }
325
326 /* When EXP is an actual memory reference then we can use
327 TYPE_ALIGN of a pointer indirection to derive alignment.
328 Do so only if get_pointer_alignment_1 did not reveal absolute
329 alignment knowledge and if using that alignment would
330 improve the situation. */
331 unsigned int talign;
332 if (!addr_p && !known_alignment
333 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
334 && talign > align)
335 align = talign;
336 else
337 {
338 /* Else adjust bitpos accordingly. */
339 bitpos += ptr_bitpos;
340 if (TREE_CODE (exp) == MEM_REF
341 || TREE_CODE (exp) == TARGET_MEM_REF)
342 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
343 }
344 }
345 else if (TREE_CODE (exp) == STRING_CST)
346 {
347 /* STRING_CSTs are the only constant objects we allow not to be
348 wrapped inside a CONST_DECL. */
349 align = TYPE_ALIGN (TREE_TYPE (exp));
350 if (CONSTANT_CLASS_P (exp))
351 align = targetm.constant_alignment (exp, align);
352
353 known_alignment = true;
354 }
355
356 /* If there is a non-constant offset part extract the maximum
357 alignment that can prevail. */
358 if (offset)
359 {
360 unsigned int trailing_zeros = tree_ctz (offset);
361 if (trailing_zeros < HOST_BITS_PER_INT)
362 {
363 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
364 if (inner)
365 align = MIN (align, inner);
366 }
367 }
368
369 /* Account for the alignment of runtime coefficients, so that the constant
370 bitpos is guaranteed to be accurate. */
371 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
372 if (alt_align != 0 && alt_align < align)
373 {
374 align = alt_align;
375 known_alignment = false;
376 }
377
378 *alignp = align;
379 *bitposp = bitpos.coeffs[0] & (align - 1);
380 return known_alignment;
381 }
382
383 /* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
385 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
386 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
387
388 bool
389 get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
391 {
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393 }
394
395 /* Return the alignment in bits of EXP, an object. */
396
397 unsigned int
398 get_object_alignment (tree exp)
399 {
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
403 get_object_alignment_1 (exp, &align, &bitpos);
404
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
409 align = least_bit_hwi (bitpos);
410 return align;
411 }
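/* Worked example (editorial sketch): given

     char buf[8] __attribute__ ((aligned (8)));

   get_object_alignment on the reference buf[0] yields 64 bits, while on
   buf[1] it yields 8 bits, because the known low bits of the address
   (bitpos == 8) cap the provable alignment via least_bit_hwi.  */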
412
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
415 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
417
418 If EXP is not a pointer, false is returned too. */
419
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
423 {
424 STRIP_NOPS (exp);
425
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
430 {
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
438 {
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
441 {
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
445 }
446 }
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
450 }
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
453 {
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
456
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
458 {
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 /* We cannot really tell whether this result is an approximation. */
466 return false;
467 }
468 else
469 {
470 *bitposp = 0;
471 *alignp = BITS_PER_UNIT;
472 return false;
473 }
474 }
475 else if (TREE_CODE (exp) == INTEGER_CST)
476 {
477 *alignp = BIGGEST_ALIGNMENT;
478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
481 }
482
483 *bitposp = 0;
484 *alignp = BITS_PER_UNIT;
485 return false;
486 }
487
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495 unsigned int
496 get_pointer_alignment (tree exp)
497 {
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
507 align = least_bit_hwi (bitpos);
508
509 return align;
510 }
511
512 /* Return the number of leading non-zero elements in the sequence
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
515
516 unsigned
517 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518 {
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520
521 unsigned n;
522
523 if (eltsize == 1)
524 {
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
527 {
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
531 }
532 }
533 else
534 {
535 for (n = 0; n < maxelts; n++)
536 {
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
540 }
541 }
542 return n;
543 }
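/* Example (editorial note): with ELTSIZE == 1,
   string_length ("hi\0there", 1, 8) returns 2, and with ELTSIZE == 2 the
   loop above compares two bytes at a time, so a UTF-16-style buffer
   { 'h', 0, 'i', 0, 0, 0 } gives string_length (buf, 2, 3) == 2.  */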
544
545 /* For a call at LOC to a function FN that expects a string in the argument
546 ARG, issue a diagnostic due to it being called with an argument
547 declared at DECL that is a character array with no terminating NUL. */
548
549 void
550 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
551 {
552 if (TREE_NO_WARNING (arg))
553 return;
554
555 loc = expansion_point_location_if_in_system_header (loc);
556
557 if (warning_at (loc, OPT_Wstringop_overflow_,
558 "%qs argument missing terminating nul", fn))
559 {
560 inform (DECL_SOURCE_LOCATION (decl),
561 "referenced argument declared here");
562 TREE_NO_WARNING (arg) = 1;
563 }
564 }
565
566 /* If EXP refers to an unterminated constant character array return
567 the declaration of the object of which the array is a member or
568 element. Otherwise return null. */
569
570 tree
571 unterminated_array (tree exp)
572 {
573 tree nonstr = NULL;
574 c_strlen (exp, 1, &nonstr);
575 return nonstr;
576 }
577
578 /* Compute the length of a null-terminated character string or wide
579 character string handling character sizes of 1, 2, and 4 bytes.
580 TREE_STRING_LENGTH is not the right way because it evaluates to
581 the size of the character array in bytes (as opposed to characters)
582 and because it can contain a zero byte in the middle.
583
584 ONLY_VALUE should be nonzero if the result is not going to be emitted
585 into the instruction stream and zero if it is going to be expanded.
586 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
587 is returned, otherwise NULL, since
588 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
589 evaluate the side-effects.
590
591 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
592 accesses. Note that this implies the result is not going to be emitted
593 into the instruction stream.
594
595 If a string value that is not zero-terminated is encountered and NONSTR
596 is non-null, the declaration of the string value is assigned to *NONSTR.
597 *NONSTR accumulates across calls: it is not cleared on success, so the
598 caller has to initialize it to NULL_TREE.
599
600 ELTSIZE is 1 for normal single-byte character strings, and 2 or
601 4 for wide character strings. ELTSIZE is 1 by default.
602
603 The value returned is of type `ssizetype'. */
604
605 tree
606 c_strlen (tree src, int only_value, tree *nonstr, unsigned eltsize)
607 {
608 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
609 STRIP_NOPS (src);
610 if (TREE_CODE (src) == COND_EXPR
611 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
612 {
613 tree len1, len2;
614
615 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, nonstr, eltsize);
616 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, nonstr, eltsize);
617 if (tree_int_cst_equal (len1, len2))
618 return len1;
619 }
620
621 if (TREE_CODE (src) == COMPOUND_EXPR
622 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
623 return c_strlen (TREE_OPERAND (src, 1), only_value, nonstr, eltsize);
624
625 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
626
627 /* Offset from the beginning of the string in bytes. */
628 tree byteoff;
629 tree memsize;
630 tree decl;
631 src = string_constant (src, &byteoff, &memsize, &decl);
632 if (src == 0)
633 return NULL_TREE;
634
635 /* Determine the size of the string element. */
636 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
637 return NULL_TREE;
638
639 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
640 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
641 in case the latter is less than the size of the array, such as when
642 SRC refers to a short string literal used to initialize a large array.
643 In that case, the elements of the array after the terminating NUL are
644 all NUL. */
645 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
646 strelts = strelts / eltsize;
647
648 if (!tree_fits_uhwi_p (memsize))
649 return NULL_TREE;
650
651 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
652
653 /* PTR can point to the byte representation of any string type, including
654 char* and wchar_t*. */
655 const char *ptr = TREE_STRING_POINTER (src);
656
657 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
658 {
659 /* The code below works only for single byte character types. */
660 if (eltsize != 1)
661 return NULL_TREE;
662
663 /* If the string has an internal NUL character followed by any
664 non-NUL characters (e.g., "foo\0bar"), we can't compute
665 the offset to the following NUL if we don't know where to
666 start searching for it. */
667 unsigned len = string_length (ptr, eltsize, strelts);
668
669 /* Return when an embedded null character is found or none at all. */
670 if (len + 1 < strelts)
671 return NULL_TREE;
672 else if (len >= maxelts)
673 {
674 if (nonstr && decl)
675 *nonstr = decl;
676 return NULL_TREE;
677 }
678
679 /* For empty strings the result should be zero. */
680 if (len == 0)
681 return ssize_int (0);
682
683 /* We don't know the starting offset, but we do know that the string
684 has no internal zero bytes. If the offset falls within the bounds
685 of the string subtract the offset from the length of the string,
686 and return that. Otherwise the length is zero. Take care to
687 use SAVE_EXPR in case the OFFSET has side-effects. */
688 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
689 offsave = fold_convert (ssizetype, offsave);
690 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
691 build_int_cst (ssizetype, len));
692 tree lenexp = size_diffop_loc (loc, ssize_int (len), offsave);
693 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
694 build_zero_cst (ssizetype));
695 }
696
697 /* Offset from the beginning of the string in elements. */
698 HOST_WIDE_INT eltoff;
699
700 /* We have a known offset into the string. Start searching there for
701 a null character if we can represent it as a single HOST_WIDE_INT. */
702 if (byteoff == 0)
703 eltoff = 0;
704 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
705 eltoff = -1;
706 else
707 eltoff = tree_to_uhwi (byteoff) / eltsize;
708
709 /* If the offset is known to be out of bounds, warn, and call strlen at
710 runtime. */
711 if (eltoff < 0 || eltoff >= maxelts)
712 {
713 /* Suppress multiple warnings for propagated constant strings. */
714 if (only_value != 2
715 && !TREE_NO_WARNING (src))
716 {
717 warning_at (loc, OPT_Warray_bounds,
718 "offset %qwi outside bounds of constant string",
719 eltoff);
720 TREE_NO_WARNING (src) = 1;
721 }
722 return NULL_TREE;
723 }
724
725 /* If eltoff is larger than strelts but less than maxelts the
726 string length is zero, since the excess memory will be zero. */
727 if (eltoff > strelts)
728 return ssize_int (0);
729
730 /* Use strlen to search for the first zero byte. Since any strings
731 constructed with build_string will have nulls appended, we win even
732 if we get handed something like (char[4])"abcd".
733
734 Since ELTOFF is our starting index into the string, no further
735 calculation is needed. */
736 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
737 strelts - eltoff);
738
739 /* Don't know what to return if there was no zero termination.
740 Ideally this would turn into a gcc_checking_assert over time. */
741 if (len >= maxelts - eltoff)
742 {
743 if (nonstr && decl)
744 *nonstr = decl;
745 return NULL_TREE;
746 }
747
748 return ssize_int (len);
749 }
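/* Examples (editorial sketch of the cases handled above): for a plain
   STRING_CST such as "foo", c_strlen returns ssize_int (3).  For
   i++ ? "foo" : "bar" with ONLY_VALUE nonzero it also returns 3 because
   both arms have equal length, whereas i++ ? "foo" : "barbaz" returns
   NULL_TREE.  A non-constant byte offset into "foobar" produces the
   COND_EXPR built above, which evaluates to 6 - offset when the offset is
   within bounds and to 0 otherwise.  */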
750
751 /* Return a constant integer corresponding to the target reading
752 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
753
754 static rtx
755 c_readstr (const char *str, scalar_int_mode mode)
756 {
757 HOST_WIDE_INT ch;
758 unsigned int i, j;
759 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
760
761 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
762 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
763 / HOST_BITS_PER_WIDE_INT;
764
765 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
766 for (i = 0; i < len; i++)
767 tmp[i] = 0;
768
769 ch = 1;
770 for (i = 0; i < GET_MODE_SIZE (mode); i++)
771 {
772 j = i;
773 if (WORDS_BIG_ENDIAN)
774 j = GET_MODE_SIZE (mode) - i - 1;
775 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
776 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
777 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
778 j *= BITS_PER_UNIT;
779
780 if (ch)
781 ch = (unsigned char) str[i];
782 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
783 }
784
785 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
786 return immed_wide_int_const (c, mode);
787 }
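/* Example (editorial note, assuming a little-endian target where neither
   BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN is set): c_readstr ("abcd", SImode)
   produces the immediate 0x64636261, i.e. 'a' lands in the least
   significant byte, matching what a 4-byte load from the string constant
   would read on such a target.  */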
788
789 /* Cast a target constant CST to target CHAR and if that value fits into
790 the host char type, return zero and store that value in the variable
791 pointed to by P. */
792
793 static int
794 target_char_cast (tree cst, char *p)
795 {
796 unsigned HOST_WIDE_INT val, hostval;
797
798 if (TREE_CODE (cst) != INTEGER_CST
799 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
800 return 1;
801
802 /* Do not care if it fits or not right here. */
803 val = TREE_INT_CST_LOW (cst);
804
805 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
806 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
807
808 hostval = val;
809 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
810 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
811
812 if (val != hostval)
813 return 1;
814
815 *p = hostval;
816 return 0;
817 }
818
819 /* Similar to save_expr, but assumes that arbitrary code is not executed
820 in between the multiple evaluations. In particular, we assume that a
821 non-addressable local variable will not be modified. */
822
823 static tree
824 builtin_save_expr (tree exp)
825 {
826 if (TREE_CODE (exp) == SSA_NAME
827 || (TREE_ADDRESSABLE (exp) == 0
828 && (TREE_CODE (exp) == PARM_DECL
829 || (VAR_P (exp) && !TREE_STATIC (exp)))))
830 return exp;
831
832 return save_expr (exp);
833 }
834
835 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
836 times to get the address of either a higher stack frame, or a return
837 address located within it (depending on FNDECL_CODE). */
838
839 static rtx
840 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
841 {
842 int i;
843 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
844 if (tem == NULL_RTX)
845 {
846 /* For a zero count with __builtin_return_address, we don't care what
847 frame address we return, because target-specific definitions will
848 override us. Therefore frame pointer elimination is OK, and using
849 the soft frame pointer is OK.
850
851 For a nonzero count, or a zero count with __builtin_frame_address,
852 we require a stable offset from the current frame pointer to the
853 previous one, so we must use the hard frame pointer, and
854 we must disable frame pointer elimination. */
855 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
856 tem = frame_pointer_rtx;
857 else
858 {
859 tem = hard_frame_pointer_rtx;
860
861 /* Tell reload not to eliminate the frame pointer. */
862 crtl->accesses_prior_frames = 1;
863 }
864 }
865
866 if (count > 0)
867 SETUP_FRAME_ADDRESSES ();
868
869 /* On the SPARC, the return address is not in the frame, it is in a
870 register. There is no way to access it off of the current frame
871 pointer, but it can be accessed off the previous frame pointer by
872 reading the value from the register window save area. */
873 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
874 count--;
875
876 /* Scan back COUNT frames to the specified frame. */
877 for (i = 0; i < count; i++)
878 {
879 /* Assume the dynamic chain pointer is in the word that the
880 frame address points to, unless otherwise specified. */
881 tem = DYNAMIC_CHAIN_ADDRESS (tem);
882 tem = memory_address (Pmode, tem);
883 tem = gen_frame_mem (Pmode, tem);
884 tem = copy_to_reg (tem);
885 }
886
887 /* For __builtin_frame_address, return what we've got. But, on
888 the SPARC for example, we may have to add a bias. */
889 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
890 return FRAME_ADDR_RTX (tem);
891
892 /* For __builtin_return_address, get the return address from that frame. */
893 #ifdef RETURN_ADDR_RTX
894 tem = RETURN_ADDR_RTX (count, tem);
895 #else
896 tem = memory_address (Pmode,
897 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
898 tem = gen_frame_mem (Pmode, tem);
899 #endif
900 return tem;
901 }
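/* Usage sketch (editorial note): this helper backs both
   __builtin_return_address and __builtin_frame_address, e.g.

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   COUNT must be a constant; nonzero counts walk the dynamic chain and
   force use of the hard frame pointer, as described above.  */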
902
903 /* Alias set used for setjmp buffer. */
904 static alias_set_type setjmp_alias_set = -1;
905
906 /* Construct the leading half of a __builtin_setjmp call. Control will
907 return to RECEIVER_LABEL. This is also called directly by the SJLJ
908 exception handling code. */
909
910 void
911 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
912 {
913 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
914 rtx stack_save;
915 rtx mem;
916
917 if (setjmp_alias_set == -1)
918 setjmp_alias_set = new_alias_set ();
919
920 buf_addr = convert_memory_address (Pmode, buf_addr);
921
922 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
923
924 /* We store the frame pointer and the address of receiver_label in
925 the buffer and use the rest of it for the stack save area, which
926 is machine-dependent. */
927
928 mem = gen_rtx_MEM (Pmode, buf_addr);
929 set_mem_alias_set (mem, setjmp_alias_set);
930 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
931
932 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
933 GET_MODE_SIZE (Pmode)));
934 set_mem_alias_set (mem, setjmp_alias_set);
935
936 emit_move_insn (validize_mem (mem),
937 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
938
939 stack_save = gen_rtx_MEM (sa_mode,
940 plus_constant (Pmode, buf_addr,
941 2 * GET_MODE_SIZE (Pmode)));
942 set_mem_alias_set (stack_save, setjmp_alias_set);
943 emit_stack_save (SAVE_NONLOCAL, &stack_save);
944
945 /* If there is further processing to do, do it. */
946 if (targetm.have_builtin_setjmp_setup ())
947 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
948
949 /* We have a nonlocal label. */
950 cfun->has_nonlocal_label = 1;
951 }
952
953 /* Construct the trailing part of a __builtin_setjmp call. This is
954 also called directly by the SJLJ exception handling code.
955 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
956
957 void
958 expand_builtin_setjmp_receiver (rtx receiver_label)
959 {
960 rtx chain;
961
962 /* Mark the FP as used when we get here, so we have to make sure it's
963 marked as used by this function. */
964 emit_use (hard_frame_pointer_rtx);
965
966 /* Mark the static chain as clobbered here so life information
967 doesn't get messed up for it. */
968 chain = rtx_for_static_chain (current_function_decl, true);
969 if (chain && REG_P (chain))
970 emit_clobber (chain);
971
972 /* Now put in the code to restore the frame pointer, and argument
973 pointer, if needed. */
974 if (! targetm.have_nonlocal_goto ())
975 {
976 /* First adjust our frame pointer to its actual value. It was
977 previously set to the start of the virtual area corresponding to
978 the stacked variables when we branched here and now needs to be
979 adjusted to the actual hardware fp value.
980
981 Assignments to virtual registers are converted by
982 instantiate_virtual_regs into the corresponding assignment
983 to the underlying register (fp in this case) that makes
984 the original assignment true.
985 So the following insn will actually be decrementing fp by
986 TARGET_STARTING_FRAME_OFFSET. */
987 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
988
989 /* Restoring the frame pointer also modifies the hard frame pointer.
990 Mark it used (so that the previous assignment remains live once
991 the frame pointer is eliminated) and clobbered (to represent the
992 implicit update from the assignment). */
993 emit_use (hard_frame_pointer_rtx);
994 emit_clobber (hard_frame_pointer_rtx);
995 }
996
997 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
998 {
999 /* If the argument pointer can be eliminated in favor of the
1000 frame pointer, we don't need to restore it. We assume here
1001 that if such an elimination is present, it can always be used.
1002 This is the case on all known machines; if we don't make this
1003 assumption, we do unnecessary saving on many machines. */
1004 size_t i;
1005 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1006
1007 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1008 if (elim_regs[i].from == ARG_POINTER_REGNUM
1009 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1010 break;
1011
1012 if (i == ARRAY_SIZE (elim_regs))
1013 {
1014 /* Now restore our arg pointer from the address at which it
1015 was saved in our stack frame. */
1016 emit_move_insn (crtl->args.internal_arg_pointer,
1017 copy_to_reg (get_arg_pointer_save_area ()));
1018 }
1019 }
1020
1021 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1022 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1023 else if (targetm.have_nonlocal_goto_receiver ())
1024 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1025 else
1026 { /* Nothing */ }
1027
1028 /* We must not allow the code we just generated to be reordered by
1029 scheduling. Specifically, the update of the frame pointer must
1030 happen immediately, not later. */
1031 emit_insn (gen_blockage ());
1032 }
1033
1034 /* __builtin_longjmp is passed a pointer to an array of five words (not
1035 all will be used on all machines). It operates similarly to the C
1036 library function of the same name, but is more efficient. Much of
1037 the code below is copied from the handling of non-local gotos. */
1038
1039 static void
1040 expand_builtin_longjmp (rtx buf_addr, rtx value)
1041 {
1042 rtx fp, lab, stack;
1043 rtx_insn *insn, *last;
1044 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1045
1046 /* DRAP is needed for stack realignment if longjmp is expanded to the
1047 current function. */
1048 if (SUPPORTS_STACK_ALIGNMENT)
1049 crtl->need_drap = true;
1050
1051 if (setjmp_alias_set == -1)
1052 setjmp_alias_set = new_alias_set ();
1053
1054 buf_addr = convert_memory_address (Pmode, buf_addr);
1055
1056 buf_addr = force_reg (Pmode, buf_addr);
1057
1058 /* We require the user to pass a second argument of 1, because
1059 that is what builtin_setjmp will return. */
1060 gcc_assert (value == const1_rtx);
1061
1062 last = get_last_insn ();
1063 if (targetm.have_builtin_longjmp ())
1064 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1065 else
1066 {
1067 fp = gen_rtx_MEM (Pmode, buf_addr);
1068 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1069 GET_MODE_SIZE (Pmode)));
1070
1071 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1072 2 * GET_MODE_SIZE (Pmode)));
1073 set_mem_alias_set (fp, setjmp_alias_set);
1074 set_mem_alias_set (lab, setjmp_alias_set);
1075 set_mem_alias_set (stack, setjmp_alias_set);
1076
1077 /* Pick up FP, label, and SP from the block and jump. This code is
1078 from expand_goto in stmt.c; see there for detailed comments. */
1079 if (targetm.have_nonlocal_goto ())
1080 /* We have to pass a value to the nonlocal_goto pattern that will
1081 get copied into the static_chain pointer, but it does not matter
1082 what that value is, because builtin_setjmp does not use it. */
1083 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1084 else
1085 {
1086 lab = copy_to_reg (lab);
1087
1088 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1089 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1090
1091 emit_move_insn (hard_frame_pointer_rtx, fp);
1092 emit_stack_restore (SAVE_NONLOCAL, stack);
1093
1094 emit_use (hard_frame_pointer_rtx);
1095 emit_use (stack_pointer_rtx);
1096 emit_indirect_jump (lab);
1097 }
1098 }
1099
1100 /* Search backwards and mark the jump insn as a non-local goto.
1101 Note that this precludes the use of __builtin_longjmp to a
1102 __builtin_setjmp target in the same function. However, we've
1103 already cautioned the user that these functions are for
1104 internal exception handling use only. */
1105 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1106 {
1107 gcc_assert (insn != last);
1108
1109 if (JUMP_P (insn))
1110 {
1111 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1112 break;
1113 }
1114 else if (CALL_P (insn))
1115 break;
1116 }
1117 }
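/* Usage sketch (editorial note): the buffer used here is the five-word
   block laid out by expand_builtin_setjmp_setup above: the frame pointer
   at offset 0, the receiver label at GET_MODE_SIZE (Pmode), and the stack
   save area at 2 * GET_MODE_SIZE (Pmode).  A typical (internal-only) use:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   The second argument must be the constant 1, which is why VALUE is
   asserted to be const1_rtx above.  */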
1118
1119 static inline bool
1120 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1121 {
1122 return (iter->i < iter->n);
1123 }
1124
1125 /* This function validates the types of a function call argument list
1126 against a specified list of tree_codes. If the last specifier is a 0,
1127 that represents an ellipsis, otherwise the last specifier must be a
1128 VOID_TYPE. */
1129
1130 static bool
1131 validate_arglist (const_tree callexpr, ...)
1132 {
1133 enum tree_code code;
1134 bool res = 0;
1135 va_list ap;
1136 const_call_expr_arg_iterator iter;
1137 const_tree arg;
1138
1139 va_start (ap, callexpr);
1140 init_const_call_expr_arg_iterator (callexpr, &iter);
1141
1142 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1143 tree fn = CALL_EXPR_FN (callexpr);
1144 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1145
1146 for (unsigned argno = 1; ; ++argno)
1147 {
1148 code = (enum tree_code) va_arg (ap, int);
1149
1150 switch (code)
1151 {
1152 case 0:
1153 /* This signifies an ellipsis; any further arguments are all ok. */
1154 res = true;
1155 goto end;
1156 case VOID_TYPE:
1157 /* This signifies an endlink, if no arguments remain, return
1158 true, otherwise return false. */
1159 res = !more_const_call_expr_args_p (&iter);
1160 goto end;
1161 case POINTER_TYPE:
1162 /* The actual argument must be nonnull when either the whole
1163 called function has been declared nonnull, or when the formal
1164 argument corresponding to the actual argument has been. */
1165 if (argmap
1166 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1167 {
1168 arg = next_const_call_expr_arg (&iter);
1169 if (!validate_arg (arg, code) || integer_zerop (arg))
1170 goto end;
1171 break;
1172 }
1173 /* FALLTHRU */
1174 default:
1175 /* If no parameters remain or the parameter's code does not
1176 match the specified code, return false. Otherwise continue
1177 checking any remaining arguments. */
1178 arg = next_const_call_expr_arg (&iter);
1179 if (!validate_arg (arg, code))
1180 goto end;
1181 break;
1182 }
1183 }
1184
1185 /* We need gotos here since we can only have one VA_CLOSE in a
1186 function. */
1187 end: ;
1188 va_end (ap);
1189
1190 BITMAP_FREE (argmap);
1191
1192 return res;
1193 }
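/* Example (editorial note): the expanders below call this as, e.g.,

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which accepts exactly two pointer arguments, or

     validate_arglist (exp, POINTER_TYPE, 0)

   which accepts a pointer followed by any further arguments.  */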
1194
1195 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1196 and the address of the save area. */
1197
1198 static rtx
1199 expand_builtin_nonlocal_goto (tree exp)
1200 {
1201 tree t_label, t_save_area;
1202 rtx r_label, r_save_area, r_fp, r_sp;
1203 rtx_insn *insn;
1204
1205 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1206 return NULL_RTX;
1207
1208 t_label = CALL_EXPR_ARG (exp, 0);
1209 t_save_area = CALL_EXPR_ARG (exp, 1);
1210
1211 r_label = expand_normal (t_label);
1212 r_label = convert_memory_address (Pmode, r_label);
1213 r_save_area = expand_normal (t_save_area);
1214 r_save_area = convert_memory_address (Pmode, r_save_area);
1215 /* Copy the address of the save location to a register just in case it was
1216 based on the frame pointer. */
1217 r_save_area = copy_to_reg (r_save_area);
1218 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1219 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1220 plus_constant (Pmode, r_save_area,
1221 GET_MODE_SIZE (Pmode)));
1222
1223 crtl->has_nonlocal_goto = 1;
1224
1225 /* ??? We no longer need to pass the static chain value, afaik. */
1226 if (targetm.have_nonlocal_goto ())
1227 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1228 else
1229 {
1230 r_label = copy_to_reg (r_label);
1231
1232 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1233 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1234
1235 /* Restore frame pointer for containing function. */
1236 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1237 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1238
1239 /* USE of hard_frame_pointer_rtx added for consistency;
1240 not clear if really needed. */
1241 emit_use (hard_frame_pointer_rtx);
1242 emit_use (stack_pointer_rtx);
1243
1244 /* If the architecture is using a GP register, we must
1245 conservatively assume that the target function makes use of it.
1246 The prologue of functions with nonlocal gotos must therefore
1247 initialize the GP register to the appropriate value, and we
1248 must then make sure that this value is live at the point
1249 of the jump. (Note that this doesn't necessarily apply
1250 to targets with a nonlocal_goto pattern; they are free
1251 to implement it in their own way. Note also that this is
1252 a no-op if the GP register is a global invariant.) */
1253 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1254 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1255 emit_use (pic_offset_table_rtx);
1256
1257 emit_indirect_jump (r_label);
1258 }
1259
1260 /* Search backwards to the jump insn and mark it as a
1261 non-local goto. */
1262 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1263 {
1264 if (JUMP_P (insn))
1265 {
1266 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1267 break;
1268 }
1269 else if (CALL_P (insn))
1270 break;
1271 }
1272
1273 return const0_rtx;
1274 }
1275
1276 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1277 (not all will be used on all machines) that was passed to __builtin_setjmp.
1278 It updates the stack pointer in that block to the current value. This is
1279 also called directly by the SJLJ exception handling code. */
1280
1281 void
1282 expand_builtin_update_setjmp_buf (rtx buf_addr)
1283 {
1284 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1285 buf_addr = convert_memory_address (Pmode, buf_addr);
1286 rtx stack_save
1287 = gen_rtx_MEM (sa_mode,
1288 memory_address
1289 (sa_mode,
1290 plus_constant (Pmode, buf_addr,
1291 2 * GET_MODE_SIZE (Pmode))));
1292
1293 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1294 }
1295
1296 /* Expand a call to __builtin_prefetch. For a target that does not support
1297 data prefetch, evaluate the memory address argument in case it has side
1298 effects. */
1299
1300 static void
1301 expand_builtin_prefetch (tree exp)
1302 {
1303 tree arg0, arg1, arg2;
1304 int nargs;
1305 rtx op0, op1, op2;
1306
1307 if (!validate_arglist (exp, POINTER_TYPE, 0))
1308 return;
1309
1310 arg0 = CALL_EXPR_ARG (exp, 0);
1311
1312 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1313 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1314 locality). */
1315 nargs = call_expr_nargs (exp);
1316 if (nargs > 1)
1317 arg1 = CALL_EXPR_ARG (exp, 1);
1318 else
1319 arg1 = integer_zero_node;
1320 if (nargs > 2)
1321 arg2 = CALL_EXPR_ARG (exp, 2);
1322 else
1323 arg2 = integer_three_node;
1324
1325 /* Argument 0 is an address. */
1326 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1327
1328 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1329 if (TREE_CODE (arg1) != INTEGER_CST)
1330 {
1331 error ("second argument to %<__builtin_prefetch%> must be a constant");
1332 arg1 = integer_zero_node;
1333 }
1334 op1 = expand_normal (arg1);
1335 /* Argument 1 must be either zero or one. */
1336 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1337 {
1338 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1339 " using zero");
1340 op1 = const0_rtx;
1341 }
1342
1343 /* Argument 2 (locality) must be a compile-time constant int. */
1344 if (TREE_CODE (arg2) != INTEGER_CST)
1345 {
1346 error ("third argument to %<__builtin_prefetch%> must be a constant");
1347 arg2 = integer_zero_node;
1348 }
1349 op2 = expand_normal (arg2);
1350 /* Argument 2 must be 0, 1, 2, or 3. */
1351 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1352 {
1353 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1354 op2 = const0_rtx;
1355 }
1356
1357 if (targetm.have_prefetch ())
1358 {
1359 struct expand_operand ops[3];
1360
1361 create_address_operand (&ops[0], op0);
1362 create_integer_operand (&ops[1], INTVAL (op1));
1363 create_integer_operand (&ops[2], INTVAL (op2));
1364 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1365 return;
1366 }
1367
1368 /* Don't do anything with direct references to volatile memory, but
1369 generate code to handle other side effects. */
1370 if (!MEM_P (op0) && side_effects_p (op0))
1371 emit_insn (op0);
1372 }
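/* Usage sketch (editorial note): a call such as

     __builtin_prefetch (&a[i + 8], 0, 3);

   prefetches for reading with maximum temporal locality; the second and
   third arguments default to 0 and 3 respectively and must be compile-time
   constants, as enforced above.  */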
1373
1374 /* Get a MEM rtx for expression EXP which is the address of an operand
1375 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1376 the maximum length of the block of memory that might be accessed or
1377 NULL if unknown. */
1378
1379 static rtx
1380 get_memory_rtx (tree exp, tree len)
1381 {
1382 tree orig_exp = exp;
1383 rtx addr, mem;
1384
1385 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1386 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1387 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1388 exp = TREE_OPERAND (exp, 0);
1389
1390 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1391 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1392
1393 /* Get an expression we can use to find the attributes to assign to MEM.
1394 First remove any nops. */
1395 while (CONVERT_EXPR_P (exp)
1396 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1397 exp = TREE_OPERAND (exp, 0);
1398
1399 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1400 (as builtin stringops may alias with anything). */
1401 exp = fold_build2 (MEM_REF,
1402 build_array_type (char_type_node,
1403 build_range_type (sizetype,
1404 size_one_node, len)),
1405 exp, build_int_cst (ptr_type_node, 0));
1406
1407 /* If the MEM_REF has no acceptable address, try to get the base object
1408 from the original address we got, and build an all-aliasing
1409 unknown-sized access to that one. */
1410 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1411 set_mem_attributes (mem, exp, 0);
1412 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1413 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1414 0))))
1415 {
1416 exp = build_fold_addr_expr (exp);
1417 exp = fold_build2 (MEM_REF,
1418 build_array_type (char_type_node,
1419 build_range_type (sizetype,
1420 size_zero_node,
1421 NULL)),
1422 exp, build_int_cst (ptr_type_node, 0));
1423 set_mem_attributes (mem, exp, 0);
1424 }
1425 set_mem_alias_set (mem, 0);
1426 return mem;
1427 }
1428 \f
1429 /* Built-in functions to perform an untyped call and return. */
1430
1431 #define apply_args_mode \
1432 (this_target_builtins->x_apply_args_mode)
1433 #define apply_result_mode \
1434 (this_target_builtins->x_apply_result_mode)
1435
1436 /* Return the size required for the block returned by __builtin_apply_args,
1437 and initialize apply_args_mode. */
1438
1439 static int
1440 apply_args_size (void)
1441 {
1442 static int size = -1;
1443 int align;
1444 unsigned int regno;
1445
1446 /* The values computed by this function never change. */
1447 if (size < 0)
1448 {
1449 /* The first value is the incoming arg-pointer. */
1450 size = GET_MODE_SIZE (Pmode);
1451
1452 /* The second value is the structure value address unless this is
1453 passed as an "invisible" first argument. */
1454 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1455 size += GET_MODE_SIZE (Pmode);
1456
1457 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1458 if (FUNCTION_ARG_REGNO_P (regno))
1459 {
1460 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1461
1462 gcc_assert (mode != VOIDmode);
1463
1464 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1465 if (size % align != 0)
1466 size = CEIL (size, align) * align;
1467 size += GET_MODE_SIZE (mode);
1468 apply_args_mode[regno] = mode;
1469 }
1470 else
1471 {
1472 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1473 }
1474 }
1475 return size;
1476 }
1477
1478 /* Return the size required for the block returned by __builtin_apply,
1479 and initialize apply_result_mode. */
1480
1481 static int
1482 apply_result_size (void)
1483 {
1484 static int size = -1;
1485 int align, regno;
1486
1487 /* The values computed by this function never change. */
1488 if (size < 0)
1489 {
1490 size = 0;
1491
1492 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1493 if (targetm.calls.function_value_regno_p (regno))
1494 {
1495 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1496
1497 gcc_assert (mode != VOIDmode);
1498
1499 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1500 if (size % align != 0)
1501 size = CEIL (size, align) * align;
1502 size += GET_MODE_SIZE (mode);
1503 apply_result_mode[regno] = mode;
1504 }
1505 else
1506 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1507
1508 /* Allow targets that use untyped_call and untyped_return to override
1509 the size so that machine-specific information can be stored here. */
1510 #ifdef APPLY_RESULT_SIZE
1511 size = APPLY_RESULT_SIZE;
1512 #endif
1513 }
1514 return size;
1515 }
1516
1517 /* Create a vector describing the result block RESULT. If SAVEP is true,
1518 the result block is used to save the values; otherwise it is used to
1519 restore the values. */
1520
1521 static rtx
1522 result_vector (int savep, rtx result)
1523 {
1524 int regno, size, align, nelts;
1525 fixed_size_mode mode;
1526 rtx reg, mem;
1527 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1528
1529 size = nelts = 0;
1530 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1531 if ((mode = apply_result_mode[regno]) != VOIDmode)
1532 {
1533 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1534 if (size % align != 0)
1535 size = CEIL (size, align) * align;
1536 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1537 mem = adjust_address (result, mode, size);
1538 savevec[nelts++] = (savep
1539 ? gen_rtx_SET (mem, reg)
1540 : gen_rtx_SET (reg, mem));
1541 size += GET_MODE_SIZE (mode);
1542 }
1543 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1544 }
1545
1546 /* Save the state required to perform an untyped call with the same
1547 arguments as were passed to the current function. */
1548
1549 static rtx
1550 expand_builtin_apply_args_1 (void)
1551 {
1552 rtx registers, tem;
1553 int size, align, regno;
1554 fixed_size_mode mode;
1555 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1556
1557 /* Create a block where the arg-pointer, structure value address,
1558 and argument registers can be saved. */
1559 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1560
1561 /* Walk past the arg-pointer and structure value address. */
1562 size = GET_MODE_SIZE (Pmode);
1563 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1564 size += GET_MODE_SIZE (Pmode);
1565
1566 /* Save each register used in calling a function to the block. */
1567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1568 if ((mode = apply_args_mode[regno]) != VOIDmode)
1569 {
1570 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1571 if (size % align != 0)
1572 size = CEIL (size, align) * align;
1573
1574 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1575
1576 emit_move_insn (adjust_address (registers, mode, size), tem);
1577 size += GET_MODE_SIZE (mode);
1578 }
1579
1580 /* Save the arg pointer to the block. */
1581 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1582 /* We need the pointer as the caller actually passed it to us, not
1583 as we might have pretended it was passed. Make sure it's a valid
1584 operand, as emit_move_insn isn't expected to handle a PLUS. */
1585 if (STACK_GROWS_DOWNWARD)
1586 tem
1587 = force_operand (plus_constant (Pmode, tem,
1588 crtl->args.pretend_args_size),
1589 NULL_RTX);
1590 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1591
1592 size = GET_MODE_SIZE (Pmode);
1593
1594 /* Save the structure value address unless this is passed as an
1595 "invisible" first argument. */
1596 if (struct_incoming_value)
1597 {
1598 emit_move_insn (adjust_address (registers, Pmode, size),
1599 copy_to_reg (struct_incoming_value));
1600 size += GET_MODE_SIZE (Pmode);
1601 }
1602
1603 /* Return the address of the block. */
1604 return copy_addr_to_reg (XEXP (registers, 0));
1605 }
1606
1607 /* __builtin_apply_args returns a block of memory allocated on
1608 the stack into which is stored the arg pointer, structure
1609 value address, static chain, and all the registers that might
1610 possibly be used in performing a function call. The code is
1611 moved to the start of the function so the incoming values are
1612 saved. */
1613
1614 static rtx
1615 expand_builtin_apply_args (void)
1616 {
1617 /* Don't do __builtin_apply_args more than once in a function.
1618 Save the result of the first call and reuse it. */
1619 if (apply_args_value != 0)
1620 return apply_args_value;
1621 {
1622 /* When this function is called, it means that registers must be
1623 saved on entry to this function. So we migrate the
1624 call to the first insn of this function. */
1625 rtx temp;
1626
1627 start_sequence ();
1628 temp = expand_builtin_apply_args_1 ();
1629 rtx_insn *seq = get_insns ();
1630 end_sequence ();
1631
1632 apply_args_value = temp;
1633
1634 /* Put the insns after the NOTE that starts the function.
1635 If this is inside a start_sequence, make the outer-level insn
1636 chain current, so the code is placed at the start of the
1637 function. If internal_arg_pointer is a non-virtual pseudo,
1638 it needs to be placed after the function that initializes
1639 that pseudo. */
1640 push_topmost_sequence ();
1641 if (REG_P (crtl->args.internal_arg_pointer)
1642 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1643 emit_insn_before (seq, parm_birth_insn);
1644 else
1645 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1646 pop_topmost_sequence ();
1647 return temp;
1648 }
1649 }
1650
1651 /* Perform an untyped call and save the state required to perform an
1652 untyped return of whatever value was returned by the given function. */
1653
1654 static rtx
1655 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1656 {
1657 int size, align, regno;
1658 fixed_size_mode mode;
1659 rtx incoming_args, result, reg, dest, src;
1660 rtx_call_insn *call_insn;
1661 rtx old_stack_level = 0;
1662 rtx call_fusage = 0;
1663 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1664
1665 arguments = convert_memory_address (Pmode, arguments);
1666
1667 /* Create a block where the return registers can be saved. */
1668 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1669
1670 /* Fetch the arg pointer from the ARGUMENTS block. */
1671 incoming_args = gen_reg_rtx (Pmode);
1672 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1673 if (!STACK_GROWS_DOWNWARD)
1674 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1675 incoming_args, 0, OPTAB_LIB_WIDEN);
1676
1677 /* Push a new argument block and copy the arguments. Do not allow
1678 the (potential) memcpy call below to interfere with our stack
1679 manipulations. */
1680 do_pending_stack_adjust ();
1681 NO_DEFER_POP;
1682
1683 /* Save the stack with nonlocal if available. */
1684 if (targetm.have_save_stack_nonlocal ())
1685 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1686 else
1687 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1688
1689 /* Allocate a block of memory onto the stack and copy the memory
1690 arguments to the outgoing arguments address. We can pass TRUE
1691 as the 4th argument because we just saved the stack pointer
1692 and will restore it right after the call. */
1693 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1694
1695 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1696 may have already set current_function_calls_alloca to true.
1697 current_function_calls_alloca won't be set if argsize is zero,
1698 so we have to guarantee need_drap is true here. */
1699 if (SUPPORTS_STACK_ALIGNMENT)
1700 crtl->need_drap = true;
1701
1702 dest = virtual_outgoing_args_rtx;
1703 if (!STACK_GROWS_DOWNWARD)
1704 {
1705 if (CONST_INT_P (argsize))
1706 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1707 else
1708 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1709 }
1710 dest = gen_rtx_MEM (BLKmode, dest);
1711 set_mem_align (dest, PARM_BOUNDARY);
1712 src = gen_rtx_MEM (BLKmode, incoming_args);
1713 set_mem_align (src, PARM_BOUNDARY);
1714 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1715
1716 /* Refer to the argument block. */
1717 apply_args_size ();
1718 arguments = gen_rtx_MEM (BLKmode, arguments);
1719 set_mem_align (arguments, PARM_BOUNDARY);
1720
1721 /* Walk past the arg-pointer and structure value address. */
1722 size = GET_MODE_SIZE (Pmode);
1723 if (struct_value)
1724 size += GET_MODE_SIZE (Pmode);
1725
1726 /* Restore each of the registers previously saved. Make USE insns
1727 for each of these registers for use in making the call. */
1728 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1729 if ((mode = apply_args_mode[regno]) != VOIDmode)
1730 {
1731 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1732 if (size % align != 0)
1733 size = CEIL (size, align) * align;
1734 reg = gen_rtx_REG (mode, regno);
1735 emit_move_insn (reg, adjust_address (arguments, mode, size));
1736 use_reg (&call_fusage, reg);
1737 size += GET_MODE_SIZE (mode);
1738 }
1739
1740 /* Restore the structure value address unless this is passed as an
1741 "invisible" first argument. */
1742 size = GET_MODE_SIZE (Pmode);
1743 if (struct_value)
1744 {
1745 rtx value = gen_reg_rtx (Pmode);
1746 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1747 emit_move_insn (struct_value, value);
1748 if (REG_P (struct_value))
1749 use_reg (&call_fusage, struct_value);
1750 size += GET_MODE_SIZE (Pmode);
1751 }
1752
1753 /* All arguments and registers used for the call are set up by now! */
1754 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1755
1756 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1757 and we don't want to load it into a register as an optimization,
1758 because prepare_call_address already did it if it should be done. */
1759 if (GET_CODE (function) != SYMBOL_REF)
1760 function = memory_address (FUNCTION_MODE, function);
1761
1762 /* Generate the actual call instruction and save the return value. */
1763 if (targetm.have_untyped_call ())
1764 {
1765 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1766 emit_call_insn (targetm.gen_untyped_call (mem, result,
1767 result_vector (1, result)));
1768 }
1769 else if (targetm.have_call_value ())
1770 {
1771 rtx valreg = 0;
1772
1773 /* Locate the unique return register. It is not possible to
1774 express a call that sets more than one return register using
1775 call_value; use untyped_call for that. In fact, untyped_call
1776 only needs to save the return registers in the given block. */
1777 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1778 if ((mode = apply_result_mode[regno]) != VOIDmode)
1779 {
1780 gcc_assert (!valreg); /* have_untyped_call required. */
1781
1782 valreg = gen_rtx_REG (mode, regno);
1783 }
1784
1785 emit_insn (targetm.gen_call_value (valreg,
1786 gen_rtx_MEM (FUNCTION_MODE, function),
1787 const0_rtx, NULL_RTX, const0_rtx));
1788
1789 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1790 }
1791 else
1792 gcc_unreachable ();
1793
1794 /* Find the CALL insn we just emitted, and attach the register usage
1795 information. */
1796 call_insn = last_call_insn ();
1797 add_function_usage_to (call_insn, call_fusage);
1798
1799 /* Restore the stack. */
1800 if (targetm.have_save_stack_nonlocal ())
1801 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1802 else
1803 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1804 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1805
1806 OK_DEFER_POP;
1807
1808 /* Return the address of the result block. */
1809 result = copy_addr_to_reg (XEXP (result, 0));
1810 return convert_memory_address (ptr_mode, result);
1811 }
1812
1813 /* Perform an untyped return. */
1814
1815 static void
1816 expand_builtin_return (rtx result)
1817 {
1818 int size, align, regno;
1819 fixed_size_mode mode;
1820 rtx reg;
1821 rtx_insn *call_fusage = 0;
1822
1823 result = convert_memory_address (Pmode, result);
1824
1825 apply_result_size ();
1826 result = gen_rtx_MEM (BLKmode, result);
1827
1828 if (targetm.have_untyped_return ())
1829 {
1830 rtx vector = result_vector (0, result);
1831 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1832 emit_barrier ();
1833 return;
1834 }
1835
1836 /* Restore the return value and note that each value is used. */
1837 size = 0;
1838 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1839 if ((mode = apply_result_mode[regno]) != VOIDmode)
1840 {
1841 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1842 if (size % align != 0)
1843 size = CEIL (size, align) * align;
1844 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1845 emit_move_insn (reg, adjust_address (result, mode, size));
1846
1847 push_to_sequence (call_fusage);
1848 emit_use (reg);
1849 call_fusage = get_insns ();
1850 end_sequence ();
1851 size += GET_MODE_SIZE (mode);
1852 }
1853
1854 /* Put the USE insns before the return. */
1855 emit_insn (call_fusage);
1856
1857 /* Return whatever values were restored by jumping directly to the end
1858 of the function. */
1859 expand_naked_return ();
1860 }
1861
1862 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1863
1864 static enum type_class
1865 type_to_class (tree type)
1866 {
1867 switch (TREE_CODE (type))
1868 {
1869 case VOID_TYPE: return void_type_class;
1870 case INTEGER_TYPE: return integer_type_class;
1871 case ENUMERAL_TYPE: return enumeral_type_class;
1872 case BOOLEAN_TYPE: return boolean_type_class;
1873 case POINTER_TYPE: return pointer_type_class;
1874 case REFERENCE_TYPE: return reference_type_class;
1875 case OFFSET_TYPE: return offset_type_class;
1876 case REAL_TYPE: return real_type_class;
1877 case COMPLEX_TYPE: return complex_type_class;
1878 case FUNCTION_TYPE: return function_type_class;
1879 case METHOD_TYPE: return method_type_class;
1880 case RECORD_TYPE: return record_type_class;
1881 case UNION_TYPE:
1882 case QUAL_UNION_TYPE: return union_type_class;
1883 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1884 ? string_type_class : array_type_class);
1885 case LANG_TYPE: return lang_type_class;
1886 default: return no_type_class;
1887 }
1888 }
1889
1890 /* Expand a call EXP to __builtin_classify_type. */
1891
1892 static rtx
1893 expand_builtin_classify_type (tree exp)
1894 {
1895 if (call_expr_nargs (exp))
1896 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1897 return GEN_INT (no_type_class);
1898 }
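/* For example, in user code __builtin_classify_type (1) evaluates to
   integer_type_class, __builtin_classify_type (1.0) to real_type_class,
   and __builtin_classify_type ((void *) 0) to pointer_type_class,
   following the mapping in type_to_class above.  */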
1899
1900 /* This helper macro, meant to be used in mathfn_built_in below, determines
1901 which among a set of builtin math functions is appropriate for a given type
1902 mode. The `F' (float) and `L' (long double) are automatically generated
1903 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1904 types, there are additional types that are considered with 'F32', 'F64',
1905 'F128', etc. suffixes. */
1906 #define CASE_MATHFN(MATHFN) \
1907 CASE_CFN_##MATHFN: \
1908 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1909 fcodel = BUILT_IN_##MATHFN##L ; break;
1910 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1911 types. */
1912 #define CASE_MATHFN_FLOATN(MATHFN) \
1913 CASE_CFN_##MATHFN: \
1914 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1915 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1916 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1917 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1918 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1919 break;
1920 /* Similar to above, but appends _R after any F/L suffix. */
1921 #define CASE_MATHFN_REENT(MATHFN) \
1922 case CFN_BUILT_IN_##MATHFN##_R: \
1923 case CFN_BUILT_IN_##MATHFN##F_R: \
1924 case CFN_BUILT_IN_##MATHFN##L_R: \
1925 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1926 fcodel = BUILT_IN_##MATHFN##L_R ; break;
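/* As an illustration, CASE_MATHFN (SQRT) expands to the case labels for
   the sqrt combined functions (CFN_BUILT_IN_SQRT and, where defined,
   CFN_SQRT) and records fcode = BUILT_IN_SQRT, fcodef = BUILT_IN_SQRTF
   and fcodel = BUILT_IN_SQRTL, from which mathfn_built_in_2 below then
   selects based on TYPE.  */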
1927
1928 /* Return a function equivalent to FN but operating on floating-point
1929 values of type TYPE, or END_BUILTINS if no such function exists.
1930 This is purely an operation on function codes; it does not guarantee
1931 that the target actually has an implementation of the function. */
1932
1933 static built_in_function
1934 mathfn_built_in_2 (tree type, combined_fn fn)
1935 {
1936 tree mtype;
1937 built_in_function fcode, fcodef, fcodel;
1938 built_in_function fcodef16 = END_BUILTINS;
1939 built_in_function fcodef32 = END_BUILTINS;
1940 built_in_function fcodef64 = END_BUILTINS;
1941 built_in_function fcodef128 = END_BUILTINS;
1942 built_in_function fcodef32x = END_BUILTINS;
1943 built_in_function fcodef64x = END_BUILTINS;
1944 built_in_function fcodef128x = END_BUILTINS;
1945
1946 switch (fn)
1947 {
1948 CASE_MATHFN (ACOS)
1949 CASE_MATHFN (ACOSH)
1950 CASE_MATHFN (ASIN)
1951 CASE_MATHFN (ASINH)
1952 CASE_MATHFN (ATAN)
1953 CASE_MATHFN (ATAN2)
1954 CASE_MATHFN (ATANH)
1955 CASE_MATHFN (CBRT)
1956 CASE_MATHFN_FLOATN (CEIL)
1957 CASE_MATHFN (CEXPI)
1958 CASE_MATHFN_FLOATN (COPYSIGN)
1959 CASE_MATHFN (COS)
1960 CASE_MATHFN (COSH)
1961 CASE_MATHFN (DREM)
1962 CASE_MATHFN (ERF)
1963 CASE_MATHFN (ERFC)
1964 CASE_MATHFN (EXP)
1965 CASE_MATHFN (EXP10)
1966 CASE_MATHFN (EXP2)
1967 CASE_MATHFN (EXPM1)
1968 CASE_MATHFN (FABS)
1969 CASE_MATHFN (FDIM)
1970 CASE_MATHFN_FLOATN (FLOOR)
1971 CASE_MATHFN_FLOATN (FMA)
1972 CASE_MATHFN_FLOATN (FMAX)
1973 CASE_MATHFN_FLOATN (FMIN)
1974 CASE_MATHFN (FMOD)
1975 CASE_MATHFN (FREXP)
1976 CASE_MATHFN (GAMMA)
1977 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1978 CASE_MATHFN (HUGE_VAL)
1979 CASE_MATHFN (HYPOT)
1980 CASE_MATHFN (ILOGB)
1981 CASE_MATHFN (ICEIL)
1982 CASE_MATHFN (IFLOOR)
1983 CASE_MATHFN (INF)
1984 CASE_MATHFN (IRINT)
1985 CASE_MATHFN (IROUND)
1986 CASE_MATHFN (ISINF)
1987 CASE_MATHFN (J0)
1988 CASE_MATHFN (J1)
1989 CASE_MATHFN (JN)
1990 CASE_MATHFN (LCEIL)
1991 CASE_MATHFN (LDEXP)
1992 CASE_MATHFN (LFLOOR)
1993 CASE_MATHFN (LGAMMA)
1994 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1995 CASE_MATHFN (LLCEIL)
1996 CASE_MATHFN (LLFLOOR)
1997 CASE_MATHFN (LLRINT)
1998 CASE_MATHFN (LLROUND)
1999 CASE_MATHFN (LOG)
2000 CASE_MATHFN (LOG10)
2001 CASE_MATHFN (LOG1P)
2002 CASE_MATHFN (LOG2)
2003 CASE_MATHFN (LOGB)
2004 CASE_MATHFN (LRINT)
2005 CASE_MATHFN (LROUND)
2006 CASE_MATHFN (MODF)
2007 CASE_MATHFN (NAN)
2008 CASE_MATHFN (NANS)
2009 CASE_MATHFN_FLOATN (NEARBYINT)
2010 CASE_MATHFN (NEXTAFTER)
2011 CASE_MATHFN (NEXTTOWARD)
2012 CASE_MATHFN (POW)
2013 CASE_MATHFN (POWI)
2014 CASE_MATHFN (POW10)
2015 CASE_MATHFN (REMAINDER)
2016 CASE_MATHFN (REMQUO)
2017 CASE_MATHFN_FLOATN (RINT)
2018 CASE_MATHFN_FLOATN (ROUND)
2019 CASE_MATHFN (SCALB)
2020 CASE_MATHFN (SCALBLN)
2021 CASE_MATHFN (SCALBN)
2022 CASE_MATHFN (SIGNBIT)
2023 CASE_MATHFN (SIGNIFICAND)
2024 CASE_MATHFN (SIN)
2025 CASE_MATHFN (SINCOS)
2026 CASE_MATHFN (SINH)
2027 CASE_MATHFN_FLOATN (SQRT)
2028 CASE_MATHFN (TAN)
2029 CASE_MATHFN (TANH)
2030 CASE_MATHFN (TGAMMA)
2031 CASE_MATHFN_FLOATN (TRUNC)
2032 CASE_MATHFN (Y0)
2033 CASE_MATHFN (Y1)
2034 CASE_MATHFN (YN)
2035
2036 default:
2037 return END_BUILTINS;
2038 }
2039
2040 mtype = TYPE_MAIN_VARIANT (type);
2041 if (mtype == double_type_node)
2042 return fcode;
2043 else if (mtype == float_type_node)
2044 return fcodef;
2045 else if (mtype == long_double_type_node)
2046 return fcodel;
2047 else if (mtype == float16_type_node)
2048 return fcodef16;
2049 else if (mtype == float32_type_node)
2050 return fcodef32;
2051 else if (mtype == float64_type_node)
2052 return fcodef64;
2053 else if (mtype == float128_type_node)
2054 return fcodef128;
2055 else if (mtype == float32x_type_node)
2056 return fcodef32x;
2057 else if (mtype == float64x_type_node)
2058 return fcodef64x;
2059 else if (mtype == float128x_type_node)
2060 return fcodef128x;
2061 else
2062 return END_BUILTINS;
2063 }
2064
2065 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2066 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2067 otherwise use the explicit declaration. If we can't do the conversion,
2068 return null. */
2069
2070 static tree
2071 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2072 {
2073 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2074 if (fcode2 == END_BUILTINS)
2075 return NULL_TREE;
2076
2077 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2078 return NULL_TREE;
2079
2080 return builtin_decl_explicit (fcode2);
2081 }
2082
2083 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2084
2085 tree
2086 mathfn_built_in (tree type, combined_fn fn)
2087 {
2088 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2089 }
2090
2091 /* Like mathfn_built_in_1, but take a built_in_function and
2092 always use the implicit builtin declarations. */
2093
2094 tree
2095 mathfn_built_in (tree type, enum built_in_function fn)
2096 {
2097 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2098 }
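/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SQRT) returns
   the declaration of BUILT_IN_SQRTF (sqrtf) when that builtin is
   implicitly available, and NULL_TREE otherwise.  */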
2099
2100 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2101 return its code, otherwise return IFN_LAST. Note that this function
2102 only tests whether the function is defined in internal-fn.def, not whether
2103 it is actually available on the target. */
2104
2105 internal_fn
2106 associated_internal_fn (tree fndecl)
2107 {
2108 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2109 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2110 switch (DECL_FUNCTION_CODE (fndecl))
2111 {
2112 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2113 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2114 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2115 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2116 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2117 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2118 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2119 #include "internal-fn.def"
2120
2121 CASE_FLT_FN (BUILT_IN_POW10):
2122 return IFN_EXP10;
2123
2124 CASE_FLT_FN (BUILT_IN_DREM):
2125 return IFN_REMAINDER;
2126
2127 CASE_FLT_FN (BUILT_IN_SCALBN):
2128 CASE_FLT_FN (BUILT_IN_SCALBLN):
2129 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2130 return IFN_LDEXP;
2131 return IFN_LAST;
2132
2133 default:
2134 return IFN_LAST;
2135 }
2136 }
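/* For example, this maps BUILT_IN_SQRTF to IFN_SQRT, BUILT_IN_POW10 to
   IFN_EXP10, BUILT_IN_DREM to IFN_REMAINDER, and BUILT_IN_SCALBN to
   IFN_LDEXP when the return type's format has base 2; functions without
   an internal counterpart map to IFN_LAST.  */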
2137
2138 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2139 on the current target by a call to an internal function, return the
2140 code of that internal function, otherwise return IFN_LAST. The caller
2141 is responsible for ensuring that any side-effects of the built-in
2142 call are dealt with correctly. E.g. if CALL sets errno, the caller
2143 must decide that the errno result isn't needed or make it available
2144 in some other way. */
2145
2146 internal_fn
2147 replacement_internal_fn (gcall *call)
2148 {
2149 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2150 {
2151 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2152 if (ifn != IFN_LAST)
2153 {
2154 tree_pair types = direct_internal_fn_types (ifn, call);
2155 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2156 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2157 return ifn;
2158 }
2159 }
2160 return IFN_LAST;
2161 }
2162
2163 /* Expand a call to the builtin ternary math functions (fma).
2164 Return NULL_RTX if a normal call should be emitted rather than expanding the
2165 function in-line. EXP is the expression that is a call to the builtin
2166 function; if convenient, the result should be placed in TARGET.
2167 SUBTARGET may be used as the target for computing one of EXP's
2168 operands. */
2169
2170 static rtx
2171 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2172 {
2173 optab builtin_optab;
2174 rtx op0, op1, op2, result;
2175 rtx_insn *insns;
2176 tree fndecl = get_callee_fndecl (exp);
2177 tree arg0, arg1, arg2;
2178 machine_mode mode;
2179
2180 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2181 return NULL_RTX;
2182
2183 arg0 = CALL_EXPR_ARG (exp, 0);
2184 arg1 = CALL_EXPR_ARG (exp, 1);
2185 arg2 = CALL_EXPR_ARG (exp, 2);
2186
2187 switch (DECL_FUNCTION_CODE (fndecl))
2188 {
2189 CASE_FLT_FN (BUILT_IN_FMA):
2190 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2191 builtin_optab = fma_optab; break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195
2196 /* Make a suitable register to place result in. */
2197 mode = TYPE_MODE (TREE_TYPE (exp));
2198
2199 /* Before working hard, check whether the instruction is available. */
2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2201 return NULL_RTX;
2202
2203 result = gen_reg_rtx (mode);
2204
2205 /* Always stabilize the argument list. */
2206 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2207 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2208 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2209
2210 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2211 op1 = expand_normal (arg1);
2212 op2 = expand_normal (arg2);
2213
2214 start_sequence ();
2215
2216 /* Compute into RESULT.
2217 Set RESULT to wherever the result comes back. */
2218 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2219 result, 0);
2220
2221 /* If we were unable to expand via the builtin, stop the sequence
2222 (without outputting the insns) and call to the library function
2223 with the stabilized argument list. */
2224 if (result == 0)
2225 {
2226 end_sequence ();
2227 return expand_call (exp, target, target == const0_rtx);
2228 }
2229
2230 /* Output the entire sequence. */
2231 insns = get_insns ();
2232 end_sequence ();
2233 emit_insn (insns);
2234
2235 return result;
2236 }
2237
2238 /* Expand a call to the builtin sin and cos math functions.
2239 Return NULL_RTX if a normal call should be emitted rather than expanding the
2240 function in-line. EXP is the expression that is a call to the builtin
2241 function; if convenient, the result should be placed in TARGET.
2242 SUBTARGET may be used as the target for computing one of EXP's
2243 operands. */
2244
2245 static rtx
2246 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2247 {
2248 optab builtin_optab;
2249 rtx op0;
2250 rtx_insn *insns;
2251 tree fndecl = get_callee_fndecl (exp);
2252 machine_mode mode;
2253 tree arg;
2254
2255 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2256 return NULL_RTX;
2257
2258 arg = CALL_EXPR_ARG (exp, 0);
2259
2260 switch (DECL_FUNCTION_CODE (fndecl))
2261 {
2262 CASE_FLT_FN (BUILT_IN_SIN):
2263 CASE_FLT_FN (BUILT_IN_COS):
2264 builtin_optab = sincos_optab; break;
2265 default:
2266 gcc_unreachable ();
2267 }
2268
2269 /* Make a suitable register to place result in. */
2270 mode = TYPE_MODE (TREE_TYPE (exp));
2271
2272 /* Check if the sincos insn is available, otherwise fall back
2273 to the sin or cos insn. */
2274 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2275 switch (DECL_FUNCTION_CODE (fndecl))
2276 {
2277 CASE_FLT_FN (BUILT_IN_SIN):
2278 builtin_optab = sin_optab; break;
2279 CASE_FLT_FN (BUILT_IN_COS):
2280 builtin_optab = cos_optab; break;
2281 default:
2282 gcc_unreachable ();
2283 }
2284
2285 /* Before working hard, check whether the instruction is available. */
2286 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2287 {
2288 rtx result = gen_reg_rtx (mode);
2289
2290 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2291 need to expand the argument again. This way, we will not perform
2292 side-effects more than once. */
2293 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2294
2295 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2296
2297 start_sequence ();
2298
2299 /* Compute into RESULT.
2300 Set RESULT to wherever the result comes back. */
2301 if (builtin_optab == sincos_optab)
2302 {
2303 int ok;
2304
2305 switch (DECL_FUNCTION_CODE (fndecl))
2306 {
2307 CASE_FLT_FN (BUILT_IN_SIN):
2308 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2309 break;
2310 CASE_FLT_FN (BUILT_IN_COS):
2311 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2312 break;
2313 default:
2314 gcc_unreachable ();
2315 }
2316 gcc_assert (ok);
2317 }
2318 else
2319 result = expand_unop (mode, builtin_optab, op0, result, 0);
2320
2321 if (result != 0)
2322 {
2323 /* Output the entire sequence. */
2324 insns = get_insns ();
2325 end_sequence ();
2326 emit_insn (insns);
2327 return result;
2328 }
2329
2330 /* If we were unable to expand via the builtin, stop the sequence
2331 (without outputting the insns) and call to the library function
2332 with the stabilized argument list. */
2333 end_sequence ();
2334 }
2335
2336 return expand_call (exp, target, target == const0_rtx);
2337 }
2338
2339 /* Given an interclass math builtin decl FNDECL and its argument ARG
2340 return an RTL instruction code that implements the functionality.
2341 If that isn't possible or available return CODE_FOR_nothing. */
2342
2343 static enum insn_code
2344 interclass_mathfn_icode (tree arg, tree fndecl)
2345 {
2346 bool errno_set = false;
2347 optab builtin_optab = unknown_optab;
2348 machine_mode mode;
2349
2350 switch (DECL_FUNCTION_CODE (fndecl))
2351 {
2352 CASE_FLT_FN (BUILT_IN_ILOGB):
2353 errno_set = true; builtin_optab = ilogb_optab; break;
2354 CASE_FLT_FN (BUILT_IN_ISINF):
2355 builtin_optab = isinf_optab; break;
2356 case BUILT_IN_ISNORMAL:
2357 case BUILT_IN_ISFINITE:
2358 CASE_FLT_FN (BUILT_IN_FINITE):
2359 case BUILT_IN_FINITED32:
2360 case BUILT_IN_FINITED64:
2361 case BUILT_IN_FINITED128:
2362 case BUILT_IN_ISINFD32:
2363 case BUILT_IN_ISINFD64:
2364 case BUILT_IN_ISINFD128:
2365 /* These builtins have no optabs (yet). */
2366 break;
2367 default:
2368 gcc_unreachable ();
2369 }
2370
2371 /* There's no easy way to detect the case we need to set EDOM. */
2372 if (flag_errno_math && errno_set)
2373 return CODE_FOR_nothing;
2374
2375 /* Optab mode depends on the mode of the input argument. */
2376 mode = TYPE_MODE (TREE_TYPE (arg));
2377
2378 if (builtin_optab)
2379 return optab_handler (builtin_optab, mode);
2380 return CODE_FOR_nothing;
2381 }
2382
2383 /* Expand a call to one of the builtin math functions that operate on
2384 a floating-point argument and output an integer result (ilogb, isinf,
2385 isnan, etc).
2386 Return 0 if a normal call should be emitted rather than expanding the
2387 function in-line. EXP is the expression that is a call to the builtin
2388 function; if convenient, the result should be placed in TARGET. */
2389
2390 static rtx
2391 expand_builtin_interclass_mathfn (tree exp, rtx target)
2392 {
2393 enum insn_code icode = CODE_FOR_nothing;
2394 rtx op0;
2395 tree fndecl = get_callee_fndecl (exp);
2396 machine_mode mode;
2397 tree arg;
2398
2399 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2400 return NULL_RTX;
2401
2402 arg = CALL_EXPR_ARG (exp, 0);
2403 icode = interclass_mathfn_icode (arg, fndecl);
2404 mode = TYPE_MODE (TREE_TYPE (arg));
2405
2406 if (icode != CODE_FOR_nothing)
2407 {
2408 struct expand_operand ops[1];
2409 rtx_insn *last = get_last_insn ();
2410 tree orig_arg = arg;
2411
2412 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2413 need to expand the argument again. This way, we will not perform
2414 side-effects more than once. */
2415 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2416
2417 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2418
2419 if (mode != GET_MODE (op0))
2420 op0 = convert_to_mode (mode, op0, 0);
2421
2422 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2423 if (maybe_legitimize_operands (icode, 0, 1, ops)
2424 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2425 return ops[0].value;
2426
2427 delete_insns_since (last);
2428 CALL_EXPR_ARG (exp, 0) = orig_arg;
2429 }
2430
2431 return NULL_RTX;
2432 }
2433
2434 /* Expand a call to the builtin sincos math function.
2435 Return NULL_RTX if a normal call should be emitted rather than expanding the
2436 function in-line. EXP is the expression that is a call to the builtin
2437 function. */
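/* A user-level call such as

     sincos (x, &s, &c);

   stores sin (x) into s and cos (x) into c; when the sincos optab is
   available both values are computed by the single expansion below.  */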
2438
2439 static rtx
2440 expand_builtin_sincos (tree exp)
2441 {
2442 rtx op0, op1, op2, target1, target2;
2443 machine_mode mode;
2444 tree arg, sinp, cosp;
2445 int result;
2446 location_t loc = EXPR_LOCATION (exp);
2447 tree alias_type, alias_off;
2448
2449 if (!validate_arglist (exp, REAL_TYPE,
2450 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2451 return NULL_RTX;
2452
2453 arg = CALL_EXPR_ARG (exp, 0);
2454 sinp = CALL_EXPR_ARG (exp, 1);
2455 cosp = CALL_EXPR_ARG (exp, 2);
2456
2457 /* Make a suitable register to place result in. */
2458 mode = TYPE_MODE (TREE_TYPE (arg));
2459
2460 /* Check if sincos insn is available, otherwise emit the call. */
2461 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2462 return NULL_RTX;
2463
2464 target1 = gen_reg_rtx (mode);
2465 target2 = gen_reg_rtx (mode);
2466
2467 op0 = expand_normal (arg);
2468 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2469 alias_off = build_int_cst (alias_type, 0);
2470 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2471 sinp, alias_off));
2472 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2473 cosp, alias_off));
2474
2475 /* Compute into target1 and target2.
2476 Set TARGET to wherever the result comes back. */
2477 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2478 gcc_assert (result);
2479
2480 /* Move target1 and target2 to the memory locations indicated
2481 by op1 and op2. */
2482 emit_move_insn (op1, target1);
2483 emit_move_insn (op2, target2);
2484
2485 return const0_rtx;
2486 }
2487
2488 /* Expand a call to the internal cexpi builtin to the sincos math function.
2489 EXP is the expression that is a call to the builtin function; if convenient,
2490 the result should be placed in TARGET. */
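/* __builtin_cexpi (x) computes e^(i*x) = cos (x) + i*sin (x), so it can
   be lowered either to a sincos-style expansion producing the two real
   parts, or to a call to cexp on the complex value i*x, as done below.  */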
2491
2492 static rtx
2493 expand_builtin_cexpi (tree exp, rtx target)
2494 {
2495 tree fndecl = get_callee_fndecl (exp);
2496 tree arg, type;
2497 machine_mode mode;
2498 rtx op0, op1, op2;
2499 location_t loc = EXPR_LOCATION (exp);
2500
2501 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2502 return NULL_RTX;
2503
2504 arg = CALL_EXPR_ARG (exp, 0);
2505 type = TREE_TYPE (arg);
2506 mode = TYPE_MODE (TREE_TYPE (arg));
2507
2508 /* Try expanding via a sincos optab, fall back to emitting a libcall
2509 to sincos or cexp. We can be sure sincos or cexp is available because
2510 cexpi is only generated from sincos or cexp, or when either of them is available. */
2511 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2512 {
2513 op1 = gen_reg_rtx (mode);
2514 op2 = gen_reg_rtx (mode);
2515
2516 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2517
2518 /* Compute into op1 and op2. */
2519 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2520 }
2521 else if (targetm.libc_has_function (function_sincos))
2522 {
2523 tree call, fn = NULL_TREE;
2524 tree top1, top2;
2525 rtx op1a, op2a;
2526
2527 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2528 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2529 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2530 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2531 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2532 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2533 else
2534 gcc_unreachable ();
2535
2536 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2537 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2538 op1a = copy_addr_to_reg (XEXP (op1, 0));
2539 op2a = copy_addr_to_reg (XEXP (op2, 0));
2540 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2541 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2542
2543 /* Make sure not to fold the sincos call again. */
2544 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2545 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2546 call, 3, arg, top1, top2));
2547 }
2548 else
2549 {
2550 tree call, fn = NULL_TREE, narg;
2551 tree ctype = build_complex_type (type);
2552
2553 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2554 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2555 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2556 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2557 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2558 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2559 else
2560 gcc_unreachable ();
2561
2562 /* If we don't have a decl for cexp create one. This is the
2563 friendliest fallback if the user calls __builtin_cexpi
2564 on a target without full C99 function support. */
2565 if (fn == NULL_TREE)
2566 {
2567 tree fntype;
2568 const char *name = NULL;
2569
2570 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2571 name = "cexpf";
2572 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2573 name = "cexp";
2574 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2575 name = "cexpl";
2576
2577 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2578 fn = build_fn_decl (name, fntype);
2579 }
2580
2581 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2582 build_real (type, dconst0), arg);
2583
2584 /* Make sure not to fold the cexp call again. */
2585 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2586 return expand_expr (build_call_nary (ctype, call, 1, narg),
2587 target, VOIDmode, EXPAND_NORMAL);
2588 }
2589
2590 /* Now build the proper return type. */
2591 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2592 make_tree (TREE_TYPE (arg), op2),
2593 make_tree (TREE_TYPE (arg), op1)),
2594 target, VOIDmode, EXPAND_NORMAL);
2595 }
2596
2597 /* Conveniently construct a function call expression. FNDECL names the
2598 function to be called, N is the number of arguments, and the "..."
2599 parameters are the argument expressions. Unlike build_call_expr,
2600 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2601
2602 static tree
2603 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2604 {
2605 va_list ap;
2606 tree fntype = TREE_TYPE (fndecl);
2607 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2608
2609 va_start (ap, n);
2610 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2611 va_end (ap);
2612 SET_EXPR_LOCATION (fn, loc);
2613 return fn;
2614 }
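/* A typical use later in this file is

     build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

   which yields an unfolded CALL_EXPR to FALLBACK_FNDECL with the single
   argument ARG, ready to be expanded as a real call.  */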
2615
2616 /* Expand a call to one of the builtin rounding functions gcc defines
2617 as an extension (lfloor and lceil). As these are gcc extensions we
2618 do not need to worry about setting errno to EDOM.
2619 If expanding via optab fails, lower expression to (int)(floor(x)).
2620 EXP is the expression that is a call to the builtin function;
2621 if convenient, the result should be placed in TARGET. */
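/* For instance, when the lceil/lfloor optab is unavailable a call like
   __builtin_lfloor (x) is in effect lowered to (long) floor (x): the
   floor call is expanded and its result converted via expand_fix below.  */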
2622
2623 static rtx
2624 expand_builtin_int_roundingfn (tree exp, rtx target)
2625 {
2626 convert_optab builtin_optab;
2627 rtx op0, tmp;
2628 rtx_insn *insns;
2629 tree fndecl = get_callee_fndecl (exp);
2630 enum built_in_function fallback_fn;
2631 tree fallback_fndecl;
2632 machine_mode mode;
2633 tree arg;
2634
2635 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2636 gcc_unreachable ();
2637
2638 arg = CALL_EXPR_ARG (exp, 0);
2639
2640 switch (DECL_FUNCTION_CODE (fndecl))
2641 {
2642 CASE_FLT_FN (BUILT_IN_ICEIL):
2643 CASE_FLT_FN (BUILT_IN_LCEIL):
2644 CASE_FLT_FN (BUILT_IN_LLCEIL):
2645 builtin_optab = lceil_optab;
2646 fallback_fn = BUILT_IN_CEIL;
2647 break;
2648
2649 CASE_FLT_FN (BUILT_IN_IFLOOR):
2650 CASE_FLT_FN (BUILT_IN_LFLOOR):
2651 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2652 builtin_optab = lfloor_optab;
2653 fallback_fn = BUILT_IN_FLOOR;
2654 break;
2655
2656 default:
2657 gcc_unreachable ();
2658 }
2659
2660 /* Make a suitable register to place result in. */
2661 mode = TYPE_MODE (TREE_TYPE (exp));
2662
2663 target = gen_reg_rtx (mode);
2664
2665 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2666 need to expand the argument again. This way, we will not perform
2667 side-effects more than once. */
2668 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2669
2670 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2671
2672 start_sequence ();
2673
2674 /* Compute into TARGET. */
2675 if (expand_sfix_optab (target, op0, builtin_optab))
2676 {
2677 /* Output the entire sequence. */
2678 insns = get_insns ();
2679 end_sequence ();
2680 emit_insn (insns);
2681 return target;
2682 }
2683
2684 /* If we were unable to expand via the builtin, stop the sequence
2685 (without outputting the insns). */
2686 end_sequence ();
2687
2688 /* Fall back to floating point rounding optab. */
2689 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2690
2691 /* For non-C99 targets we may end up without a fallback fndecl here
2692 if the user called __builtin_lfloor directly. In this case emit
2693 a call to the floor/ceil variants nevertheless. This should result
2694 in the best user experience for targets without full C99 support. */
2695 if (fallback_fndecl == NULL_TREE)
2696 {
2697 tree fntype;
2698 const char *name = NULL;
2699
2700 switch (DECL_FUNCTION_CODE (fndecl))
2701 {
2702 case BUILT_IN_ICEIL:
2703 case BUILT_IN_LCEIL:
2704 case BUILT_IN_LLCEIL:
2705 name = "ceil";
2706 break;
2707 case BUILT_IN_ICEILF:
2708 case BUILT_IN_LCEILF:
2709 case BUILT_IN_LLCEILF:
2710 name = "ceilf";
2711 break;
2712 case BUILT_IN_ICEILL:
2713 case BUILT_IN_LCEILL:
2714 case BUILT_IN_LLCEILL:
2715 name = "ceill";
2716 break;
2717 case BUILT_IN_IFLOOR:
2718 case BUILT_IN_LFLOOR:
2719 case BUILT_IN_LLFLOOR:
2720 name = "floor";
2721 break;
2722 case BUILT_IN_IFLOORF:
2723 case BUILT_IN_LFLOORF:
2724 case BUILT_IN_LLFLOORF:
2725 name = "floorf";
2726 break;
2727 case BUILT_IN_IFLOORL:
2728 case BUILT_IN_LFLOORL:
2729 case BUILT_IN_LLFLOORL:
2730 name = "floorl";
2731 break;
2732 default:
2733 gcc_unreachable ();
2734 }
2735
2736 fntype = build_function_type_list (TREE_TYPE (arg),
2737 TREE_TYPE (arg), NULL_TREE);
2738 fallback_fndecl = build_fn_decl (name, fntype);
2739 }
2740
2741 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2742
2743 tmp = expand_normal (exp);
2744 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2745
2746 /* Truncate the result of floating point optab to integer
2747 via expand_fix (). */
2748 target = gen_reg_rtx (mode);
2749 expand_fix (target, tmp, 0);
2750
2751 return target;
2752 }
2753
2754 /* Expand a call to one of the builtin math functions doing integer
2755 conversion (lrint).
2756 Return 0 if a normal call should be emitted rather than expanding the
2757 function in-line. EXP is the expression that is a call to the builtin
2758 function; if convenient, the result should be placed in TARGET. */
2759
2760 static rtx
2761 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2762 {
2763 convert_optab builtin_optab;
2764 rtx op0;
2765 rtx_insn *insns;
2766 tree fndecl = get_callee_fndecl (exp);
2767 tree arg;
2768 machine_mode mode;
2769 enum built_in_function fallback_fn = BUILT_IN_NONE;
2770
2771 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2772 gcc_unreachable ();
2773
2774 arg = CALL_EXPR_ARG (exp, 0);
2775
2776 switch (DECL_FUNCTION_CODE (fndecl))
2777 {
2778 CASE_FLT_FN (BUILT_IN_IRINT):
2779 fallback_fn = BUILT_IN_LRINT;
2780 gcc_fallthrough ();
2781 CASE_FLT_FN (BUILT_IN_LRINT):
2782 CASE_FLT_FN (BUILT_IN_LLRINT):
2783 builtin_optab = lrint_optab;
2784 break;
2785
2786 CASE_FLT_FN (BUILT_IN_IROUND):
2787 fallback_fn = BUILT_IN_LROUND;
2788 gcc_fallthrough ();
2789 CASE_FLT_FN (BUILT_IN_LROUND):
2790 CASE_FLT_FN (BUILT_IN_LLROUND):
2791 builtin_optab = lround_optab;
2792 break;
2793
2794 default:
2795 gcc_unreachable ();
2796 }
2797
2798 /* There's no easy way to detect the case we need to set EDOM. */
2799 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2800 return NULL_RTX;
2801
2802 /* Make a suitable register to place result in. */
2803 mode = TYPE_MODE (TREE_TYPE (exp));
2804
2805 /* There's no easy way to detect the case we need to set EDOM. */
2806 if (!flag_errno_math)
2807 {
2808 rtx result = gen_reg_rtx (mode);
2809
2810 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2811 need to expand the argument again. This way, we will not perform
2812 side-effects more than once. */
2813 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2814
2815 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2816
2817 start_sequence ();
2818
2819 if (expand_sfix_optab (result, op0, builtin_optab))
2820 {
2821 /* Output the entire sequence. */
2822 insns = get_insns ();
2823 end_sequence ();
2824 emit_insn (insns);
2825 return result;
2826 }
2827
2828 /* If we were unable to expand via the builtin, stop the sequence
2829 (without outputting the insns) and call to the library function
2830 with the stabilized argument list. */
2831 end_sequence ();
2832 }
2833
2834 if (fallback_fn != BUILT_IN_NONE)
2835 {
2836 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2837 targets, (int) round (x) should never be transformed into
2838 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2839 a call to lround in the hope that the target provides at least some
2840 C99 functions. This should result in the best user experience for
2841 targets without full C99 support. */
2842 tree fallback_fndecl = mathfn_built_in_1
2843 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2844
2845 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2846 fallback_fndecl, 1, arg);
2847
2848 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2849 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2850 return convert_to_mode (mode, target, 0);
2851 }
2852
2853 return expand_call (exp, target, target == const0_rtx);
2854 }
2855
2856 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2857 a normal call should be emitted rather than expanding the function
2858 in-line. EXP is the expression that is a call to the builtin
2859 function; if convenient, the result should be placed in TARGET. */
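/* The expansion below always emits a libcall; for double arguments this
   is typically libgcc's __powidf2 (double, int), found through
   powi_optab's libfunc.  The exact symbol is target/libgcc dependent.  */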
2860
2861 static rtx
2862 expand_builtin_powi (tree exp, rtx target)
2863 {
2864 tree arg0, arg1;
2865 rtx op0, op1;
2866 machine_mode mode;
2867 machine_mode mode2;
2868
2869 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2870 return NULL_RTX;
2871
2872 arg0 = CALL_EXPR_ARG (exp, 0);
2873 arg1 = CALL_EXPR_ARG (exp, 1);
2874 mode = TYPE_MODE (TREE_TYPE (exp));
2875
2876 /* Emit a libcall to libgcc. */
2877
2878 /* Mode of the 2nd argument must match that of an int. */
2879 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2880
2881 if (target == NULL_RTX)
2882 target = gen_reg_rtx (mode);
2883
2884 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2885 if (GET_MODE (op0) != mode)
2886 op0 = convert_to_mode (mode, op0, 0);
2887 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2888 if (GET_MODE (op1) != mode2)
2889 op1 = convert_to_mode (mode2, op1, 0);
2890
2891 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2892 target, LCT_CONST, mode,
2893 op0, mode, op1, mode2);
2894
2895 return target;
2896 }
2897
2898 /* Expand expression EXP which is a call to the strlen builtin. Return
2899 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2900 try to get the result in TARGET, if convenient. */
2901
2902 static rtx
2903 expand_builtin_strlen (tree exp, rtx target,
2904 machine_mode target_mode)
2905 {
2906 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2907 return NULL_RTX;
2908
2909 struct expand_operand ops[4];
2910 rtx pat;
2911 tree len;
2912 tree src = CALL_EXPR_ARG (exp, 0);
2913 rtx src_reg;
2914 rtx_insn *before_strlen;
2915 machine_mode insn_mode;
2916 enum insn_code icode = CODE_FOR_nothing;
2917 unsigned int align;
2918
2919 /* If the length can be computed at compile-time, return it. */
2920 len = c_strlen (src, 0);
2921 if (len)
2922 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2923
2924 /* If the length can be computed at compile-time and is a constant
2925 integer, but there are side-effects in src, evaluate
2926 src for side-effects, then return len.
2927 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2928 can be optimized into: i++; x = 3; */
2929 len = c_strlen (src, 1);
2930 if (len && TREE_CODE (len) == INTEGER_CST)
2931 {
2932 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2933 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2934 }
2935
2936 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2937
2938 /* If SRC is not a pointer type, don't do this operation inline. */
2939 if (align == 0)
2940 return NULL_RTX;
2941
2942 /* Bail out if we can't compute strlen in the right mode. */
2943 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2944 {
2945 icode = optab_handler (strlen_optab, insn_mode);
2946 if (icode != CODE_FOR_nothing)
2947 break;
2948 }
2949 if (insn_mode == VOIDmode)
2950 return NULL_RTX;
2951
2952 /* Make a place to hold the source address. We will not expand
2953 the actual source until we are sure that the expansion will
2954 not fail -- there are trees that cannot be expanded twice. */
2955 src_reg = gen_reg_rtx (Pmode);
2956
2957 /* Mark the beginning of the strlen sequence so we can emit the
2958 source operand later. */
2959 before_strlen = get_last_insn ();
2960
2961 create_output_operand (&ops[0], target, insn_mode);
2962 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2963 create_integer_operand (&ops[2], 0);
2964 create_integer_operand (&ops[3], align);
2965 if (!maybe_expand_insn (icode, 4, ops))
2966 return NULL_RTX;
2967
2968 /* Check to see if the argument was declared attribute nonstring
2969 and if so, issue a warning since at this point it's not known
2970 to be nul-terminated. */
2971 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2972
2973 /* Now that we are assured of success, expand the source. */
2974 start_sequence ();
2975 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2976 if (pat != src_reg)
2977 {
2978 #ifdef POINTERS_EXTEND_UNSIGNED
2979 if (GET_MODE (pat) != Pmode)
2980 pat = convert_to_mode (Pmode, pat,
2981 POINTERS_EXTEND_UNSIGNED);
2982 #endif
2983 emit_move_insn (src_reg, pat);
2984 }
2985 pat = get_insns ();
2986 end_sequence ();
2987
2988 if (before_strlen)
2989 emit_insn_after (pat, before_strlen);
2990 else
2991 emit_insn_before (pat, get_insns ());
2992
2993 /* Return the value in the proper mode for this function. */
2994 if (GET_MODE (ops[0].value) == target_mode)
2995 target = ops[0].value;
2996 else if (target != 0)
2997 convert_move (target, ops[0].value, 0);
2998 else
2999 target = convert_to_mode (target_mode, ops[0].value, 0);
3000
3001 return target;
3002 }
3003
3004 /* Expand call EXP to the strnlen built-in, returning the result in TARGET
3005 if convenient, or NULL_RTX on failure. */
3006
3007 static rtx
3008 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3009 {
3010 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3011 return NULL_RTX;
3012
3013 tree src = CALL_EXPR_ARG (exp, 0);
3014 tree bound = CALL_EXPR_ARG (exp, 1);
3015
3016 if (!bound)
3017 return NULL_RTX;
3018
3019 location_t loc = UNKNOWN_LOCATION;
3020 if (EXPR_HAS_LOCATION (exp))
3021 loc = EXPR_LOCATION (exp);
3022
3023 tree maxobjsize = max_object_size ();
3024 tree func = get_callee_fndecl (exp);
3025
3026 tree len = c_strlen (src, 0);
3027 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3028 so these conversions aren't necessary. */
3029 if (len)
3030 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3031
3032 if (TREE_CODE (bound) == INTEGER_CST)
3033 {
3034 if (!TREE_NO_WARNING (exp)
3035 && tree_int_cst_lt (maxobjsize, bound)
3036 && warning_at (loc, OPT_Wstringop_overflow_,
3037 "%K%qD specified bound %E "
3038 "exceeds maximum object size %E",
3039 exp, func, bound, maxobjsize))
3040 TREE_NO_WARNING (exp) = true;
3041
3042 if (!len || TREE_CODE (len) != INTEGER_CST)
3043 return NULL_RTX;
3044
3045 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3046 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3047 }
3048
3049 if (TREE_CODE (bound) != SSA_NAME)
3050 return NULL_RTX;
3051
3052 wide_int min, max;
3053 enum value_range_type rng = get_range_info (bound, &min, &max);
3054 if (rng != VR_RANGE)
3055 return NULL_RTX;
3056
3057 if (!TREE_NO_WARNING (exp)
3058 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3059 && warning_at (loc, OPT_Wstringop_overflow_,
3060 "%K%qD specified bound [%wu, %wu] "
3061 "exceeds maximum object size %E",
3062 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3063 TREE_NO_WARNING (exp) = true;
3064
3065 if (!len || TREE_CODE (len) != INTEGER_CST)
3066 return NULL_RTX;
3067
3068 if (wi::gtu_p (min, wi::to_wide (len)))
3069 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3070
3071 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3072 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3073 }
3074
3075 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3076 bytes from constant string DATA + OFFSET and return it as target
3077 constant. */
3078
3079 static rtx
3080 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3081 scalar_int_mode mode)
3082 {
3083 const char *str = (const char *) data;
3084
3085 gcc_assert (offset >= 0
3086 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3087 <= strlen (str) + 1));
3088
3089 return c_readstr (str + offset, mode);
3090 }
3091
3092 /* LEN specifies the length of the block for the memcpy/memset operation.
3093 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3094 In some cases we can make a very likely guess about the maximum size,
3095 which we then store into PROBABLE_MAX_SIZE. */
3096
3097 static void
3098 determine_block_size (tree len, rtx len_rtx,
3099 unsigned HOST_WIDE_INT *min_size,
3100 unsigned HOST_WIDE_INT *max_size,
3101 unsigned HOST_WIDE_INT *probable_max_size)
3102 {
3103 if (CONST_INT_P (len_rtx))
3104 {
3105 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3106 return;
3107 }
3108 else
3109 {
3110 wide_int min, max;
3111 enum value_range_type range_type = VR_UNDEFINED;
3112
3113 /* Determine bounds from the type. */
3114 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3115 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3116 else
3117 *min_size = 0;
3118 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3119 *probable_max_size = *max_size
3120 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3121 else
3122 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3123
3124 if (TREE_CODE (len) == SSA_NAME)
3125 range_type = get_range_info (len, &min, &max);
3126 if (range_type == VR_RANGE)
3127 {
3128 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3129 *min_size = min.to_uhwi ();
3130 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3131 *probable_max_size = *max_size = max.to_uhwi ();
3132 }
3133 else if (range_type == VR_ANTI_RANGE)
3134 {
3135 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3136 if (min == 0)
3137 {
3138 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3139 *min_size = max.to_uhwi () + 1;
3140 }
3141 /* Code like
3142
3143 int n;
3144 if (n < 100)
3145 memcpy (a, b, n)
3146
3147 produces an anti-range allowing negative values of N. We can still
3148 use this information to guess that N is not negative.
3149 */
3150 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3151 *probable_max_size = min.to_uhwi () - 1;
3152 }
3153 }
3154 gcc_checking_assert (*max_size <=
3155 (unsigned HOST_WIDE_INT)
3156 GET_MODE_MASK (GET_MODE (len_rtx)));
3157 }
3158
3159 /* Try to verify that the sizes and lengths of the arguments to a string
3160 manipulation function given by EXP are within valid bounds and that
3161 the operation does not lead to buffer overflow or read past the end.
3162 Arguments other than EXP may be null. When non-null, the arguments
3163 have the following meaning:
3164 DST is the destination of a copy call or NULL otherwise.
3165 SRC is the source of a copy call or NULL otherwise.
3166 DSTWRITE is the number of bytes written into the destination obtained
3167 from the user-supplied size argument to the function (such as in
3168 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3169 MAXREAD is the user-supplied bound on the length of the source sequence
3170 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3171 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3172 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3173 expression EXP is a string function call (as opposed to a memory call
3174 like memcpy). As an exception, SRCSTR can also be an integer denoting
3175 the precomputed size of the source string or object (for functions like
3176 memcpy).
3177 DSTSIZE is the size of the destination object specified by the last
3178 argument to the _chk builtins, typically resulting from the expansion
3179 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3180 DSTSIZE)).
3181
3182 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3183 SIZE_MAX.
3184
3185 If the call is successfully verified as safe return true, otherwise
3186 return false. */
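/* As an illustrative example, user code along the lines of

     char d[4];
     __builtin___strcpy_chk (d, "abcdef", __builtin_object_size (d, 0));

   reaches this routine with DSTSIZE equal to 4 and a source length of 6,
   and is diagnosed as writing 7 bytes into a region of size 4.  */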
3187
3188 static bool
3189 check_access (tree exp, tree, tree, tree dstwrite,
3190 tree maxread, tree srcstr, tree dstsize)
3191 {
3192 int opt = OPT_Wstringop_overflow_;
3193
3194 /* The size of the largest object is half the address space, or
3195 PTRDIFF_MAX. (This is way too permissive.) */
3196 tree maxobjsize = max_object_size ();
3197
3198 /* Either the length of the source string for string functions or
3199 the size of the source object for raw memory functions. */
3200 tree slen = NULL_TREE;
3201
3202 tree range[2] = { NULL_TREE, NULL_TREE };
3203
3204 /* Set to true when the exact number of bytes written by a string
3205 function like strcpy is not known and the only thing that is
3206 known is that it must be at least one (for the terminating nul). */
3207 bool at_least_one = false;
3208 if (srcstr)
3209 {
3210 /* SRCSTR is normally a pointer to string but as a special case
3211 it can be an integer denoting the length of a string. */
3212 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3213 {
3214 /* Try to determine the range of lengths the source string
3215 refers to. If it can be determined and is less than
3216 the upper bound given by MAXREAD add one to it for
3217 the terminating nul. Otherwise, set it to one for
3218 the same reason, or to MAXREAD as appropriate. */
3219 get_range_strlen (srcstr, range);
3220 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3221 {
3222 if (maxread && tree_int_cst_le (maxread, range[0]))
3223 range[0] = range[1] = maxread;
3224 else
3225 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3226 range[0], size_one_node);
3227
3228 if (maxread && tree_int_cst_le (maxread, range[1]))
3229 range[1] = maxread;
3230 else if (!integer_all_onesp (range[1]))
3231 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3232 range[1], size_one_node);
3233
3234 slen = range[0];
3235 }
3236 else
3237 {
3238 at_least_one = true;
3239 slen = size_one_node;
3240 }
3241 }
3242 else
3243 slen = srcstr;
3244 }
3245
3246 if (!dstwrite && !maxread)
3247 {
3248 /* When the only available piece of data is the object size
3249 there is nothing to do. */
3250 if (!slen)
3251 return true;
3252
3253 /* Otherwise, when the length of the source sequence is known
3254 (as with strlen), set DSTWRITE to it. */
3255 if (!range[0])
3256 dstwrite = slen;
3257 }
3258
3259 if (!dstsize)
3260 dstsize = maxobjsize;
3261
3262 if (dstwrite)
3263 get_size_range (dstwrite, range);
3264
3265 tree func = get_callee_fndecl (exp);
3266
3267 /* First check the number of bytes to be written against the maximum
3268 object size. */
3269 if (range[0]
3270 && TREE_CODE (range[0]) == INTEGER_CST
3271 && tree_int_cst_lt (maxobjsize, range[0]))
3272 {
3273 if (TREE_NO_WARNING (exp))
3274 return false;
3275
3276 location_t loc = tree_nonartificial_location (exp);
3277 loc = expansion_point_location_if_in_system_header (loc);
3278
3279 bool warned;
3280 if (range[0] == range[1])
3281 warned = warning_at (loc, opt,
3282 "%K%qD specified size %E "
3283 "exceeds maximum object size %E",
3284 exp, func, range[0], maxobjsize);
3285 else
3286 warned = warning_at (loc, opt,
3287 "%K%qD specified size between %E and %E "
3288 "exceeds maximum object size %E",
3289 exp, func,
3290 range[0], range[1], maxobjsize);
3291 if (warned)
3292 TREE_NO_WARNING (exp) = true;
3293
3294 return false;
3295 }
3296
3297 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3298 constant, and in range of unsigned HOST_WIDE_INT. */
3299 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3300
3301 /* Next check the number of bytes to be written against the destination
3302 object size. */
3303 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3304 {
3305 if (range[0]
3306 && TREE_CODE (range[0]) == INTEGER_CST
3307 && ((tree_fits_uhwi_p (dstsize)
3308 && tree_int_cst_lt (dstsize, range[0]))
3309 || (dstwrite
3310 && tree_fits_uhwi_p (dstwrite)
3311 && tree_int_cst_lt (dstwrite, range[0]))))
3312 {
3313 if (TREE_NO_WARNING (exp))
3314 return false;
3315
3316 location_t loc = tree_nonartificial_location (exp);
3317 loc = expansion_point_location_if_in_system_header (loc);
3318
3319 if (dstwrite == slen && at_least_one)
3320 {
3321 /* This is a call to strcpy with a destination of 0 size
3322 and a source of unknown length. The call will write
3323 at least one byte past the end of the destination. */
3324 warning_at (loc, opt,
3325 "%K%qD writing %E or more bytes into a region "
3326 "of size %E overflows the destination",
3327 exp, func, range[0], dstsize);
3328 }
3329 else if (tree_int_cst_equal (range[0], range[1]))
3330 warning_n (loc, opt, tree_to_uhwi (range[0]),
3331 "%K%qD writing %E byte into a region "
3332 "of size %E overflows the destination",
3333 "%K%qD writing %E bytes into a region "
3334 "of size %E overflows the destination",
3335 exp, func, range[0], dstsize);
3336 else if (tree_int_cst_sign_bit (range[1]))
3337 {
3338 /* Avoid printing the upper bound if it's invalid. */
3339 warning_at (loc, opt,
3340 "%K%qD writing %E or more bytes into a region "
3341 "of size %E overflows the destination",
3342 exp, func, range[0], dstsize);
3343 }
3344 else
3345 warning_at (loc, opt,
3346 "%K%qD writing between %E and %E bytes into "
3347 "a region of size %E overflows the destination",
3348 exp, func, range[0], range[1],
3349 dstsize);
3350
3351 /* Return error when an overflow has been detected. */
3352 return false;
3353 }
3354 }
3355
3356 /* Check the maximum length of the source sequence against the size
3357 of the destination object if known, or against the maximum size
3358 of an object. */
3359 if (maxread)
3360 {
3361 get_size_range (maxread, range);
3362
3363 /* Use the lower end for MAXREAD from now on. */
3364 if (range[0])
3365 maxread = range[0];
3366
3367 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3368 {
3369 location_t loc = tree_nonartificial_location (exp);
3370 loc = expansion_point_location_if_in_system_header (loc);
3371
3372 if (tree_int_cst_lt (maxobjsize, range[0]))
3373 {
3374 if (TREE_NO_WARNING (exp))
3375 return false;
3376
3377 /* Warn about crazy big sizes first since that's more
3378 likely to be meaningful than saying that the bound
3379 is greater than the object size if both are big. */
3380 if (range[0] == range[1])
3381 warning_at (loc, opt,
3382 "%K%qD specified bound %E "
3383 "exceeds maximum object size %E",
3384 exp, func,
3385 range[0], maxobjsize);
3386 else
3387 warning_at (loc, opt,
3388 "%K%qD specified bound between %E and %E "
3389 "exceeds maximum object size %E",
3390 exp, func,
3391 range[0], range[1], maxobjsize);
3392
3393 return false;
3394 }
3395
3396 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3397 {
3398 if (TREE_NO_WARNING (exp))
3399 return false;
3400
3401 if (tree_int_cst_equal (range[0], range[1]))
3402 warning_at (loc, opt,
3403 "%K%qD specified bound %E "
3404 "exceeds destination size %E",
3405 exp, func,
3406 range[0], dstsize);
3407 else
3408 warning_at (loc, opt,
3409 "%K%qD specified bound between %E and %E "
3410 "exceeds destination size %E",
3411 exp, func,
3412 range[0], range[1], dstsize);
3413 return false;
3414 }
3415 }
3416 }
3417
3418 /* Check for reading past the end of SRC. */
3419 if (slen
3420 && slen == srcstr
3421 && dstwrite && range[0]
3422 && tree_int_cst_lt (slen, range[0]))
3423 {
3424 if (TREE_NO_WARNING (exp))
3425 return false;
3426
3427 location_t loc = tree_nonartificial_location (exp);
3428
3429 if (tree_int_cst_equal (range[0], range[1]))
3430 warning_n (loc, opt, tree_to_uhwi (range[0]),
3431 "%K%qD reading %E byte from a region of size %E",
3432 "%K%qD reading %E bytes from a region of size %E",
3433 exp, func, range[0], slen);
3434 else if (tree_int_cst_sign_bit (range[1]))
3435 {
3436 /* Avoid printing the upper bound if it's invalid. */
3437 warning_at (loc, opt,
3438 "%K%qD reading %E or more bytes from a region "
3439 "of size %E",
3440 exp, func, range[0], slen);
3441 }
3442 else
3443 warning_at (loc, opt,
3444 "%K%qD reading between %E and %E bytes from a region "
3445 "of size %E",
3446 exp, func, range[0], range[1], slen);
3447 return false;
3448 }
3449
3450 return true;
3451 }
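
/* As a concrete illustration of the checks above (a hypothetical
   user-level fragment, not code from this file):

     char d[3];
     strcpy (d, "abcd");

   Here the source length plus the terminating nul gives a write of
   5 bytes and DSTSIZE is 3, so the call is expected to draw the
   "writing 5 bytes into a region of size 3" warning above and make
   check_access return false.  */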
3452
3453 /* Helper to compute the size of the object referenced by the DEST
3454 expression which must have pointer type, using Object Size type
3455 OSTYPE (only the least significant 2 bits are used). Return
3456 an estimate of the size of the object if successful or NULL when
3457 the size cannot be determined. When the referenced object involves
3458 a non-constant offset in some range the returned value represents
3459 the largest size given the smallest non-negative offset in the
3460 range. The function is intended for diagnostics and should not
3461 be used to influence code generation or optimization. */
3462
3463 tree
3464 compute_objsize (tree dest, int ostype)
3465 {
3466 unsigned HOST_WIDE_INT size;
3467
3468 /* Only the two least significant bits are meaningful. */
3469 ostype &= 3;
3470
3471 if (compute_builtin_object_size (dest, ostype, &size))
3472 return build_int_cst (sizetype, size);
3473
3474 if (TREE_CODE (dest) == SSA_NAME)
3475 {
3476 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3477 if (!is_gimple_assign (stmt))
3478 return NULL_TREE;
3479
3480 dest = gimple_assign_rhs1 (stmt);
3481
3482 tree_code code = gimple_assign_rhs_code (stmt);
3483 if (code == POINTER_PLUS_EXPR)
3484 {
3485 /* compute_builtin_object_size fails for addresses with
3486 non-constant offsets. Try to determine the range of
3487 such an offset here and use it to adjust the constant
3488 size. */
3489 tree off = gimple_assign_rhs2 (stmt);
3490 if (TREE_CODE (off) == INTEGER_CST)
3491 {
3492 if (tree size = compute_objsize (dest, ostype))
3493 {
3494 wide_int wioff = wi::to_wide (off);
3495 wide_int wisiz = wi::to_wide (size);
3496
3497 /* Ignore negative offsets for now. For others,
3498 use the lower bound as the most optimistic
3499 estimate of the (remaining) size. */
3500 if (wi::sign_mask (wioff))
3501 ;
3502 else if (wi::ltu_p (wioff, wisiz))
3503 return wide_int_to_tree (TREE_TYPE (size),
3504 wi::sub (wisiz, wioff));
3505 else
3506 return size_zero_node;
3507 }
3508 }
3509 else if (TREE_CODE (off) == SSA_NAME
3510 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3511 {
3512 wide_int min, max;
3513 enum value_range_type rng = get_range_info (off, &min, &max);
3514
3515 if (rng == VR_RANGE)
3516 {
3517 if (tree size = compute_objsize (dest, ostype))
3518 {
3519 wide_int wisiz = wi::to_wide (size);
3520
3521 /* Ignore negative offsets for now. For others,
3522 use the lower bound as the most optimistic
3523 		     estimate of the (remaining) size. */
3524 if (wi::sign_mask (min))
3525 ;
3526 else if (wi::ltu_p (min, wisiz))
3527 return wide_int_to_tree (TREE_TYPE (size),
3528 wi::sub (wisiz, min));
3529 else
3530 return size_zero_node;
3531 }
3532 }
3533 }
3534 }
3535 else if (code != ADDR_EXPR)
3536 return NULL_TREE;
3537 }
3538
3539 /* Unless computing the largest size (for memcpy and other raw memory
3540 functions), try to determine the size of the object from its type. */
3541 if (!ostype)
3542 return NULL_TREE;
3543
3544 if (TREE_CODE (dest) != ADDR_EXPR)
3545 return NULL_TREE;
3546
3547 tree type = TREE_TYPE (dest);
3548 if (TREE_CODE (type) == POINTER_TYPE)
3549 type = TREE_TYPE (type);
3550
3551 type = TYPE_MAIN_VARIANT (type);
3552
3553 if (TREE_CODE (type) == ARRAY_TYPE
3554 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3555 {
3556 /* Return the constant size unless it's zero (that's a zero-length
3557 array likely at the end of a struct). */
3558 tree size = TYPE_SIZE_UNIT (type);
3559 if (size && TREE_CODE (size) == INTEGER_CST
3560 && !integer_zerop (size))
3561 return size;
3562 }
3563
3564 return NULL_TREE;
3565 }
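
/* A rough sketch of the POINTER_PLUS_EXPR handling above, using
   hypothetical names not taken from this file:

     char buf[8];
     char *p = buf + i;    where i is known to lie in [2, 5]

   The recursive call determines the size of BUF as 8 and the lower
   bound of the offset as 2, so compute_objsize is expected to return
   6, the most optimistic estimate of the space remaining past P.  */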
3566
3567 /* Helper to determine and check the sizes of the source and the destination
3568    of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
3569    call expression, DEST is the destination argument, SRC is the source
3570    argument or null, and SIZE is the number of bytes.  Use Object Size type-0
3571 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3572 (no overflow or invalid sizes), false otherwise. */
3573
3574 static bool
3575 check_memop_access (tree exp, tree dest, tree src, tree size)
3576 {
3577 /* For functions like memset and memcpy that operate on raw memory
3578 try to determine the size of the largest source and destination
3579 object using type-0 Object Size regardless of the object size
3580 type specified by the option. */
3581 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3582 tree dstsize = compute_objsize (dest, 0);
3583
3584 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3585 srcsize, dstsize);
3586 }
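
/* For instance, assuming a hypothetical call

     char d[4];
     memcpy (d, s, 8);

   the type-0 size computed for D is 4, so check_access is expected to
   diagnose the constant length 8 and return false.  */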
3587
3588 /* Validate memchr arguments without performing any expansion.
3589 Return NULL_RTX. */
3590
3591 static rtx
3592 expand_builtin_memchr (tree exp, rtx)
3593 {
3594 if (!validate_arglist (exp,
3595 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3596 return NULL_RTX;
3597
3598 tree arg1 = CALL_EXPR_ARG (exp, 0);
3599 tree len = CALL_EXPR_ARG (exp, 2);
3600
3601 /* Diagnose calls where the specified length exceeds the size
3602 of the object. */
3603 if (warn_stringop_overflow)
3604 {
3605 tree size = compute_objsize (arg1, 0);
3606 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3607 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3608 }
3609
3610 return NULL_RTX;
3611 }
3612
3613 /* Expand a call EXP to the memcpy builtin.
3614 Return NULL_RTX if we failed, the caller should emit a normal call,
3615 otherwise try to get the result in TARGET, if convenient (and in
3616 mode MODE if that's convenient). */
3617
3618 static rtx
3619 expand_builtin_memcpy (tree exp, rtx target)
3620 {
3621 if (!validate_arglist (exp,
3622 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3623 return NULL_RTX;
3624
3625 tree dest = CALL_EXPR_ARG (exp, 0);
3626 tree src = CALL_EXPR_ARG (exp, 1);
3627 tree len = CALL_EXPR_ARG (exp, 2);
3628
3629 check_memop_access (exp, dest, src, len);
3630
3631 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3632 /*endp=*/ 0);
3633 }
3634
3635 /* Check a call EXP to the memmove built-in for validity.
3636 Return NULL_RTX on both success and failure. */
3637
3638 static rtx
3639 expand_builtin_memmove (tree exp, rtx)
3640 {
3641 if (!validate_arglist (exp,
3642 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3643 return NULL_RTX;
3644
3645 tree dest = CALL_EXPR_ARG (exp, 0);
3646 tree src = CALL_EXPR_ARG (exp, 1);
3647 tree len = CALL_EXPR_ARG (exp, 2);
3648
3649 check_memop_access (exp, dest, src, len);
3650
3651 return NULL_RTX;
3652 }
3653
3654 /* Expand a call EXP to the mempcpy builtin.
3655 Return NULL_RTX if we failed; the caller should emit a normal call,
3656 otherwise try to get the result in TARGET, if convenient (and in
3657 mode MODE if that's convenient). If ENDP is 0 return the
3658 destination pointer, if ENDP is 1 return the end pointer ala
3659 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3660 stpcpy. */
3661
3662 static rtx
3663 expand_builtin_mempcpy (tree exp, rtx target)
3664 {
3665 if (!validate_arglist (exp,
3666 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3667 return NULL_RTX;
3668
3669 tree dest = CALL_EXPR_ARG (exp, 0);
3670 tree src = CALL_EXPR_ARG (exp, 1);
3671 tree len = CALL_EXPR_ARG (exp, 2);
3672
3673   /* Policy does not generally allow using compute_objsize (which
3674      is used internally by check_memop_access) to change code generation
3675      or drive optimization decisions.
3676 
3677      In this instance it is safe because the code we generate has
3678      the same semantics regardless of the return value of
3679      check_memop_access.  Exactly the same amount of data is copied
3680      and the return value is exactly the same in both cases.
3681 
3682      Furthermore, check_memop_access always uses mode 0 for the call to
3683      compute_objsize, so the imprecise nature of compute_objsize is
3684      avoided. */
3685
3686 /* Avoid expanding mempcpy into memcpy when the call is determined
3687 to overflow the buffer. This also prevents the same overflow
3688 from being diagnosed again when expanding memcpy. */
3689 if (!check_memop_access (exp, dest, src, len))
3690 return NULL_RTX;
3691
3692 return expand_builtin_mempcpy_args (dest, src, len,
3693 target, exp, /*endp=*/ 1);
3694 }
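
/* To illustrate the ENDP convention shared by this family of expanders
   (hypothetical operands): for a copy of 4 bytes from SRC to DEST,

     ENDP == 0  yields DEST          (memcpy)
     ENDP == 1  yields DEST + 4      (mempcpy)
     ENDP == 2  yields DEST + 3      (stpcpy, one before the end).  */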
3695
3696 /* Helper function to do the actual work for expand of memory copy family
3697    functions (memcpy, mempcpy, stpcpy).  Expansion should copy LEN bytes
3698    of memory from SRC to DEST and assign the result to TARGET if convenient.
3699 If ENDP is 0 return the
3700 destination pointer, if ENDP is 1 return the end pointer ala
3701 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3702 stpcpy. */
3703
3704 static rtx
3705 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3706 rtx target, tree exp, int endp)
3707 {
3708 const char *src_str;
3709 unsigned int src_align = get_pointer_alignment (src);
3710 unsigned int dest_align = get_pointer_alignment (dest);
3711 rtx dest_mem, src_mem, dest_addr, len_rtx;
3712 HOST_WIDE_INT expected_size = -1;
3713 unsigned int expected_align = 0;
3714 unsigned HOST_WIDE_INT min_size;
3715 unsigned HOST_WIDE_INT max_size;
3716 unsigned HOST_WIDE_INT probable_max_size;
3717
3718 /* If DEST is not a pointer type, call the normal function. */
3719 if (dest_align == 0)
3720 return NULL_RTX;
3721
3722   /* If SRC is not a pointer type, don't do this
3723 operation in-line. */
3724 if (src_align == 0)
3725 return NULL_RTX;
3726
3727 if (currently_expanding_gimple_stmt)
3728 stringop_block_profile (currently_expanding_gimple_stmt,
3729 &expected_align, &expected_size);
3730
3731 if (expected_align < dest_align)
3732 expected_align = dest_align;
3733 dest_mem = get_memory_rtx (dest, len);
3734 set_mem_align (dest_mem, dest_align);
3735 len_rtx = expand_normal (len);
3736 determine_block_size (len, len_rtx, &min_size, &max_size,
3737 &probable_max_size);
3738 src_str = c_getstr (src);
3739
3740 /* If SRC is a string constant and block move would be done
3741 by pieces, we can avoid loading the string from memory
3742      and only store the computed constants. */
3743 if (src_str
3744 && CONST_INT_P (len_rtx)
3745 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3746 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3747 CONST_CAST (char *, src_str),
3748 dest_align, false))
3749 {
3750 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3751 builtin_memcpy_read_str,
3752 CONST_CAST (char *, src_str),
3753 dest_align, false, endp);
3754 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3755 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3756 return dest_mem;
3757 }
3758
3759 src_mem = get_memory_rtx (src, len);
3760 set_mem_align (src_mem, src_align);
3761
3762 /* Copy word part most expediently. */
3763 enum block_op_methods method = BLOCK_OP_NORMAL;
3764 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3765 method = BLOCK_OP_TAILCALL;
3766 if (endp == 1 && target != const0_rtx)
3767 method = BLOCK_OP_NO_LIBCALL_RET;
3768 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3769 expected_align, expected_size,
3770 min_size, max_size, probable_max_size);
3771 if (dest_addr == pc_rtx)
3772 return NULL_RTX;
3773
3774 if (dest_addr == 0)
3775 {
3776 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3777 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3778 }
3779
3780 if (endp && target != const0_rtx)
3781 {
3782 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3783       /* stpcpy returns a pointer to the last byte, i.e. the nul. */
3784 if (endp == 2)
3785 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3786 }
3787
3788 return dest_addr;
3789 }
3790
3791 static rtx
3792 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3793 rtx target, tree orig_exp, int endp)
3794 {
3795 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3796 endp);
3797 }
3798
3799 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3800 we failed, the caller should emit a normal call, otherwise try to
3801 get the result in TARGET, if convenient. If ENDP is 0 return the
3802 destination pointer, if ENDP is 1 return the end pointer ala
3803 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3804 stpcpy. */
3805
3806 static rtx
3807 expand_movstr (tree dest, tree src, rtx target, int endp)
3808 {
3809 struct expand_operand ops[3];
3810 rtx dest_mem;
3811 rtx src_mem;
3812
3813 if (!targetm.have_movstr ())
3814 return NULL_RTX;
3815
3816 dest_mem = get_memory_rtx (dest, NULL);
3817 src_mem = get_memory_rtx (src, NULL);
3818 if (!endp)
3819 {
3820 target = force_reg (Pmode, XEXP (dest_mem, 0));
3821 dest_mem = replace_equiv_address (dest_mem, target);
3822 }
3823
3824 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3825 create_fixed_operand (&ops[1], dest_mem);
3826 create_fixed_operand (&ops[2], src_mem);
3827 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3828 return NULL_RTX;
3829
3830 if (endp && target != const0_rtx)
3831 {
3832 target = ops[0].value;
3833 /* movstr is supposed to set end to the address of the NUL
3834 terminator. If the caller requested a mempcpy-like return value,
3835 adjust it. */
3836 if (endp == 1)
3837 {
3838 rtx tem = plus_constant (GET_MODE (target),
3839 gen_lowpart (GET_MODE (target), target), 1);
3840 emit_move_insn (target, force_operand (tem, NULL_RTX));
3841 }
3842 }
3843 return target;
3844 }
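
/* movstr leaves its result pointing at the copied nul terminator, so
   for a mempcpy-style caller (ENDP == 1) the code above adds one: for
   a hypothetical source "hi" copied to D, movstr yields D + 2 and the
   adjusted value D + 3 matches what mempcpy would return.  */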
3845
3846 /* Do some very basic size validation of a call to the strcat builtin
3847 given by EXP. Return NULL_RTX to have the built-in expand to a call
3848 to the library function. */
3849
3850 static rtx
3851 expand_builtin_strcat (tree exp, rtx)
3852 {
3853 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3854 || !warn_stringop_overflow)
3855 return NULL_RTX;
3856
3857 tree dest = CALL_EXPR_ARG (exp, 0);
3858 tree src = CALL_EXPR_ARG (exp, 1);
3859
3860 /* There is no way here to determine the length of the string in
3861 the destination to which the SRC string is being appended so
3862      just diagnose cases when the source string is longer than
3863 the destination object. */
3864
3865 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3866
3867 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3868 destsize);
3869
3870 return NULL_RTX;
3871 }
3872
3873 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3874    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3875 try to get the result in TARGET, if convenient (and in mode MODE if that's
3876 convenient). */
3877
3878 static rtx
3879 expand_builtin_strcpy (tree exp, rtx target)
3880 {
3881 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3882 return NULL_RTX;
3883
3884 tree dest = CALL_EXPR_ARG (exp, 0);
3885 tree src = CALL_EXPR_ARG (exp, 1);
3886
3887 if (warn_stringop_overflow)
3888 {
3889 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3890 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3891 src, destsize);
3892 }
3893
3894 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
3895 {
3896 /* Check to see if the argument was declared attribute nonstring
3897 and if so, issue a warning since at this point it's not known
3898 to be nul-terminated. */
3899 tree fndecl = get_callee_fndecl (exp);
3900 maybe_warn_nonstring_arg (fndecl, exp);
3901 return ret;
3902 }
3903
3904 return NULL_RTX;
3905 }
3906
3907 /* Helper function to do the actual work for expand_builtin_strcpy. The
3908 arguments to the builtin_strcpy call DEST and SRC are broken out
3909 so that this can also be called without constructing an actual CALL_EXPR.
3910 The other arguments and return value are the same as for
3911 expand_builtin_strcpy. */
3912
3913 static rtx
3914 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
3915 {
3916   /* Detect strcpy calls with unterminated arrays.  */
3917 if (tree nonstr = unterminated_array (src))
3918 {
3919 /* NONSTR refers to the non-nul terminated constant array. */
3920 if (!TREE_NO_WARNING (exp))
3921 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
3922 return NULL_RTX;
3923 }
3924
3925 return expand_movstr (dest, src, target, /*endp=*/0);
3926 }
3927
3928 /* Expand a call EXP to the stpcpy builtin.
3929    Return NULL_RTX if we failed; the caller should emit a normal call,
3930 otherwise try to get the result in TARGET, if convenient (and in
3931 mode MODE if that's convenient). */
3932
3933 static rtx
3934 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3935 {
3936 tree dst, src;
3937 location_t loc = EXPR_LOCATION (exp);
3938
3939 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3940 return NULL_RTX;
3941
3942 dst = CALL_EXPR_ARG (exp, 0);
3943 src = CALL_EXPR_ARG (exp, 1);
3944
3945 if (warn_stringop_overflow)
3946 {
3947 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3948 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3949 src, destsize);
3950 }
3951
3952 /* If return value is ignored, transform stpcpy into strcpy. */
3953 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3954 {
3955 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3956 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3957 return expand_expr (result, target, mode, EXPAND_NORMAL);
3958 }
3959 else
3960 {
3961 tree len, lenp1;
3962 rtx ret;
3963
3964 /* Ensure we get an actual string whose length can be evaluated at
3965 compile-time, not an expression containing a string. This is
3966 because the latter will potentially produce pessimized code
3967 when used to produce the return value. */
3968 tree nonstr = NULL_TREE;
3969 if (!c_getstr (src, NULL)
3970 || !(len = c_strlen (src, 0, &nonstr, 1)))
3971 return expand_movstr (dst, src, target, /*endp=*/2);
3972
3973 if (nonstr && !TREE_NO_WARNING (exp))
3974 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, nonstr);
3975
3976 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3977 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3978 target, exp, /*endp=*/2);
3979
3980 if (ret)
3981 return ret;
3982
3983 if (TREE_CODE (len) == INTEGER_CST)
3984 {
3985 rtx len_rtx = expand_normal (len);
3986
3987 if (CONST_INT_P (len_rtx))
3988 {
3989 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3990
3991 if (ret)
3992 {
3993 if (! target)
3994 {
3995 if (mode != VOIDmode)
3996 target = gen_reg_rtx (mode);
3997 else
3998 target = gen_reg_rtx (GET_MODE (ret));
3999 }
4000 if (GET_MODE (target) != GET_MODE (ret))
4001 ret = gen_lowpart (GET_MODE (target), ret);
4002
4003 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4004 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4005 gcc_assert (ret);
4006
4007 return target;
4008 }
4009 }
4010 }
4011
4012 return expand_movstr (dst, src, target, /*endp=*/2);
4013 }
4014 }
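
/* For a constant source, say the hypothetical call

     stpcpy (d, "hi");

   LEN is 2 and LENP1 is 3, so the mempcpy-style expansion with
   ENDP == 2 is expected to yield D + 2, a pointer to the copied nul,
   which is exactly what stpcpy returns.  */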
4015
4016 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4017 arguments while being careful to avoid duplicate warnings (which could
4018 be issued if the expander were to expand the call, resulting in it
4019    being emitted in expand_call ()).  */
4020
4021 static rtx
4022 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4023 {
4024 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4025 {
4026 /* The call has been successfully expanded. Check for nonstring
4027 arguments and issue warnings as appropriate. */
4028 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4029 return ret;
4030 }
4031
4032 return NULL_RTX;
4033 }
4034
4035 /* Check a call EXP to the stpncpy built-in for validity.
4036 Return NULL_RTX on both success and failure. */
4037
4038 static rtx
4039 expand_builtin_stpncpy (tree exp, rtx)
4040 {
4041 if (!validate_arglist (exp,
4042 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4043 || !warn_stringop_overflow)
4044 return NULL_RTX;
4045
4046 /* The source and destination of the call. */
4047 tree dest = CALL_EXPR_ARG (exp, 0);
4048 tree src = CALL_EXPR_ARG (exp, 1);
4049
4050 /* The exact number of bytes to write (not the maximum). */
4051 tree len = CALL_EXPR_ARG (exp, 2);
4052
4053 /* The size of the destination object. */
4054 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4055
4056 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4057
4058 return NULL_RTX;
4059 }
4060
4061 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
4062 bytes from constant string DATA + OFFSET and return it as target
4063 constant. */
4064
4065 rtx
4066 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4067 scalar_int_mode mode)
4068 {
4069 const char *str = (const char *) data;
4070
4071 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4072 return const0_rtx;
4073
4074 return c_readstr (str + offset, mode);
4075 }
4076
4077 /* Helper to check the sizes of sequences and the destination of calls
4078 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4079 success (no overflow or invalid sizes), false otherwise. */
4080
4081 static bool
4082 check_strncat_sizes (tree exp, tree objsize)
4083 {
4084 tree dest = CALL_EXPR_ARG (exp, 0);
4085 tree src = CALL_EXPR_ARG (exp, 1);
4086 tree maxread = CALL_EXPR_ARG (exp, 2);
4087
4088 /* Try to determine the range of lengths that the source expression
4089 refers to. */
4090 tree lenrange[2];
4091 get_range_strlen (src, lenrange);
4092
4093 /* Try to verify that the destination is big enough for the shortest
4094 string. */
4095
4096 if (!objsize && warn_stringop_overflow)
4097 {
4098 /* If it hasn't been provided by __strncat_chk, try to determine
4099 the size of the destination object into which the source is
4100 being copied. */
4101 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4102 }
4103
4104 /* Add one for the terminating nul. */
4105 tree srclen = (lenrange[0]
4106 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4107 size_one_node)
4108 : NULL_TREE);
4109
4110 /* The strncat function copies at most MAXREAD bytes and always appends
4111 the terminating nul so the specified upper bound should never be equal
4112 to (or greater than) the size of the destination. */
4113 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4114 && tree_int_cst_equal (objsize, maxread))
4115 {
4116 location_t loc = tree_nonartificial_location (exp);
4117 loc = expansion_point_location_if_in_system_header (loc);
4118
4119 warning_at (loc, OPT_Wstringop_overflow_,
4120 "%K%qD specified bound %E equals destination size",
4121 exp, get_callee_fndecl (exp), maxread);
4122
4123 return false;
4124 }
4125
4126 if (!srclen
4127 || (maxread && tree_fits_uhwi_p (maxread)
4128 && tree_fits_uhwi_p (srclen)
4129 && tree_int_cst_lt (maxread, srclen)))
4130 srclen = maxread;
4131
4132 /* The number of bytes to write is LEN but check_access will also
4133 check SRCLEN if LEN's value isn't known. */
4134 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4135 objsize);
4136 }
4137
4138 /* Similar to expand_builtin_strcat, do some very basic size validation
4139    of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
4140 the built-in expand to a call to the library function. */
4141
4142 static rtx
4143 expand_builtin_strncat (tree exp, rtx)
4144 {
4145 if (!validate_arglist (exp,
4146 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4147 || !warn_stringop_overflow)
4148 return NULL_RTX;
4149
4150 tree dest = CALL_EXPR_ARG (exp, 0);
4151 tree src = CALL_EXPR_ARG (exp, 1);
4152 /* The upper bound on the number of bytes to write. */
4153 tree maxread = CALL_EXPR_ARG (exp, 2);
4154 /* The length of the source sequence. */
4155 tree slen = c_strlen (src, 1);
4156
4157 /* Try to determine the range of lengths that the source expression
4158 refers to. */
4159 tree lenrange[2];
4160 if (slen)
4161 lenrange[0] = lenrange[1] = slen;
4162 else
4163 get_range_strlen (src, lenrange);
4164
4165 /* Try to verify that the destination is big enough for the shortest
4166 string. First try to determine the size of the destination object
4167 into which the source is being copied. */
4168 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4169
4170 /* Add one for the terminating nul. */
4171 tree srclen = (lenrange[0]
4172 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4173 size_one_node)
4174 : NULL_TREE);
4175
4176 /* The strncat function copies at most MAXREAD bytes and always appends
4177 the terminating nul so the specified upper bound should never be equal
4178 to (or greater than) the size of the destination. */
4179 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4180 && tree_int_cst_equal (destsize, maxread))
4181 {
4182 location_t loc = tree_nonartificial_location (exp);
4183 loc = expansion_point_location_if_in_system_header (loc);
4184
4185 warning_at (loc, OPT_Wstringop_overflow_,
4186 "%K%qD specified bound %E equals destination size",
4187 exp, get_callee_fndecl (exp), maxread);
4188
4189 return NULL_RTX;
4190 }
4191
4192 if (!srclen
4193 || (maxread && tree_fits_uhwi_p (maxread)
4194 && tree_fits_uhwi_p (srclen)
4195 && tree_int_cst_lt (maxread, srclen)))
4196 srclen = maxread;
4197
4198 /* The number of bytes to write is SRCLEN. */
4199 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4200
4201 return NULL_RTX;
4202 }
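
/* The equality check above targets the common misuse

     char d[8];
     strncat (d, s, sizeof d);

   (S being a hypothetical source string): strncat appends a
   terminating nul after the at most MAXREAD bytes it copies, so a
   bound equal to the destination size can overflow D and is
   diagnosed with -Wstringop-overflow.  */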
4203
4204 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4205    NULL_RTX if we failed; the caller should emit a normal call.  */
4206
4207 static rtx
4208 expand_builtin_strncpy (tree exp, rtx target)
4209 {
4210 location_t loc = EXPR_LOCATION (exp);
4211
4212 if (validate_arglist (exp,
4213 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4214 {
4215 tree dest = CALL_EXPR_ARG (exp, 0);
4216 tree src = CALL_EXPR_ARG (exp, 1);
4217 /* The number of bytes to write (not the maximum). */
4218 tree len = CALL_EXPR_ARG (exp, 2);
4219 /* The length of the source sequence. */
4220 tree slen = c_strlen (src, 1);
4221
4222 if (warn_stringop_overflow)
4223 {
4224 tree destsize = compute_objsize (dest,
4225 warn_stringop_overflow - 1);
4226
4227 /* The number of bytes to write is LEN but check_access will also
4228 check SLEN if LEN's value isn't known. */
4229 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4230 destsize);
4231 }
4232
4233 /* We must be passed a constant len and src parameter. */
4234 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4235 return NULL_RTX;
4236
4237 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4238
4239 /* We're required to pad with trailing zeros if the requested
4240 len is greater than strlen(s2)+1. In that case try to
4241 	 use store_by_pieces; if it fails, punt.  */
4242 if (tree_int_cst_lt (slen, len))
4243 {
4244 unsigned int dest_align = get_pointer_alignment (dest);
4245 const char *p = c_getstr (src);
4246 rtx dest_mem;
4247
4248 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4249 || !can_store_by_pieces (tree_to_uhwi (len),
4250 builtin_strncpy_read_str,
4251 CONST_CAST (char *, p),
4252 dest_align, false))
4253 return NULL_RTX;
4254
4255 dest_mem = get_memory_rtx (dest, len);
4256 store_by_pieces (dest_mem, tree_to_uhwi (len),
4257 builtin_strncpy_read_str,
4258 CONST_CAST (char *, p), dest_align, false, 0);
4259 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4260 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4261 return dest_mem;
4262 }
4263 }
4264 return NULL_RTX;
4265 }
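
/* An example of the padding case handled above (a hypothetical call):

     char d[5];
     strncpy (d, "ab", 5);

   SLEN is 2, so SLEN + 1 == 3 is less than LEN == 5 and the expansion
   stores 'a', 'b' and three trailing nul bytes via store_by_pieces,
   as the strncpy semantics require.  */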
4266
4267 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
4268 bytes from constant string DATA + OFFSET and return it as target
4269 constant. */
4270
4271 rtx
4272 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4273 scalar_int_mode mode)
4274 {
4275 const char *c = (const char *) data;
4276 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4277
4278 memset (p, *c, GET_MODE_SIZE (mode));
4279
4280 return c_readstr (p, mode);
4281 }
4282
4283 /* Callback routine for store_by_pieces. Return the RTL of a register
4284 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4285 char value given in the RTL register data. For example, if mode is
4286 4 bytes wide, return the RTL for 0x01010101*data. */
4287
4288 static rtx
4289 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4290 scalar_int_mode mode)
4291 {
4292 rtx target, coeff;
4293 size_t size;
4294 char *p;
4295
4296 size = GET_MODE_SIZE (mode);
4297 if (size == 1)
4298 return (rtx) data;
4299
4300 p = XALLOCAVEC (char, size);
4301 memset (p, 1, size);
4302 coeff = c_readstr (p, mode);
4303
4304 target = convert_to_mode (mode, (rtx) data, 1);
4305 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4306 return force_reg (mode, target);
4307 }
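
/* Concretely, for a 4-byte MODE the code above zero-extends the byte
   value and multiplies it by 0x01010101, so a runtime value of 0xab
   becomes 0xabababab, one copy of the byte in each position of the
   word.  */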
4308
4309 /* Expand expression EXP, which is a call to the memset builtin. Return
4310 NULL_RTX if we failed the caller should emit a normal call, otherwise
4311 try to get the result in TARGET, if convenient (and in mode MODE if that's
4312 convenient). */
4313
4314 static rtx
4315 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4316 {
4317 if (!validate_arglist (exp,
4318 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4319 return NULL_RTX;
4320
4321 tree dest = CALL_EXPR_ARG (exp, 0);
4322 tree val = CALL_EXPR_ARG (exp, 1);
4323 tree len = CALL_EXPR_ARG (exp, 2);
4324
4325 check_memop_access (exp, dest, NULL_TREE, len);
4326
4327 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4328 }
4329
4330 /* Helper function to do the actual work for expand_builtin_memset. The
4331 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4332 so that this can also be called without constructing an actual CALL_EXPR.
4333 The other arguments and return value are the same as for
4334 expand_builtin_memset. */
4335
4336 static rtx
4337 expand_builtin_memset_args (tree dest, tree val, tree len,
4338 rtx target, machine_mode mode, tree orig_exp)
4339 {
4340 tree fndecl, fn;
4341 enum built_in_function fcode;
4342 machine_mode val_mode;
4343 char c;
4344 unsigned int dest_align;
4345 rtx dest_mem, dest_addr, len_rtx;
4346 HOST_WIDE_INT expected_size = -1;
4347 unsigned int expected_align = 0;
4348 unsigned HOST_WIDE_INT min_size;
4349 unsigned HOST_WIDE_INT max_size;
4350 unsigned HOST_WIDE_INT probable_max_size;
4351
4352 dest_align = get_pointer_alignment (dest);
4353
4354 /* If DEST is not a pointer type, don't do this operation in-line. */
4355 if (dest_align == 0)
4356 return NULL_RTX;
4357
4358 if (currently_expanding_gimple_stmt)
4359 stringop_block_profile (currently_expanding_gimple_stmt,
4360 &expected_align, &expected_size);
4361
4362 if (expected_align < dest_align)
4363 expected_align = dest_align;
4364
4365 /* If the LEN parameter is zero, return DEST. */
4366 if (integer_zerop (len))
4367 {
4368 /* Evaluate and ignore VAL in case it has side-effects. */
4369 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4370 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4371 }
4372
4373 /* Stabilize the arguments in case we fail. */
4374 dest = builtin_save_expr (dest);
4375 val = builtin_save_expr (val);
4376 len = builtin_save_expr (len);
4377
4378 len_rtx = expand_normal (len);
4379 determine_block_size (len, len_rtx, &min_size, &max_size,
4380 &probable_max_size);
4381 dest_mem = get_memory_rtx (dest, len);
4382 val_mode = TYPE_MODE (unsigned_char_type_node);
4383
4384 if (TREE_CODE (val) != INTEGER_CST)
4385 {
4386 rtx val_rtx;
4387
4388 val_rtx = expand_normal (val);
4389 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4390
4391       /* Assume that we can memset by pieces if we can store
4392 	 the coefficients by pieces (in the required modes).
4393 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
4394 c = 1;
4395 if (tree_fits_uhwi_p (len)
4396 && can_store_by_pieces (tree_to_uhwi (len),
4397 builtin_memset_read_str, &c, dest_align,
4398 true))
4399 {
4400 val_rtx = force_reg (val_mode, val_rtx);
4401 store_by_pieces (dest_mem, tree_to_uhwi (len),
4402 builtin_memset_gen_str, val_rtx, dest_align,
4403 true, 0);
4404 }
4405 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4406 dest_align, expected_align,
4407 expected_size, min_size, max_size,
4408 probable_max_size))
4409 goto do_libcall;
4410
4411 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4412 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4413 return dest_mem;
4414 }
4415
4416 if (target_char_cast (val, &c))
4417 goto do_libcall;
4418
4419 if (c)
4420 {
4421 if (tree_fits_uhwi_p (len)
4422 && can_store_by_pieces (tree_to_uhwi (len),
4423 builtin_memset_read_str, &c, dest_align,
4424 true))
4425 store_by_pieces (dest_mem, tree_to_uhwi (len),
4426 builtin_memset_read_str, &c, dest_align, true, 0);
4427 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4428 gen_int_mode (c, val_mode),
4429 dest_align, expected_align,
4430 expected_size, min_size, max_size,
4431 probable_max_size))
4432 goto do_libcall;
4433
4434 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4435 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4436 return dest_mem;
4437 }
4438
4439 set_mem_align (dest_mem, dest_align);
4440 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4441 CALL_EXPR_TAILCALL (orig_exp)
4442 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4443 expected_align, expected_size,
4444 min_size, max_size,
4445 probable_max_size);
4446
4447 if (dest_addr == 0)
4448 {
4449 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4450 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4451 }
4452
4453 return dest_addr;
4454
4455 do_libcall:
4456 fndecl = get_callee_fndecl (orig_exp);
4457 fcode = DECL_FUNCTION_CODE (fndecl);
4458 if (fcode == BUILT_IN_MEMSET)
4459 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4460 dest, val, len);
4461 else if (fcode == BUILT_IN_BZERO)
4462 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4463 dest, len);
4464 else
4465 gcc_unreachable ();
4466 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4467 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4468 return expand_call (fn, target, target == const0_rtx);
4469 }
4470
4471 /* Expand expression EXP, which is a call to the bzero builtin. Return
4472    NULL_RTX if we failed; the caller should emit a normal call.  */
4473
4474 static rtx
4475 expand_builtin_bzero (tree exp)
4476 {
4477 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4478 return NULL_RTX;
4479
4480 tree dest = CALL_EXPR_ARG (exp, 0);
4481 tree size = CALL_EXPR_ARG (exp, 1);
4482
4483 check_memop_access (exp, dest, NULL_TREE, size);
4484
4485 /* New argument list transforming bzero(ptr x, int y) to
4486 memset(ptr x, int 0, size_t y). This is done this way
4487      so that if it isn't expanded inline, we fall back to
4488 calling bzero instead of memset. */
4489
4490 location_t loc = EXPR_LOCATION (exp);
4491
4492 return expand_builtin_memset_args (dest, integer_zero_node,
4493 fold_convert_loc (loc,
4494 size_type_node, size),
4495 const0_rtx, VOIDmode, exp);
4496 }
4497
4498 /* Try to expand cmpstr operation ICODE with the given operands.
4499 Return the result rtx on success, otherwise return null. */
4500
4501 static rtx
4502 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4503 HOST_WIDE_INT align)
4504 {
4505 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4506
4507 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4508 target = NULL_RTX;
4509
4510 struct expand_operand ops[4];
4511 create_output_operand (&ops[0], target, insn_mode);
4512 create_fixed_operand (&ops[1], arg1_rtx);
4513 create_fixed_operand (&ops[2], arg2_rtx);
4514 create_integer_operand (&ops[3], align);
4515 if (maybe_expand_insn (icode, 4, ops))
4516 return ops[0].value;
4517 return NULL_RTX;
4518 }
4519
4520 /* Expand expression EXP, which is a call to the memcmp built-in function.
4521 Return NULL_RTX if we failed and the caller should emit a normal call,
4522 otherwise try to get the result in TARGET, if convenient.
4523 RESULT_EQ is true if we can relax the returned value to be either zero
4524 or nonzero, without caring about the sign. */
4525
4526 static rtx
4527 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4528 {
4529 if (!validate_arglist (exp,
4530 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4531 return NULL_RTX;
4532
4533 tree arg1 = CALL_EXPR_ARG (exp, 0);
4534 tree arg2 = CALL_EXPR_ARG (exp, 1);
4535 tree len = CALL_EXPR_ARG (exp, 2);
4536 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4537 bool no_overflow = true;
4538
4539 /* Diagnose calls where the specified length exceeds the size of either
4540 object. */
4541 tree size = compute_objsize (arg1, 0);
4542 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4543 len, /*maxread=*/NULL_TREE, size,
4544 /*objsize=*/NULL_TREE);
4545 if (no_overflow)
4546 {
4547 size = compute_objsize (arg2, 0);
4548 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4549 len, /*maxread=*/NULL_TREE, size,
4550 /*objsize=*/NULL_TREE);
4551 }
4552
4553 /* If the specified length exceeds the size of either object,
4554 call the function. */
4555 if (!no_overflow)
4556 return NULL_RTX;
4557
4558 /* Due to the performance benefit, always inline the calls first
4559 when result_eq is false. */
4560 rtx result = NULL_RTX;
4561
4562 if (!result_eq && fcode != BUILT_IN_BCMP)
4563 {
4564 result = inline_expand_builtin_string_cmp (exp, target);
4565 if (result)
4566 return result;
4567 }
4568
4569 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4570 location_t loc = EXPR_LOCATION (exp);
4571
4572 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4573 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4574
4575 /* If we don't have POINTER_TYPE, call the function. */
4576 if (arg1_align == 0 || arg2_align == 0)
4577 return NULL_RTX;
4578
4579 rtx arg1_rtx = get_memory_rtx (arg1, len);
4580 rtx arg2_rtx = get_memory_rtx (arg2, len);
4581 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4582
4583 /* Set MEM_SIZE as appropriate. */
4584 if (CONST_INT_P (len_rtx))
4585 {
4586 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4587 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4588 }
4589
4590 by_pieces_constfn constfn = NULL;
4591
4592 const char *src_str = c_getstr (arg2);
4593 if (result_eq && src_str == NULL)
4594 {
4595 src_str = c_getstr (arg1);
4596 if (src_str != NULL)
4597 std::swap (arg1_rtx, arg2_rtx);
4598 }
4599
4600   /* If SRC is a string constant and the block comparison would be done
4601      by pieces, we can avoid loading the string from memory
4602      and use only the computed constants.  */
4603 if (src_str
4604 && CONST_INT_P (len_rtx)
4605 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4606 constfn = builtin_memcpy_read_str;
4607
4608 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4609 TREE_TYPE (len), target,
4610 result_eq, constfn,
4611 CONST_CAST (char *, src_str));
4612
4613 if (result)
4614 {
4615 /* Return the value in the proper mode for this function. */
4616 if (GET_MODE (result) == mode)
4617 return result;
4618
4619 if (target != 0)
4620 {
4621 convert_move (target, result, 0);
4622 return target;
4623 }
4624
4625 return convert_to_mode (mode, result, 0);
4626 }
4627
4628 return NULL_RTX;
4629 }
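
/* For example, in a hypothetical call memcmp ("magic", s, 5) with a
   constant first operand and a non-constant second, the swap above
   moves the constant string into ARG2_RTX so builtin_memcpy_read_str
   can feed the by-pieces comparison.  The swap is only done when
   RESULT_EQ, since exchanging the operands would flip the sign of a
   three-way result.  */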
4630
4631 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4632    if we failed; the caller should emit a normal call, otherwise try to get
4633 the result in TARGET, if convenient. */
4634
4635 static rtx
4636 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4637 {
4638 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4639 return NULL_RTX;
4640
4641 /* Due to the performance benefit, always inline the calls first. */
4642 rtx result = NULL_RTX;
4643 result = inline_expand_builtin_string_cmp (exp, target);
4644 if (result)
4645 return result;
4646
4647 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4648 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4649 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4650 return NULL_RTX;
4651
4652 tree arg1 = CALL_EXPR_ARG (exp, 0);
4653 tree arg2 = CALL_EXPR_ARG (exp, 1);
4654
4655 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4656 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4657
4658 /* If we don't have POINTER_TYPE, call the function. */
4659 if (arg1_align == 0 || arg2_align == 0)
4660 return NULL_RTX;
4661
4662 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4663 arg1 = builtin_save_expr (arg1);
4664 arg2 = builtin_save_expr (arg2);
4665
4666 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4667 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4668
4669 /* Try to call cmpstrsi. */
4670 if (cmpstr_icode != CODE_FOR_nothing)
4671 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4672 MIN (arg1_align, arg2_align));
4673
4674 /* Try to determine at least one length and call cmpstrnsi. */
4675 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4676 {
4677 tree len;
4678 rtx arg3_rtx;
4679
4680 tree len1 = c_strlen (arg1, 1);
4681 tree len2 = c_strlen (arg2, 1);
4682
4683 if (len1)
4684 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4685 if (len2)
4686 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4687
4688 /* If we don't have a constant length for the first, use the length
4689 of the second, if we know it. We don't require a constant for
4690 this case; some cost analysis could be done if both are available
4691 but neither is constant. For now, assume they're equally cheap,
4692 unless one has side effects. If both strings have constant lengths,
4693 use the smaller. */
4694
4695 if (!len1)
4696 len = len2;
4697 else if (!len2)
4698 len = len1;
4699 else if (TREE_SIDE_EFFECTS (len1))
4700 len = len2;
4701 else if (TREE_SIDE_EFFECTS (len2))
4702 len = len1;
4703 else if (TREE_CODE (len1) != INTEGER_CST)
4704 len = len2;
4705 else if (TREE_CODE (len2) != INTEGER_CST)
4706 len = len1;
4707 else if (tree_int_cst_lt (len1, len2))
4708 len = len1;
4709 else
4710 len = len2;
4711
4712 /* If both arguments have side effects, we cannot optimize. */
4713 if (len && !TREE_SIDE_EFFECTS (len))
4714 {
4715 arg3_rtx = expand_normal (len);
4716 result = expand_cmpstrn_or_cmpmem
4717 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4718 arg3_rtx, MIN (arg1_align, arg2_align));
4719 }
4720 }
4721
4722 tree fndecl = get_callee_fndecl (exp);
4723 if (result)
4724 {
4725 /* Check to see if the argument was declared attribute nonstring
4726 and if so, issue a warning since at this point it's not known
4727 to be nul-terminated. */
4728 maybe_warn_nonstring_arg (fndecl, exp);
4729
4730 /* Return the value in the proper mode for this function. */
4731 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4732 if (GET_MODE (result) == mode)
4733 return result;
4734 if (target == 0)
4735 return convert_to_mode (mode, result, 0);
4736 convert_move (target, result, 0);
4737 return target;
4738 }
4739
4740 /* Expand the library call ourselves using a stabilized argument
4741 list to avoid re-evaluating the function's arguments twice. */
4742 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4743 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4744 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4745 return expand_call (fn, target, target == const0_rtx);
4746 }
4747
4748 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4749    NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4750 the result in TARGET, if convenient. */
4751
4752 static rtx
4753 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4754 ATTRIBUTE_UNUSED machine_mode mode)
4755 {
4756 if (!validate_arglist (exp,
4757 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4758 return NULL_RTX;
4759
4760 /* Due to the performance benefit, always inline the calls first. */
4761 rtx result = NULL_RTX;
4762 result = inline_expand_builtin_string_cmp (exp, target);
4763 if (result)
4764 return result;
4765
4766 /* If c_strlen can determine an expression for one of the string
4767 lengths, and it doesn't have side effects, then emit cmpstrnsi
4768 using length MIN(strlen(string)+1, arg3). */
4769 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4770 if (cmpstrn_icode == CODE_FOR_nothing)
4771 return NULL_RTX;
4772
4773 tree len;
4774
4775 tree arg1 = CALL_EXPR_ARG (exp, 0);
4776 tree arg2 = CALL_EXPR_ARG (exp, 1);
4777 tree arg3 = CALL_EXPR_ARG (exp, 2);
4778
4779 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4780 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4781
4782 tree len1 = c_strlen (arg1, 1);
4783 tree len2 = c_strlen (arg2, 1);
4784
4785 location_t loc = EXPR_LOCATION (exp);
4786
4787 if (len1)
4788 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4789 if (len2)
4790 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4791
4792 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4793
4794 /* If we don't have a constant length for the first, use the length
4795 of the second, if we know it. If neither string is constant length,
4796 use the given length argument. We don't require a constant for
4797 this case; some cost analysis could be done if both are available
4798 but neither is constant. For now, assume they're equally cheap,
4799 unless one has side effects. If both strings have constant lengths,
4800 use the smaller. */
4801
4802 if (!len1 && !len2)
4803 len = len3;
4804 else if (!len1)
4805 len = len2;
4806 else if (!len2)
4807 len = len1;
4808 else if (TREE_SIDE_EFFECTS (len1))
4809 len = len2;
4810 else if (TREE_SIDE_EFFECTS (len2))
4811 len = len1;
4812 else if (TREE_CODE (len1) != INTEGER_CST)
4813 len = len2;
4814 else if (TREE_CODE (len2) != INTEGER_CST)
4815 len = len1;
4816 else if (tree_int_cst_lt (len1, len2))
4817 len = len1;
4818 else
4819 len = len2;
4820
4821 /* If we are not using the given length, we must incorporate it here.
4822 The actual new length parameter will be MIN(len,arg3) in this case. */
4823 if (len != len3)
4824 {
4825 len = fold_convert_loc (loc, sizetype, len);
4826 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4827 }
4828 rtx arg1_rtx = get_memory_rtx (arg1, len);
4829 rtx arg2_rtx = get_memory_rtx (arg2, len);
4830 rtx arg3_rtx = expand_normal (len);
4831 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4832 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4833 MIN (arg1_align, arg2_align));
4834
4835 tree fndecl = get_callee_fndecl (exp);
4836 if (result)
4837 {
4838 /* Check to see if the argument was declared attribute nonstring
4839 and if so, issue a warning since at this point it's not known
4840 to be nul-terminated. */
4841 maybe_warn_nonstring_arg (fndecl, exp);
4842
4843 /* Return the value in the proper mode for this function. */
4844 mode = TYPE_MODE (TREE_TYPE (exp));
4845 if (GET_MODE (result) == mode)
4846 return result;
4847 if (target == 0)
4848 return convert_to_mode (mode, result, 0);
4849 convert_move (target, result, 0);
4850 return target;
4851 }
4852
4853 /* Expand the library call ourselves using a stabilized argument
4854 list to avoid re-evaluating the function's arguments twice. */
4855 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4856 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4857 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4858 return expand_call (fn, target, target == const0_rtx);
4859 }
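
/* To illustrate the length selection above with hypothetical
   operands: for strncmp ("abc", s, 10), LEN1 is strlen ("abc") + 1
   == 4 and LEN2 is unknown, so LEN becomes MIN (4, 10) == 4 and the
   cmpstrn expansion never reads past the constant string's
   terminating nul.  */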
4860
4861 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4862 if that's convenient. */
4863
4864 rtx
4865 expand_builtin_saveregs (void)
4866 {
4867 rtx val;
4868 rtx_insn *seq;
4869
4870 /* Don't do __builtin_saveregs more than once in a function.
4871 Save the result of the first call and reuse it. */
4872 if (saveregs_value != 0)
4873 return saveregs_value;
4874
4875 /* When this function is called, it means that registers must be
4876 saved on entry to this function. So we migrate the call to the
4877 first insn of this function. */
4878
4879 start_sequence ();
4880
4881 /* Do whatever the machine needs done in this case. */
4882 val = targetm.calls.expand_builtin_saveregs ();
4883
4884 seq = get_insns ();
4885 end_sequence ();
4886
4887 saveregs_value = val;
4888
4889 /* Put the insns after the NOTE that starts the function. If this
4890 is inside a start_sequence, make the outer-level insn chain current, so
4891 the code is placed at the start of the function. */
4892 push_topmost_sequence ();
4893 emit_insn_after (seq, entry_of_function ());
4894 pop_topmost_sequence ();
4895
4896 return val;
4897 }
4898
4899 /* Expand a call to __builtin_next_arg. */
4900
4901 static rtx
4902 expand_builtin_next_arg (void)
4903 {
4904 /* Checking arguments is already done in fold_builtin_next_arg
4905 that must be called before this function. */
4906 return expand_binop (ptr_mode, add_optab,
4907 crtl->args.internal_arg_pointer,
4908 crtl->args.arg_offset_rtx,
4909 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4910 }
4911
4912 /* Make it easier for the backends by protecting the valist argument
4913 from multiple evaluations. */
4914
4915 static tree
4916 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4917 {
4918 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4919
4920 /* The current way of determining the type of valist is completely
4921 bogus. We should have the information on the va builtin instead. */
4922 if (!vatype)
4923 vatype = targetm.fn_abi_va_list (cfun->decl);
4924
4925 if (TREE_CODE (vatype) == ARRAY_TYPE)
4926 {
4927 if (TREE_SIDE_EFFECTS (valist))
4928 valist = save_expr (valist);
4929
4930 /* For this case, the backends will be expecting a pointer to
4931 vatype, but it's possible we've actually been given an array
4932 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4933 So fix it. */
4934 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4935 {
4936 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4937 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4938 }
4939 }
4940 else
4941 {
4942 tree pt = build_pointer_type (vatype);
4943
4944 if (! needs_lvalue)
4945 {
4946 if (! TREE_SIDE_EFFECTS (valist))
4947 return valist;
4948
4949 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4950 TREE_SIDE_EFFECTS (valist) = 1;
4951 }
4952
4953 if (TREE_SIDE_EFFECTS (valist))
4954 valist = save_expr (valist);
4955 valist = fold_build2_loc (loc, MEM_REF,
4956 vatype, valist, build_int_cst (pt, 0));
4957 }
4958
4959 return valist;
4960 }
4961
4962 /* The "standard" definition of va_list is void*. */
4963
4964 tree
4965 std_build_builtin_va_list (void)
4966 {
4967 return ptr_type_node;
4968 }
4969
4970 /* The "standard" abi va_list is va_list_type_node. */
4971
4972 tree
4973 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4974 {
4975 return va_list_type_node;
4976 }
4977
4978 /* The "standard" type of va_list is va_list_type_node. */
4979
4980 tree
4981 std_canonical_va_list_type (tree type)
4982 {
4983 tree wtype, htype;
4984
4985 wtype = va_list_type_node;
4986 htype = type;
4987
4988 if (TREE_CODE (wtype) == ARRAY_TYPE)
4989 {
4990 /* If va_list is an array type, the argument may have decayed
4991 to a pointer type, e.g. by being passed to another function.
4992 In that case, unwrap both types so that we can compare the
4993 underlying records. */
4994 if (TREE_CODE (htype) == ARRAY_TYPE
4995 || POINTER_TYPE_P (htype))
4996 {
4997 wtype = TREE_TYPE (wtype);
4998 htype = TREE_TYPE (htype);
4999 }
5000 }
5001 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5002 return va_list_type_node;
5003
5004 return NULL_TREE;
5005 }
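
/* For targets whose ABI declares something like the hypothetical

     typedef struct __va_list_tag va_list[1];

   a va_list object passed to another function decays to a pointer to
   the tag record, which is why both the array and the pointer case
   above are unwrapped before the main variants are compared.  */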
5006
5007 /* The "standard" implementation of va_start: just assign `nextarg' to
5008 the variable. */
5009
5010 void
5011 std_expand_builtin_va_start (tree valist, rtx nextarg)
5012 {
5013 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5014 convert_move (va_r, nextarg, 0);
5015 }
5016
5017 /* Expand EXP, a call to __builtin_va_start. */
5018
5019 static rtx
5020 expand_builtin_va_start (tree exp)
5021 {
5022 rtx nextarg;
5023 tree valist;
5024 location_t loc = EXPR_LOCATION (exp);
5025
5026 if (call_expr_nargs (exp) < 2)
5027 {
5028 error_at (loc, "too few arguments to function %<va_start%>");
5029 return const0_rtx;
5030 }
5031
5032 if (fold_builtin_next_arg (exp, true))
5033 return const0_rtx;
5034
5035 nextarg = expand_builtin_next_arg ();
5036 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5037
5038 if (targetm.expand_builtin_va_start)
5039 targetm.expand_builtin_va_start (valist, nextarg);
5040 else
5041 std_expand_builtin_va_start (valist, nextarg);
5042
5043 return const0_rtx;
5044 }
5045
5046 /* Expand EXP, a call to __builtin_va_end. */
5047
5048 static rtx
5049 expand_builtin_va_end (tree exp)
5050 {
5051 tree valist = CALL_EXPR_ARG (exp, 0);
5052
5053 /* Evaluate for side effects, if needed. I hate macros that don't
5054 do that. */
5055 if (TREE_SIDE_EFFECTS (valist))
5056 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5057
5058 return const0_rtx;
5059 }
5060
5061 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5062 builtin rather than just as an assignment in stdarg.h because of the
5063 nastiness of array-type va_list types. */
5064
5065 static rtx
5066 expand_builtin_va_copy (tree exp)
5067 {
5068 tree dst, src, t;
5069 location_t loc = EXPR_LOCATION (exp);
5070
5071 dst = CALL_EXPR_ARG (exp, 0);
5072 src = CALL_EXPR_ARG (exp, 1);
5073
5074 dst = stabilize_va_list_loc (loc, dst, 1);
5075 src = stabilize_va_list_loc (loc, src, 0);
5076
5077 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5078
5079 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5080 {
5081 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5082 TREE_SIDE_EFFECTS (t) = 1;
5083 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5084 }
5085 else
5086 {
5087 rtx dstb, srcb, size;
5088
5089 /* Evaluate to pointers. */
5090 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5091 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5092 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5093 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5094
5095 dstb = convert_memory_address (Pmode, dstb);
5096 srcb = convert_memory_address (Pmode, srcb);
5097
5098 /* "Dereference" to BLKmode memories. */
5099 dstb = gen_rtx_MEM (BLKmode, dstb);
5100 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5101 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5102 srcb = gen_rtx_MEM (BLKmode, srcb);
5103 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5104 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5105
5106 /* Copy. */
5107 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5108 }
5109
5110 return const0_rtx;
5111 }
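
/* Illustrative sketch of why the array case above exists (hypothetical
   user code; the layout described is the common x86-64 one, not a
   guarantee):

     va_list ap2;
     va_copy (ap2, ap);

   On targets where va_list is a one-element array of a struct, "ap"
   decays to a pointer, so the copy must be done as a block move of
   TYPE_SIZE_UNIT bytes rather than as a simple pointer assignment.  */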
5112
5113 /* Expand a call to one of the builtin functions __builtin_frame_address or
5114 __builtin_return_address. */
5115
5116 static rtx
5117 expand_builtin_frame_address (tree fndecl, tree exp)
5118 {
5119 /* The argument must be a nonnegative integer constant.
5120 It counts the number of frames to scan up the stack.
5121 The value is either the frame pointer value or the return
5122 address saved in that frame. */
5123 if (call_expr_nargs (exp) == 0)
5124 /* Warning about missing arg was already issued. */
5125 return const0_rtx;
5126 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5127 {
5128 error ("invalid argument to %qD", fndecl);
5129 return const0_rtx;
5130 }
5131 else
5132 {
5133 /* Number of frames to scan up the stack. */
5134 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5135
5136 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5137
5138 /* Some ports cannot access arbitrary stack frames. */
5139 if (tem == NULL)
5140 {
5141 warning (0, "unsupported argument to %qD", fndecl);
5142 return const0_rtx;
5143 }
5144
5145 if (count)
5146 {
5147 /* Warn since no effort is made to ensure that any frame
5148 beyond the current one exists or can be safely reached. */
5149 warning (OPT_Wframe_address, "calling %qD with "
5150 "a nonzero argument is unsafe", fndecl);
5151 }
5152
5153 /* For __builtin_frame_address, return what we've got. */
5154 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5155 return tem;
5156
5157 if (!REG_P (tem)
5158 && ! CONSTANT_P (tem))
5159 tem = copy_addr_to_reg (tem);
5160 return tem;
5161 }
5162 }
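
/* Usage sketch for the two builtins expanded above (illustrative; the
   calling code is hypothetical):

     void *fp  = __builtin_frame_address (0);
     void *ret = __builtin_return_address (0);

   A nonzero count, e.g. __builtin_return_address (2), walks up the call
   stack and triggers the -Wframe-address warning issued above, since
   there is no guarantee that that many frames exist or are reachable.  */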
5163
5164 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5165 failed and the caller should emit a normal call. */
5166
5167 static rtx
5168 expand_builtin_alloca (tree exp)
5169 {
5170 rtx op0;
5171 rtx result;
5172 unsigned int align;
5173 tree fndecl = get_callee_fndecl (exp);
5174 HOST_WIDE_INT max_size;
5175 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5176 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5177 bool valid_arglist
5178 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5179 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5180 VOID_TYPE)
5181 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5182 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5183 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5184
5185 if (!valid_arglist)
5186 return NULL_RTX;
5187
5188 if ((alloca_for_var
5189 && warn_vla_limit >= HOST_WIDE_INT_MAX
5190 && warn_alloc_size_limit < warn_vla_limit)
5191 || (!alloca_for_var
5192 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5193 && warn_alloc_size_limit < warn_alloca_limit
5194 ))
5195 {
5196 /* -Walloca-larger-than and -Wvla-larger-than settings of
5197 less than HOST_WIDE_INT_MAX override the more general
5198 -Walloc-size-larger-than so unless either of the former
5199 options is smaller than the last one (which would imply
5200 that the call was already checked), check the alloca
5201 arguments for overflow. */
5202 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5203 int idx[] = { 0, -1 };
5204 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5205 }
5206
5207 /* Compute the argument. */
5208 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5209
5210 /* Compute the alignment. */
5211 align = (fcode == BUILT_IN_ALLOCA
5212 ? BIGGEST_ALIGNMENT
5213 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5214
5215 /* Compute the maximum size. */
5216 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5217 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5218 : -1);
5219
5220 /* Allocate the desired space. If the allocation stems from the declaration
5221 of a variable-sized object, it cannot accumulate. */
5222 result
5223 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5224 result = convert_memory_address (ptr_mode, result);
5225
5226 return result;
5227 }
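
/* Source forms that reach this expander (illustrative sketch; 'n' is
   hypothetical):

     char *p = __builtin_alloca (n);
     char *q = __builtin_alloca_with_align (n, 256);

   The alignment argument is expressed in bits.  The _with_align and
   _with_align_and_max variants are also generated internally for VLAs,
   which is why CALL_ALLOCA_FOR_VAR_P is consulted above.  */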
5228
5229 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5230 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5231 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5232 handle_builtin_stack_restore function. */
5233
5234 static rtx
5235 expand_asan_emit_allocas_unpoison (tree exp)
5236 {
5237 tree arg0 = CALL_EXPR_ARG (exp, 0);
5238 tree arg1 = CALL_EXPR_ARG (exp, 1);
5239 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5240 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5241 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5242 stack_pointer_rtx, NULL_RTX, 0,
5243 OPTAB_LIB_WIDEN);
5244 off = convert_modes (ptr_mode, Pmode, off, 0);
5245 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5246 OPTAB_LIB_WIDEN);
5247 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5248 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5249 top, ptr_mode, bot, ptr_mode);
5250 return ret;
5251 }
5252
5253 /* Expand a call to bswap builtin in EXP.
5254 Return NULL_RTX if a normal call should be emitted rather than expanding the
5255 function in-line. If convenient, the result should be placed in TARGET.
5256 SUBTARGET may be used as the target for computing one of EXP's operands. */
5257
5258 static rtx
5259 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5260 rtx subtarget)
5261 {
5262 tree arg;
5263 rtx op0;
5264
5265 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5266 return NULL_RTX;
5267
5268 arg = CALL_EXPR_ARG (exp, 0);
5269 op0 = expand_expr (arg,
5270 subtarget && GET_MODE (subtarget) == target_mode
5271 ? subtarget : NULL_RTX,
5272 target_mode, EXPAND_NORMAL);
5273 if (GET_MODE (op0) != target_mode)
5274 op0 = convert_to_mode (target_mode, op0, 1);
5275
5276 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5277
5278 gcc_assert (target);
5279
5280 return convert_to_mode (target_mode, target, 1);
5281 }
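
/* Worked example of the expansion above (illustrative):

     __builtin_bswap32 (0x11223344) == 0x44332211
     __builtin_bswap16 (0x1122) == 0x2211

   The operand is first converted to TARGET_MODE so that bswap_optab is
   queried with an operand of the expected width.  */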
5282
5283 /* Expand a call to a unary builtin in EXP.
5284 Return NULL_RTX if a normal call should be emitted rather than expanding the
5285 function in-line. If convenient, the result should be placed in TARGET.
5286 SUBTARGET may be used as the target for computing one of EXP's operands. */
5287
5288 static rtx
5289 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5290 rtx subtarget, optab op_optab)
5291 {
5292 rtx op0;
5293
5294 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5295 return NULL_RTX;
5296
5297 /* Compute the argument. */
5298 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5299 (subtarget
5300 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5301 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5302 VOIDmode, EXPAND_NORMAL);
5303 /* Compute op, into TARGET if possible.
5304 Set TARGET to wherever the result comes back. */
5305 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5306 op_optab, op0, target, op_optab != clrsb_optab);
5307 gcc_assert (target);
5308
5309 return convert_to_mode (target_mode, target, 0);
5310 }
5311
5312 /* Expand a call to __builtin_expect. We just return our argument
5313 as the builtin_expect semantics should have already been applied by
5314 the tree branch prediction pass. */
5315
5316 static rtx
5317 expand_builtin_expect (tree exp, rtx target)
5318 {
5319 tree arg;
5320
5321 if (call_expr_nargs (exp) < 2)
5322 return const0_rtx;
5323 arg = CALL_EXPR_ARG (exp, 0);
5324
5325 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5326 /* When guessing was done, the hints should be already stripped away. */
5327 gcc_assert (!flag_guess_branch_prob
5328 || optimize == 0 || seen_error ());
5329 return target;
5330 }
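
/* Typical source-level use (illustrative; 'p' and fast_path are
   hypothetical):

     if (__builtin_expect (p != NULL, 1))
       fast_path (p);

   The hint itself was consumed by the tree-level branch predictor, so
   only the first argument is expanded here.  */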
5331
5332 /* Expand a call to __builtin_expect_with_probability. We just return our
5333 argument as the builtin_expect semantics should have already been applied
5334 by the tree branch prediction pass. */
5335
5336 static rtx
5337 expand_builtin_expect_with_probability (tree exp, rtx target)
5338 {
5339 tree arg;
5340
5341 if (call_expr_nargs (exp) < 3)
5342 return const0_rtx;
5343 arg = CALL_EXPR_ARG (exp, 0);
5344
5345 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5346 /* When guessing was done, the hints should be already stripped away. */
5347 gcc_assert (!flag_guess_branch_prob
5348 || optimize == 0 || seen_error ());
5349 return target;
5350 }
5351
5352
5353 /* Expand a call to __builtin_assume_aligned. We just return our first
5354 argument as the builtin_assume_aligned semantic should've been already
5355 executed by CCP. */
5356
5357 static rtx
5358 expand_builtin_assume_aligned (tree exp, rtx target)
5359 {
5360 if (call_expr_nargs (exp) < 2)
5361 return const0_rtx;
5362 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5363 EXPAND_NORMAL);
5364 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5365 && (call_expr_nargs (exp) < 3
5366 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5367 return target;
5368 }
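
/* Usage sketch (illustrative; 'buf' is hypothetical):

     double *v = (double *) __builtin_assume_aligned (buf, 32);

   CCP has already attached the alignment to the SSA name, so this
   expander merely returns the first argument after asserting that the
   remaining arguments have no side effects.  */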
5369
5370 void
5371 expand_builtin_trap (void)
5372 {
5373 if (targetm.have_trap ())
5374 {
5375 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5376 /* For trap insns when not accumulating outgoing args force
5377 REG_ARGS_SIZE note to prevent crossjumping of calls with
5378 different args sizes. */
5379 if (!ACCUMULATE_OUTGOING_ARGS)
5380 add_args_size_note (insn, stack_pointer_delta);
5381 }
5382 else
5383 {
5384 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5385 tree call_expr = build_call_expr (fn, 0);
5386 expand_call (call_expr, NULL_RTX, false);
5387 }
5388
5389 emit_barrier ();
5390 }
5391
5392 /* Expand a call to __builtin_unreachable. We do nothing except emit
5393 a barrier saying that control flow will not pass here.
5394
5395 It is the responsibility of the program being compiled to ensure
5396 that control flow never reaches __builtin_unreachable. */
5397 static void
5398 expand_builtin_unreachable (void)
5399 {
5400 emit_barrier ();
5401 }
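
/* Typical use (illustrative; the surrounding switch is hypothetical):

     default:
       __builtin_unreachable ();

   in a switch whose cases are known to be exhaustive.  Only a barrier is
   emitted, so actually reaching the call at run time is undefined
   behavior.  */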
5402
5403 /* Expand EXP, a call to fabs, fabsf or fabsl.
5404 Return NULL_RTX if a normal call should be emitted rather than expanding
5405 the function inline. If convenient, the result should be placed
5406 in TARGET. SUBTARGET may be used as the target for computing
5407 the operand. */
5408
5409 static rtx
5410 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5411 {
5412 machine_mode mode;
5413 tree arg;
5414 rtx op0;
5415
5416 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5417 return NULL_RTX;
5418
5419 arg = CALL_EXPR_ARG (exp, 0);
5420 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5421 mode = TYPE_MODE (TREE_TYPE (arg));
5422 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5423 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5424 }
5425
5426 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5427 Return NULL if a normal call should be emitted rather than expanding the
5428 function inline. If convenient, the result should be placed in TARGET.
5429 SUBTARGET may be used as the target for computing the operand. */
5430
5431 static rtx
5432 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5433 {
5434 rtx op0, op1;
5435 tree arg;
5436
5437 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5438 return NULL_RTX;
5439
5440 arg = CALL_EXPR_ARG (exp, 0);
5441 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5442
5443 arg = CALL_EXPR_ARG (exp, 1);
5444 op1 = expand_normal (arg);
5445
5446 return expand_copysign (op0, op1, target);
5447 }
5448
5449 /* Expand a call to __builtin___clear_cache. */
5450
5451 static rtx
5452 expand_builtin___clear_cache (tree exp)
5453 {
5454 if (!targetm.code_for_clear_cache)
5455 {
5456 #ifdef CLEAR_INSN_CACHE
5457 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5458 does something. Just do the default expansion to a call to
5459 __clear_cache(). */
5460 return NULL_RTX;
5461 #else
5462 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5463 does nothing. There is no need to call it. Do nothing. */
5464 return const0_rtx;
5465 #endif /* CLEAR_INSN_CACHE */
5466 }
5467
5468 /* We have a "clear_cache" insn, and it will handle everything. */
5469 tree begin, end;
5470 rtx begin_rtx, end_rtx;
5471
5472 /* We must not expand to a library call. If we did, any
5473 fallback library function in libgcc that might contain a call to
5474 __builtin___clear_cache() would recurse infinitely. */
5475 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5476 {
5477 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5478 return const0_rtx;
5479 }
5480
5481 if (targetm.have_clear_cache ())
5482 {
5483 struct expand_operand ops[2];
5484
5485 begin = CALL_EXPR_ARG (exp, 0);
5486 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5487
5488 end = CALL_EXPR_ARG (exp, 1);
5489 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5490
5491 create_address_operand (&ops[0], begin_rtx);
5492 create_address_operand (&ops[1], end_rtx);
5493 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5494 return const0_rtx;
5495 }
5496 return const0_rtx;
5497 }
5498
5499 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5500
5501 static rtx
5502 round_trampoline_addr (rtx tramp)
5503 {
5504 rtx temp, addend, mask;
5505
5506 /* If we don't need too much alignment, we'll have been guaranteed
5507 proper alignment by get_trampoline_type. */
5508 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5509 return tramp;
5510
5511 /* Round address up to desired boundary. */
5512 temp = gen_reg_rtx (Pmode);
5513 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5514 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5515
5516 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5517 temp, 0, OPTAB_LIB_WIDEN);
5518 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5519 temp, 0, OPTAB_LIB_WIDEN);
5520
5521 return tramp;
5522 }
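
/* Worked example of the rounding above (illustrative): with a
   TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes), ADDEND is 7 and MASK is -8,
   so an address of 0x1005 is rounded as (0x1005 + 7) & -8 == 0x1008.  */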
5523
5524 static rtx
5525 expand_builtin_init_trampoline (tree exp, bool onstack)
5526 {
5527 tree t_tramp, t_func, t_chain;
5528 rtx m_tramp, r_tramp, r_chain, tmp;
5529
5530 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5531 POINTER_TYPE, VOID_TYPE))
5532 return NULL_RTX;
5533
5534 t_tramp = CALL_EXPR_ARG (exp, 0);
5535 t_func = CALL_EXPR_ARG (exp, 1);
5536 t_chain = CALL_EXPR_ARG (exp, 2);
5537
5538 r_tramp = expand_normal (t_tramp);
5539 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5540 MEM_NOTRAP_P (m_tramp) = 1;
5541
5542 /* If ONSTACK, the TRAMP argument should be the address of a field
5543 within the local function's FRAME decl. Either way, let's see if
5544 we can fill in the MEM_ATTRs for this memory. */
5545 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5546 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5547
5548 /* Creator of a heap trampoline is responsible for making sure the
5549 address is aligned to at least STACK_BOUNDARY. Normally malloc
5550 will ensure this anyhow. */
5551 tmp = round_trampoline_addr (r_tramp);
5552 if (tmp != r_tramp)
5553 {
5554 m_tramp = change_address (m_tramp, BLKmode, tmp);
5555 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5556 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5557 }
5558
5559 /* The FUNC argument should be the address of the nested function.
5560 Extract the actual function decl to pass to the hook. */
5561 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5562 t_func = TREE_OPERAND (t_func, 0);
5563 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5564
5565 r_chain = expand_normal (t_chain);
5566
5567 /* Generate insns to initialize the trampoline. */
5568 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5569
5570 if (onstack)
5571 {
5572 trampolines_created = 1;
5573
5574 if (targetm.calls.custom_function_descriptors != 0)
5575 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5576 "trampoline generated for nested function %qD", t_func);
5577 }
5578
5579 return const0_rtx;
5580 }
5581
5582 static rtx
5583 expand_builtin_adjust_trampoline (tree exp)
5584 {
5585 rtx tramp;
5586
5587 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5588 return NULL_RTX;
5589
5590 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5591 tramp = round_trampoline_addr (tramp);
5592 if (targetm.calls.trampoline_adjust_address)
5593 tramp = targetm.calls.trampoline_adjust_address (tramp);
5594
5595 return tramp;
5596 }
5597
5598 /* Expand a call to the builtin descriptor initialization routine.
5599 A descriptor is made up of a pair of pointer-sized words: the static
5600 chain value followed by the code entry point, in that order. */
5601
5602 static rtx
5603 expand_builtin_init_descriptor (tree exp)
5604 {
5605 tree t_descr, t_func, t_chain;
5606 rtx m_descr, r_descr, r_func, r_chain;
5607
5608 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5609 VOID_TYPE))
5610 return NULL_RTX;
5611
5612 t_descr = CALL_EXPR_ARG (exp, 0);
5613 t_func = CALL_EXPR_ARG (exp, 1);
5614 t_chain = CALL_EXPR_ARG (exp, 2);
5615
5616 r_descr = expand_normal (t_descr);
5617 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5618 MEM_NOTRAP_P (m_descr) = 1;
5619
5620 r_func = expand_normal (t_func);
5621 r_chain = expand_normal (t_chain);
5622
5623 /* Generate insns to initialize the descriptor. */
5624 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5625 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5626 POINTER_SIZE / BITS_PER_UNIT), r_func);
5627
5628 return const0_rtx;
5629 }
5630
5631 /* Expand a call to the builtin descriptor adjustment routine. */
5632
5633 static rtx
5634 expand_builtin_adjust_descriptor (tree exp)
5635 {
5636 rtx tramp;
5637
5638 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5639 return NULL_RTX;
5640
5641 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5642
5643 /* Unalign the descriptor to allow runtime identification. */
5644 tramp = plus_constant (ptr_mode, tramp,
5645 targetm.calls.custom_function_descriptors);
5646
5647 return force_operand (tramp, NULL_RTX);
5648 }
5649
5650 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5651 function. The function first checks whether the back end provides
5652 an insn to implement signbit for the respective mode. If not, it
5653 checks whether the floating point format of the value is such that
5654 the sign bit can be extracted. If that is not the case, error out.
5655 EXP is the expression that is a call to the builtin function; if
5656 convenient, the result should be placed in TARGET. */
5657 static rtx
5658 expand_builtin_signbit (tree exp, rtx target)
5659 {
5660 const struct real_format *fmt;
5661 scalar_float_mode fmode;
5662 scalar_int_mode rmode, imode;
5663 tree arg;
5664 int word, bitpos;
5665 enum insn_code icode;
5666 rtx temp;
5667 location_t loc = EXPR_LOCATION (exp);
5668
5669 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5670 return NULL_RTX;
5671
5672 arg = CALL_EXPR_ARG (exp, 0);
5673 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5674 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5675 fmt = REAL_MODE_FORMAT (fmode);
5676
5677 arg = builtin_save_expr (arg);
5678
5679 /* Expand the argument yielding a RTX expression. */
5680 temp = expand_normal (arg);
5681
5682 /* Check if the back end provides an insn that handles signbit for the
5683 argument's mode. */
5684 icode = optab_handler (signbit_optab, fmode);
5685 if (icode != CODE_FOR_nothing)
5686 {
5687 rtx_insn *last = get_last_insn ();
5688 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5689 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5690 return target;
5691 delete_insns_since (last);
5692 }
5693
5694 /* For floating point formats without a sign bit, implement signbit
5695 as "ARG < 0.0". */
5696 bitpos = fmt->signbit_ro;
5697 if (bitpos < 0)
5698 {
5699 /* But we can't do this if the format supports signed zero. */
5700 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5701
5702 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5703 build_real (TREE_TYPE (arg), dconst0));
5704 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5705 }
5706
5707 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5708 {
5709 imode = int_mode_for_mode (fmode).require ();
5710 temp = gen_lowpart (imode, temp);
5711 }
5712 else
5713 {
5714 imode = word_mode;
5715 /* Handle targets with different FP word orders. */
5716 if (FLOAT_WORDS_BIG_ENDIAN)
5717 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5718 else
5719 word = bitpos / BITS_PER_WORD;
5720 temp = operand_subword_force (temp, word, fmode);
5721 bitpos = bitpos % BITS_PER_WORD;
5722 }
5723
5724 /* Force the intermediate word_mode (or narrower) result into a
5725 register. This avoids attempting to create paradoxical SUBREGs
5726 of floating point modes below. */
5727 temp = force_reg (imode, temp);
5728
5729 /* If the bitpos is within the "result mode" lowpart, the operation
5730 can be implemented with a single bitwise AND. Otherwise, we need
5731 a right shift and an AND. */
5732
5733 if (bitpos < GET_MODE_BITSIZE (rmode))
5734 {
5735 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5736
5737 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5738 temp = gen_lowpart (rmode, temp);
5739 temp = expand_binop (rmode, and_optab, temp,
5740 immed_wide_int_const (mask, rmode),
5741 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5742 }
5743 else
5744 {
5745 /* Perform a logical right shift to place the signbit in the least
5746 significant bit, then truncate the result to the desired mode
5747 and mask just this bit. */
5748 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5749 temp = gen_lowpart (rmode, temp);
5750 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5751 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5752 }
5753
5754 return temp;
5755 }
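
/* Worked example of the fallback path above (illustrative, assuming an
   LP64 little-endian target): for IEEE double, FMODE is DFmode with
   signbit_ro == 63, while RMODE (the int return type) is 32 bits wide.
   Since 63 >= 32, the value is reinterpreted as a 64-bit integer, shifted
   right logically by 63 and masked with 1, i.e. the equivalent of

     (int) ((bits >> 63) & 1)

   where 'bits' holds the raw 64-bit representation of the argument.  */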
5756
5757 /* Expand fork or exec calls. TARGET is the desired target of the
5758 call. EXP is the call. FN is the
5759 identifier of the actual function. IGNORE is nonzero if the
5760 value is to be ignored. */
5761
5762 static rtx
5763 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5764 {
5765 tree id, decl;
5766 tree call;
5767
5768 /* If we are not profiling, just call the function. */
5769 if (!profile_arc_flag)
5770 return NULL_RTX;
5771
5772 /* Otherwise call the wrapper. This should be equivalent for the rest of
5773 compiler, so the code does not diverge, and the wrapper may run the
5774 code necessary for keeping the profiling sane. */
5775
5776 switch (DECL_FUNCTION_CODE (fn))
5777 {
5778 case BUILT_IN_FORK:
5779 id = get_identifier ("__gcov_fork");
5780 break;
5781
5782 case BUILT_IN_EXECL:
5783 id = get_identifier ("__gcov_execl");
5784 break;
5785
5786 case BUILT_IN_EXECV:
5787 id = get_identifier ("__gcov_execv");
5788 break;
5789
5790 case BUILT_IN_EXECLP:
5791 id = get_identifier ("__gcov_execlp");
5792 break;
5793
5794 case BUILT_IN_EXECLE:
5795 id = get_identifier ("__gcov_execle");
5796 break;
5797
5798 case BUILT_IN_EXECVP:
5799 id = get_identifier ("__gcov_execvp");
5800 break;
5801
5802 case BUILT_IN_EXECVE:
5803 id = get_identifier ("__gcov_execve");
5804 break;
5805
5806 default:
5807 gcc_unreachable ();
5808 }
5809
5810 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5811 FUNCTION_DECL, id, TREE_TYPE (fn));
5812 DECL_EXTERNAL (decl) = 1;
5813 TREE_PUBLIC (decl) = 1;
5814 DECL_ARTIFICIAL (decl) = 1;
5815 TREE_NOTHROW (decl) = 1;
5816 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5817 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5818 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5819 return expand_call (call, target, ignore);
5820 }
5821
5822
5823 \f
5824 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5825 the pointer in these functions is void*, the tree optimizers may remove
5826 casts. The mode computed in expand_builtin isn't reliable either, due
5827 to __sync_bool_compare_and_swap.
5828
5829 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5830 group of builtins. This gives us log2 of the mode size. */
5831
5832 static inline machine_mode
5833 get_builtin_sync_mode (int fcode_diff)
5834 {
5835 /* The size is not negotiable, so ask not to get BLKmode in return
5836 if the target indicates that a smaller size would be better. */
5837 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5838 }
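
/* Example of the mapping above (illustrative): for
   BUILT_IN_SYNC_FETCH_AND_ADD_4 the difference from the _1 variant is 2,
   so the access width is BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on
   typical targets.  */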
5839
5840 /* Expand the memory expression LOC and return the appropriate memory operand
5841 for the builtin_sync operations. */
5842
5843 static rtx
5844 get_builtin_sync_mem (tree loc, machine_mode mode)
5845 {
5846 rtx addr, mem;
5847 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5848 ? TREE_TYPE (TREE_TYPE (loc))
5849 : TREE_TYPE (loc));
5850 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5851
5852 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5853
5854 /* Note that we explicitly do not want any alias information for this
5855 memory, so that we kill all other live memories. Otherwise we don't
5856 satisfy the full barrier semantics of the intrinsic. */
5857 mem = gen_rtx_MEM (mode, addr);
5858
5859 set_mem_addr_space (mem, addr_space);
5860
5861 mem = validize_mem (mem);
5862
5863 /* The alignment needs to be at least that of the mode. */
5864 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5865 get_pointer_alignment (loc)));
5866 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5867 MEM_VOLATILE_P (mem) = 1;
5868
5869 return mem;
5870 }
5871
5872 /* Make sure an argument is in the right mode.
5873 EXP is the tree argument.
5874 MODE is the mode it should be in. */
5875
5876 static rtx
5877 expand_expr_force_mode (tree exp, machine_mode mode)
5878 {
5879 rtx val;
5880 machine_mode old_mode;
5881
5882 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5883 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5884 of CONST_INTs, where we know the old_mode only from the call argument. */
5885
5886 old_mode = GET_MODE (val);
5887 if (old_mode == VOIDmode)
5888 old_mode = TYPE_MODE (TREE_TYPE (exp));
5889 val = convert_modes (mode, old_mode, val, 1);
5890 return val;
5891 }
5892
5893
5894 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5895 EXP is the CALL_EXPR. CODE is the rtx code
5896 that corresponds to the arithmetic or logical operation from the name;
5897 an exception here is that NOT actually means NAND. TARGET is an optional
5898 place for us to store the results; AFTER is true if this is the
5899 fetch_and_xxx form. */
5900
5901 static rtx
5902 expand_builtin_sync_operation (machine_mode mode, tree exp,
5903 enum rtx_code code, bool after,
5904 rtx target)
5905 {
5906 rtx val, mem;
5907 location_t loc = EXPR_LOCATION (exp);
5908
5909 if (code == NOT && warn_sync_nand)
5910 {
5911 tree fndecl = get_callee_fndecl (exp);
5912 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5913
5914 static bool warned_f_a_n, warned_n_a_f;
5915
5916 switch (fcode)
5917 {
5918 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5919 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5920 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5921 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5922 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5923 if (warned_f_a_n)
5924 break;
5925
5926 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5927 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5928 warned_f_a_n = true;
5929 break;
5930
5931 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5932 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5933 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5934 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5935 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5936 if (warned_n_a_f)
5937 break;
5938
5939 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5940 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5941 warned_n_a_f = true;
5942 break;
5943
5944 default:
5945 gcc_unreachable ();
5946 }
5947 }
5948
5949 /* Expand the operands. */
5950 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5951 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5952
5953 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5954 after);
5955 }
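
/* Source-level sketch of the two forms handled above (illustrative;
   'counter' is hypothetical):

     old_val = __sync_fetch_and_add (&counter, 1);   returns the old value
     new_val = __sync_add_and_fetch (&counter, 1);   returns the new value

   Since GCC 4.4 the NAND variants compute ~(old & val), which is what the
   informational note emitted above refers to.  */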
5956
5957 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5958 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5959 true if this is the boolean form. TARGET is a place for us to store the
5960 results; this is NOT optional if IS_BOOL is true. */
5961
5962 static rtx
5963 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5964 bool is_bool, rtx target)
5965 {
5966 rtx old_val, new_val, mem;
5967 rtx *pbool, *poval;
5968
5969 /* Expand the operands. */
5970 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5971 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5972 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5973
5974 pbool = poval = NULL;
5975 if (target != const0_rtx)
5976 {
5977 if (is_bool)
5978 pbool = &target;
5979 else
5980 poval = &target;
5981 }
5982 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5983 false, MEMMODEL_SYNC_SEQ_CST,
5984 MEMMODEL_SYNC_SEQ_CST))
5985 return NULL_RTX;
5986
5987 return target;
5988 }
5989
5990 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5991 general form is actually an atomic exchange, and some targets only
5992 support a reduced form with the second argument being a constant 1.
5993 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5994 the results. */
5995
5996 static rtx
5997 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5998 rtx target)
5999 {
6000 rtx val, mem;
6001
6002 /* Expand the operands. */
6003 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6004 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6005
6006 return expand_sync_lock_test_and_set (target, mem, val);
6007 }
6008
6009 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6010
6011 static void
6012 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6013 {
6014 rtx mem;
6015
6016 /* Expand the operands. */
6017 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6018
6019 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6020 }
6021
6022 /* Given an integer representing an ``enum memmodel'', verify its
6023 correctness and return the memory model enum. */
6024
6025 static enum memmodel
6026 get_memmodel (tree exp)
6027 {
6028 rtx op;
6029 unsigned HOST_WIDE_INT val;
6030 source_location loc
6031 = expansion_point_location_if_in_system_header (input_location);
6032
6033 /* If the parameter is not a constant, it's a run time value so we'll just
6034 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6035 if (TREE_CODE (exp) != INTEGER_CST)
6036 return MEMMODEL_SEQ_CST;
6037
6038 op = expand_normal (exp);
6039
6040 val = INTVAL (op);
6041 if (targetm.memmodel_check)
6042 val = targetm.memmodel_check (val);
6043 else if (val & ~MEMMODEL_MASK)
6044 {
6045 warning_at (loc, OPT_Winvalid_memory_model,
6046 "unknown architecture specifier in memory model to builtin");
6047 return MEMMODEL_SEQ_CST;
6048 }
6049
6050 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6051 if (memmodel_base (val) >= MEMMODEL_LAST)
6052 {
6053 warning_at (loc, OPT_Winvalid_memory_model,
6054 "invalid memory model argument to builtin");
6055 return MEMMODEL_SEQ_CST;
6056 }
6057
6058 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6059 be conservative and promote consume to acquire. */
6060 if (val == MEMMODEL_CONSUME)
6061 val = MEMMODEL_ACQUIRE;
6062
6063 return (enum memmodel) val;
6064 }
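
/* Example of the values seen here (illustrative; 'x' is hypothetical):
   a call such as __atomic_load_n (&x, __ATOMIC_ACQUIRE) passes the
   constant 2 (MEMMODEL_ACQUIRE), while __ATOMIC_CONSUME (1) is promoted
   to acquire by the workaround above.  */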
6065
6066 /* Expand the __atomic_exchange intrinsic:
6067 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6068 EXP is the CALL_EXPR.
6069 TARGET is an optional place for us to store the results. */
6070
6071 static rtx
6072 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6073 {
6074 rtx val, mem;
6075 enum memmodel model;
6076
6077 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6078
6079 if (!flag_inline_atomics)
6080 return NULL_RTX;
6081
6082 /* Expand the operands. */
6083 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6084 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6085
6086 return expand_atomic_exchange (target, mem, val, model);
6087 }
6088
6089 /* Expand the __atomic_compare_exchange intrinsic:
6090 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6091 TYPE desired, BOOL weak,
6092 enum memmodel success,
6093 enum memmodel failure)
6094 EXP is the CALL_EXPR.
6095 TARGET is an optional place for us to store the results. */
6096
6097 static rtx
6098 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6099 rtx target)
6100 {
6101 rtx expect, desired, mem, oldval;
6102 rtx_code_label *label;
6103 enum memmodel success, failure;
6104 tree weak;
6105 bool is_weak;
6106 source_location loc
6107 = expansion_point_location_if_in_system_header (input_location);
6108
6109 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6110 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6111
6112 if (failure > success)
6113 {
6114 warning_at (loc, OPT_Winvalid_memory_model,
6115 "failure memory model cannot be stronger than success "
6116 "memory model for %<__atomic_compare_exchange%>");
6117 success = MEMMODEL_SEQ_CST;
6118 }
6119
6120 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6121 {
6122 warning_at (loc, OPT_Winvalid_memory_model,
6123 "invalid failure memory model for "
6124 "%<__atomic_compare_exchange%>");
6125 failure = MEMMODEL_SEQ_CST;
6126 success = MEMMODEL_SEQ_CST;
6127 }
6128
6129
6130 if (!flag_inline_atomics)
6131 return NULL_RTX;
6132
6133 /* Expand the operands. */
6134 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6135
6136 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6137 expect = convert_memory_address (Pmode, expect);
6138 expect = gen_rtx_MEM (mode, expect);
6139 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6140
6141 weak = CALL_EXPR_ARG (exp, 3);
6142 is_weak = false;
6143 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6144 is_weak = true;
6145
6146 if (target == const0_rtx)
6147 target = NULL;
6148
6149 /* Lest the rtl backend create a race condition with an improper store
6150 to memory, always create a new pseudo for OLDVAL. */
6151 oldval = NULL;
6152
6153 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6154 is_weak, success, failure))
6155 return NULL_RTX;
6156
6157 /* Conditionally store back to EXPECT, lest we create a race condition
6158 with an improper store to memory. */
6159 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6160 the normal case where EXPECT is totally private, i.e. a register. At
6161 which point the store can be unconditional. */
6162 label = gen_label_rtx ();
6163 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6164 GET_MODE (target), 1, label);
6165 emit_move_insn (expect, oldval);
6166 emit_label (label);
6167
6168 return target;
6169 }
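
/* Source-level sketch of what is expanded here (illustrative; 'lock' and
   'expected' are hypothetical):

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&lock, &expected, 1,
                                            false, __ATOMIC_ACQ_REL,
                                            __ATOMIC_ACQUIRE);

   On failure the current value of the object is written back through the
   'expected' pointer, which is what the conditional store emitted above
   implements.  */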
6170
6171 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6172 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6173 call. The weak parameter must be dropped to match the expected parameter
6174 list and the expected argument changed from value to pointer to memory
6175 slot. */
6176
6177 static void
6178 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6179 {
6180 unsigned int z;
6181 vec<tree, va_gc> *vec;
6182
6183 vec_alloc (vec, 5);
6184 vec->quick_push (gimple_call_arg (call, 0));
6185 tree expected = gimple_call_arg (call, 1);
6186 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6187 TREE_TYPE (expected));
6188 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6189 if (expd != x)
6190 emit_move_insn (x, expd);
6191 tree v = make_tree (TREE_TYPE (expected), x);
6192 vec->quick_push (build1 (ADDR_EXPR,
6193 build_pointer_type (TREE_TYPE (expected)), v));
6194 vec->quick_push (gimple_call_arg (call, 2));
6195 /* Skip the boolean weak parameter. */
6196 for (z = 4; z < 6; z++)
6197 vec->quick_push (gimple_call_arg (call, z));
6198 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6199 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6200 gcc_assert (bytes_log2 < 5);
6201 built_in_function fncode
6202 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6203 + bytes_log2);
6204 tree fndecl = builtin_decl_explicit (fncode);
6205 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6206 fndecl);
6207 tree exp = build_call_vec (boolean_type_node, fn, vec);
6208 tree lhs = gimple_call_lhs (call);
6209 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6210 if (lhs)
6211 {
6212 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6213 if (GET_MODE (boolret) != mode)
6214 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6215 x = force_reg (mode, x);
6216 write_complex_part (target, boolret, true);
6217 write_complex_part (target, x, false);
6218 }
6219 }
6220
6221 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6222
6223 void
6224 expand_ifn_atomic_compare_exchange (gcall *call)
6225 {
6226 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6227 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6228 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6229 rtx expect, desired, mem, oldval, boolret;
6230 enum memmodel success, failure;
6231 tree lhs;
6232 bool is_weak;
6233 source_location loc
6234 = expansion_point_location_if_in_system_header (gimple_location (call));
6235
6236 success = get_memmodel (gimple_call_arg (call, 4));
6237 failure = get_memmodel (gimple_call_arg (call, 5));
6238
6239 if (failure > success)
6240 {
6241 warning_at (loc, OPT_Winvalid_memory_model,
6242 "failure memory model cannot be stronger than success "
6243 "memory model for %<__atomic_compare_exchange%>");
6244 success = MEMMODEL_SEQ_CST;
6245 }
6246
6247 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6248 {
6249 warning_at (loc, OPT_Winvalid_memory_model,
6250 "invalid failure memory model for "
6251 "%<__atomic_compare_exchange%>");
6252 failure = MEMMODEL_SEQ_CST;
6253 success = MEMMODEL_SEQ_CST;
6254 }
6255
6256 if (!flag_inline_atomics)
6257 {
6258 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6259 return;
6260 }
6261
6262 /* Expand the operands. */
6263 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6264
6265 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6266 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6267
6268 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6269
6270 boolret = NULL;
6271 oldval = NULL;
6272
6273 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6274 is_weak, success, failure))
6275 {
6276 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6277 return;
6278 }
6279
6280 lhs = gimple_call_lhs (call);
6281 if (lhs)
6282 {
6283 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6284 if (GET_MODE (boolret) != mode)
6285 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6286 write_complex_part (target, boolret, true);
6287 write_complex_part (target, oldval, false);
6288 }
6289 }
6290
6291 /* Expand the __atomic_load intrinsic:
6292 TYPE __atomic_load (TYPE *object, enum memmodel)
6293 EXP is the CALL_EXPR.
6294 TARGET is an optional place for us to store the results. */
6295
6296 static rtx
6297 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6298 {
6299 rtx mem;
6300 enum memmodel model;
6301
6302 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6303 if (is_mm_release (model) || is_mm_acq_rel (model))
6304 {
6305 source_location loc
6306 = expansion_point_location_if_in_system_header (input_location);
6307 warning_at (loc, OPT_Winvalid_memory_model,
6308 "invalid memory model for %<__atomic_load%>");
6309 model = MEMMODEL_SEQ_CST;
6310 }
6311
6312 if (!flag_inline_atomics)
6313 return NULL_RTX;
6314
6315 /* Expand the operand. */
6316 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6317
6318 return expand_atomic_load (target, mem, model);
6319 }
6320
6321
6322 /* Expand the __atomic_store intrinsic:
6323 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6324 EXP is the CALL_EXPR.
6325 TARGET is an optional place for us to store the results. */
6326
6327 static rtx
6328 expand_builtin_atomic_store (machine_mode mode, tree exp)
6329 {
6330 rtx mem, val;
6331 enum memmodel model;
6332
6333 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6334 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6335 || is_mm_release (model)))
6336 {
6337 source_location loc
6338 = expansion_point_location_if_in_system_header (input_location);
6339 warning_at (loc, OPT_Winvalid_memory_model,
6340 "invalid memory model for %<__atomic_store%>");
6341 model = MEMMODEL_SEQ_CST;
6342 }
6343
6344 if (!flag_inline_atomics)
6345 return NULL_RTX;
6346
6347 /* Expand the operands. */
6348 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6349 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6350
6351 return expand_atomic_store (mem, val, model, false);
6352 }
6353
6354 /* Expand the __atomic_fetch_XXX intrinsic:
6355 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6356 EXP is the CALL_EXPR.
6357 TARGET is an optional place for us to store the results.
6358 CODE is the operation: PLUS, MINUS, AND, IOR, XOR, or NOT (for NAND).
6359 FETCH_AFTER is true if returning the result of the operation.
6360 FETCH_AFTER is false if returning the value before the operation.
6361 IGNORE is true if the result is not used.
6362 EXT_CALL is the correct builtin for an external call if this cannot be
6363 resolved to an instruction sequence. */
6364
6365 static rtx
6366 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6367 enum rtx_code code, bool fetch_after,
6368 bool ignore, enum built_in_function ext_call)
6369 {
6370 rtx val, mem, ret;
6371 enum memmodel model;
6372 tree fndecl;
6373 tree addr;
6374
6375 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6376
6377 /* Expand the operands. */
6378 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6379 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6380
6381 /* Only try generating instructions if inlining is turned on. */
6382 if (flag_inline_atomics)
6383 {
6384 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6385 if (ret)
6386 return ret;
6387 }
6388
6389 /* Return if a different routine isn't needed for the library call. */
6390 if (ext_call == BUILT_IN_NONE)
6391 return NULL_RTX;
6392
6393 /* Change the call to the specified function. */
6394 fndecl = get_callee_fndecl (exp);
6395 addr = CALL_EXPR_FN (exp);
6396 STRIP_NOPS (addr);
6397
6398 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6399 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6400
6401 /* If we will emit code after the call, the call cannot be a tail call.
6402 If it is emitted as a tail call, a barrier is emitted after it, and
6403 then all trailing code is removed. */
6404 if (!ignore)
6405 CALL_EXPR_TAILCALL (exp) = 0;
6406
6407 /* Expand the call here so we can emit trailing code. */
6408 ret = expand_call (exp, target, ignore);
6409
6410 /* Replace the original function just in case it matters. */
6411 TREE_OPERAND (addr, 0) = fndecl;
6412
6413 /* Then issue the arithmetic correction to return the right result. */
6414 if (!ignore)
6415 {
6416 if (code == NOT)
6417 {
6418 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6419 OPTAB_LIB_WIDEN);
6420 ret = expand_simple_unop (mode, NOT, ret, target, true);
6421 }
6422 else
6423 ret = expand_simple_binop (mode, code, ret, val, target, true,
6424 OPTAB_LIB_WIDEN);
6425 }
6426 return ret;
6427 }
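
/* Worked example of the correction above (illustrative): if the source
   used __atomic_add_fetch (p, 5, m) but only the fetch-before library
   routine is available, the library call returns the old value and the
   code above adds 5 to it afterwards.  For NAND the fetched value must be
   corrected as ~(old & val), hence the AND followed by NOT.  */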
6428
6429 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6430
6431 void
6432 expand_ifn_atomic_bit_test_and (gcall *call)
6433 {
6434 tree ptr = gimple_call_arg (call, 0);
6435 tree bit = gimple_call_arg (call, 1);
6436 tree flag = gimple_call_arg (call, 2);
6437 tree lhs = gimple_call_lhs (call);
6438 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6439 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6440 enum rtx_code code;
6441 optab optab;
6442 struct expand_operand ops[5];
6443
6444 gcc_assert (flag_inline_atomics);
6445
6446 if (gimple_call_num_args (call) == 4)
6447 model = get_memmodel (gimple_call_arg (call, 3));
6448
6449 rtx mem = get_builtin_sync_mem (ptr, mode);
6450 rtx val = expand_expr_force_mode (bit, mode);
6451
6452 switch (gimple_call_internal_fn (call))
6453 {
6454 case IFN_ATOMIC_BIT_TEST_AND_SET:
6455 code = IOR;
6456 optab = atomic_bit_test_and_set_optab;
6457 break;
6458 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6459 code = XOR;
6460 optab = atomic_bit_test_and_complement_optab;
6461 break;
6462 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6463 code = AND;
6464 optab = atomic_bit_test_and_reset_optab;
6465 break;
6466 default:
6467 gcc_unreachable ();
6468 }
6469
6470 if (lhs == NULL_TREE)
6471 {
6472 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6473 val, NULL_RTX, true, OPTAB_DIRECT);
6474 if (code == AND)
6475 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6476 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6477 return;
6478 }
6479
6480 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6481 enum insn_code icode = direct_optab_handler (optab, mode);
6482 gcc_assert (icode != CODE_FOR_nothing);
6483 create_output_operand (&ops[0], target, mode);
6484 create_fixed_operand (&ops[1], mem);
6485 create_convert_operand_to (&ops[2], val, mode, true);
6486 create_integer_operand (&ops[3], model);
6487 create_integer_operand (&ops[4], integer_onep (flag));
6488 if (maybe_expand_insn (icode, 5, ops))
6489 return;
6490
6491 rtx bitval = val;
6492 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6493 val, NULL_RTX, true, OPTAB_DIRECT);
6494 rtx maskval = val;
6495 if (code == AND)
6496 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6497 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6498 code, model, false);
6499 if (integer_onep (flag))
6500 {
6501 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6502 NULL_RTX, true, OPTAB_DIRECT);
6503 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6504 true, OPTAB_DIRECT);
6505 }
6506 else
6507 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6508 OPTAB_DIRECT);
6509 if (result != target)
6510 emit_move_insn (target, result);
6511 }
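
/* Source pattern that gives rise to this internal function (illustrative;
   'flags' and 'bit' are hypothetical):

     bool was_set = (__atomic_fetch_or (&flags, 1u << bit, __ATOMIC_SEQ_CST)
                     >> bit) & 1;

   When the target provides atomic_bit_test_and_set, the whole idiom maps
   to a single insn via the optab; otherwise the fallback above rebuilds
   the mask, performs the fetch-op and extracts the bit by shifting and
   masking.  */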
6512
6513 /* Expand an atomic clear operation.
6514 void _atomic_clear (BOOL *obj, enum memmodel)
6515 EXP is the call expression. */
6516
6517 static rtx
6518 expand_builtin_atomic_clear (tree exp)
6519 {
6520 machine_mode mode;
6521 rtx mem, ret;
6522 enum memmodel model;
6523
6524 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6525 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6526 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6527
6528 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6529 {
6530 source_location loc
6531 = expansion_point_location_if_in_system_header (input_location);
6532 warning_at (loc, OPT_Winvalid_memory_model,
6533 "invalid memory model for %<__atomic_store%>");
6534 model = MEMMODEL_SEQ_CST;
6535 }
6536
6537 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6538 Failing that, a store is issued by __atomic_store. The only way this can
6539 fail is if the bool type is larger than a word size. Unlikely, but
6540 handle it anyway for completeness. Assume a single threaded model since
6541 there is no atomic support in this case, and no barriers are required. */
6542 ret = expand_atomic_store (mem, const0_rtx, model, true);
6543 if (!ret)
6544 emit_move_insn (mem, const0_rtx);
6545 return const0_rtx;
6546 }
6547
6548 /* Expand an atomic test_and_set operation.
6549 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6550 EXP is the call expression. */
6551
6552 static rtx
6553 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6554 {
6555 rtx mem;
6556 enum memmodel model;
6557 machine_mode mode;
6558
6559 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6560 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6561 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6562
6563 return expand_atomic_test_and_set (target, mem, model);
6564 }
6565
6566
6567 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6568 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6569
6570 static tree
6571 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6572 {
6573 int size;
6574 machine_mode mode;
6575 unsigned int mode_align, type_align;
6576
6577 if (TREE_CODE (arg0) != INTEGER_CST)
6578 return NULL_TREE;
6579
6580 /* We need a corresponding integer mode for the access to be lock-free. */
6581 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6582 if (!int_mode_for_size (size, 0).exists (&mode))
6583 return boolean_false_node;
6584
6585 mode_align = GET_MODE_ALIGNMENT (mode);
6586
6587 if (TREE_CODE (arg1) == INTEGER_CST)
6588 {
6589 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6590
6591 /* Either this argument is null, or it's a fake pointer encoding
6592 the alignment of the object. */
6593 val = least_bit_hwi (val);
6594 val *= BITS_PER_UNIT;
6595
6596 if (val == 0 || mode_align < val)
6597 type_align = mode_align;
6598 else
6599 type_align = val;
6600 }
6601 else
6602 {
6603 tree ttype = TREE_TYPE (arg1);
6604
6605 /* This function is usually invoked and folded immediately by the front
6606 end before anything else has a chance to look at it. The pointer
6607 parameter at this point is usually cast to a void *, so check for that
6608 and look past the cast. */
6609 if (CONVERT_EXPR_P (arg1)
6610 && POINTER_TYPE_P (ttype)
6611 && VOID_TYPE_P (TREE_TYPE (ttype))
6612 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6613 arg1 = TREE_OPERAND (arg1, 0);
6614
6615 ttype = TREE_TYPE (arg1);
6616 gcc_assert (POINTER_TYPE_P (ttype));
6617
6618 /* Get the underlying type of the object. */
6619 ttype = TREE_TYPE (ttype);
6620 type_align = TYPE_ALIGN (ttype);
6621 }
6622
6623 /* If the object has smaller alignment, the lock free routines cannot
6624 be used. */
6625 if (type_align < mode_align)
6626 return boolean_false_node;
6627
6628 /* Check if a compare_and_swap pattern exists for the mode which represents
6629 the required size. The pattern is not allowed to fail, so the existence
6630 of the pattern indicates support is present. Also require that an
6631 atomic load exists for the required size. */
6632 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6633 return boolean_true_node;
6634 else
6635 return boolean_false_node;
6636 }
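
/* Example of the folding above (illustrative): on a target with a 32-bit
   compare-and-swap pattern,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true, because a null second argument means "typical alignment
   for the size".  Passing the address of an object whose type is less
   aligned than the mode makes the type_align < mode_align check fail and
   the call folds to false.  */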
6637
6638 /* Return true if the parameters to call EXP represent an object which will
6639 always generate lock free instructions. The first argument represents the
6640 size of the object, and the second parameter is a pointer to the object
6641 itself. If NULL is passed for the object, then the result is based on
6642 typical alignment for an object of the specified size. Otherwise return
6643 false. */
6644
6645 static rtx
6646 expand_builtin_atomic_always_lock_free (tree exp)
6647 {
6648 tree size;
6649 tree arg0 = CALL_EXPR_ARG (exp, 0);
6650 tree arg1 = CALL_EXPR_ARG (exp, 1);
6651
6652 if (TREE_CODE (arg0) != INTEGER_CST)
6653 {
6654 error ("non-constant argument 1 to __atomic_always_lock_free");
6655 return const0_rtx;
6656 }
6657
6658 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6659 if (size == boolean_true_node)
6660 return const1_rtx;
6661 return const0_rtx;
6662 }
6663
6664 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6665 is lock free on this architecture. */
6666
6667 static tree
6668 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6669 {
6670 if (!flag_inline_atomics)
6671 return NULL_TREE;
6672
6673 /* If it isn't always lock free, don't generate a result. */
6674 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6675 return boolean_true_node;
6676
6677 return NULL_TREE;
6678 }
6679
6680 /* Return true if the parameters to call EXP represent an object which will
6681 always generate lock free instructions. The first argument represents the
6682 size of the object, and the second parameter is a pointer to the object
6683 itself. If NULL is passed for the object, then the result is based on
6684 typical alignment for an object of the specified size. Otherwise return
6685 NULL. */
6686
6687 static rtx
6688 expand_builtin_atomic_is_lock_free (tree exp)
6689 {
6690 tree size;
6691 tree arg0 = CALL_EXPR_ARG (exp, 0);
6692 tree arg1 = CALL_EXPR_ARG (exp, 1);
6693
6694 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6695 {
6696 error ("non-integer argument 1 to __atomic_is_lock_free");
6697 return NULL_RTX;
6698 }
6699
6700 if (!flag_inline_atomics)
6701 return NULL_RTX;
6702
6703 /* If the value is known at compile time, return the RTX for it. */
6704 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6705 if (size == boolean_true_node)
6706 return const1_rtx;
6707
6708 return NULL_RTX;
6709 }
6710
6711 /* Expand the __atomic_thread_fence intrinsic:
6712 void __atomic_thread_fence (enum memmodel)
6713 EXP is the CALL_EXPR. */
6714
6715 static void
6716 expand_builtin_atomic_thread_fence (tree exp)
6717 {
6718 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6719 expand_mem_thread_fence (model);
6720 }
6721
6722 /* Expand the __atomic_signal_fence intrinsic:
6723 void __atomic_signal_fence (enum memmodel)
6724 EXP is the CALL_EXPR. */
6725
6726 static void
6727 expand_builtin_atomic_signal_fence (tree exp)
6728 {
6729 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6730 expand_mem_signal_fence (model);
6731 }
6732
6733 /* Expand the __sync_synchronize intrinsic. */
6734
6735 static void
6736 expand_builtin_sync_synchronize (void)
6737 {
6738 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6739 }
6740
6741 static rtx
6742 expand_builtin_thread_pointer (tree exp, rtx target)
6743 {
6744 enum insn_code icode;
6745 if (!validate_arglist (exp, VOID_TYPE))
6746 return const0_rtx;
6747 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6748 if (icode != CODE_FOR_nothing)
6749 {
6750 struct expand_operand op;
6751 /* If the target is not suitable then create a new target. */
6752 if (target == NULL_RTX
6753 || !REG_P (target)
6754 || GET_MODE (target) != Pmode)
6755 target = gen_reg_rtx (Pmode);
6756 create_output_operand (&op, target, Pmode);
6757 expand_insn (icode, 1, &op);
6758 return target;
6759 }
6760 error ("__builtin_thread_pointer is not supported on this target");
6761 return const0_rtx;
6762 }
6763
6764 static void
6765 expand_builtin_set_thread_pointer (tree exp)
6766 {
6767 enum insn_code icode;
6768 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6769 return;
6770 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6771 if (icode != CODE_FOR_nothing)
6772 {
6773 struct expand_operand op;
6774 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6775 Pmode, EXPAND_NORMAL);
6776 create_input_operand (&op, val, Pmode);
6777 expand_insn (icode, 1, &op);
6778 return;
6779 }
6780 error ("__builtin_set_thread_pointer is not supported on this target");
6781 }
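
/* These two expanders implement __builtin_thread_pointer and
   __builtin_set_thread_pointer.  A minimal sketch of their use (assuming a
   target that provides both optabs; otherwise each call is a hard error):

     void *tcb = __builtin_thread_pointer ();
     ...
     __builtin_set_thread_pointer (tcb);
   */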
6782
6783 \f
6784 /* Emit code to restore the current value of the stack. */
6785
6786 static void
6787 expand_stack_restore (tree var)
6788 {
6789 rtx_insn *prev;
6790 rtx sa = expand_normal (var);
6791
6792 sa = convert_memory_address (Pmode, sa);
6793
6794 prev = get_last_insn ();
6795 emit_stack_restore (SAVE_BLOCK, sa);
6796
6797 record_new_stack_level ();
6798
6799 fixup_args_size_notes (prev, get_last_insn (), 0);
6800 }
6801
6802 /* Emit code to save the current value of the stack. */
6803
6804 static rtx
6805 expand_stack_save (void)
6806 {
6807 rtx ret = NULL_RTX;
6808
6809 emit_stack_save (SAVE_BLOCK, &ret);
6810 return ret;
6811 }
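
/* __builtin_stack_save and __builtin_stack_restore, expanded by the two
   functions above, are what the gimplifier uses to bracket scopes containing
   variable-length arrays.  A sketch of source that gives rise to them ("use"
   is an illustrative callee):

     void f (int n)
     {
       {
         char buf[n];    stack_save on entry to the block
         use (buf);
       }                 stack_restore when the block is left
     }
   */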
6812
6813 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6814
6815 static rtx
6816 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6817 {
6818 const char *name;
6819 rtx fallback_retval;
6820 rtx_insn *(*gen_fn) (rtx, rtx);
6821 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6822 {
6823 case BUILT_IN_GOACC_PARLEVEL_ID:
6824 name = "__builtin_goacc_parlevel_id";
6825 fallback_retval = const0_rtx;
6826 gen_fn = targetm.gen_oacc_dim_pos;
6827 break;
6828 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6829 name = "__builtin_goacc_parlevel_size";
6830 fallback_retval = const1_rtx;
6831 gen_fn = targetm.gen_oacc_dim_size;
6832 break;
6833 default:
6834 gcc_unreachable ();
6835 }
6836
6837 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6838 {
6839 error ("%qs only supported in OpenACC code", name);
6840 return const0_rtx;
6841 }
6842
6843 tree arg = CALL_EXPR_ARG (exp, 0);
6844 if (TREE_CODE (arg) != INTEGER_CST)
6845 {
6846 error ("non-constant argument 0 to %qs", name);
6847 return const0_rtx;
6848 }
6849
6850 int dim = TREE_INT_CST_LOW (arg);
6851 switch (dim)
6852 {
6853 case GOMP_DIM_GANG:
6854 case GOMP_DIM_WORKER:
6855 case GOMP_DIM_VECTOR:
6856 break;
6857 default:
6858 error ("illegal argument 0 to %qs", name);
6859 return const0_rtx;
6860 }
6861
6862 if (ignore)
6863 return target;
6864
6865 if (target == NULL_RTX)
6866 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6867
6868 if (!targetm.have_oacc_dim_size ())
6869 {
6870 emit_move_insn (target, fallback_retval);
6871 return target;
6872 }
6873
6874 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6875 emit_insn (gen_fn (reg, GEN_INT (dim)));
6876 if (reg != target)
6877 emit_move_insn (target, reg);
6878
6879 return target;
6880 }
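
/* A minimal usage sketch, assuming the enclosing function has already been
   marked as OpenACC offload code (otherwise the expander above diagnoses it):

     int gang  = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
     int width = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   When the target does not provide the oacc dimension patterns, the calls
   simply evaluate to the fallback constants 0 and 1 respectively.  */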
6881
6882 /* Expand a string compare operation using a sequence of char comparisons
6883 to get rid of the calling overhead, with the result going to TARGET if
6884 that's convenient.
6885
6886 VAR_STR is the variable string source;
6887 CONST_STR is the constant string source;
6888 LENGTH is the number of chars to compare;
6889 CONST_STR_N indicates which source string is the constant string;
6890 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6891
6892 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
6893
6894 target = (int) (unsigned char) var_str[0]
6895 - (int) (unsigned char) const_str[0];
6896 if (target != 0)
6897 goto ne_label;
6898 ...
6899 target = (int) (unsigned char) var_str[length - 2]
6900 - (int) (unsigned char) const_str[length - 2];
6901 if (target != 0)
6902 goto ne_label;
6903 target = (int) (unsigned char) var_str[length - 1]
6904 - (int) (unsigned char) const_str[length - 1];
6905 ne_label:
6906 */
6907
6908 static rtx
6909 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6910 unsigned HOST_WIDE_INT length,
6911 int const_str_n, machine_mode mode)
6912 {
6913 HOST_WIDE_INT offset = 0;
6914 rtx var_rtx_array
6915 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6916 rtx var_rtx = NULL_RTX;
6917 rtx const_rtx = NULL_RTX;
6918 rtx result = target ? target : gen_reg_rtx (mode);
6919 rtx_code_label *ne_label = gen_label_rtx ();
6920 tree unit_type_node = unsigned_char_type_node;
6921 scalar_int_mode unit_mode
6922 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6923
6924 start_sequence ();
6925
6926 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6927 {
6928 var_rtx
6929 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6930 const_rtx = c_readstr (const_str + offset, unit_mode);
6931 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6932 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6933
6934 op0 = convert_modes (mode, unit_mode, op0, 1);
6935 op1 = convert_modes (mode, unit_mode, op1, 1);
6936 result = expand_simple_binop (mode, MINUS, op0, op1,
6937 result, 1, OPTAB_WIDEN);
6938 if (i < length - 1)
6939 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6940 mode, true, ne_label);
6941 offset += GET_MODE_SIZE (unit_mode);
6942 }
6943
6944 emit_label (ne_label);
6945 rtx_insn *insns = get_insns ();
6946 end_sequence ();
6947 emit_insn (insns);
6948
6949 return result;
6950 }
6951
6952 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
6953 to TARGET if that's convenient.
6954 If the call is not inlined, return NULL_RTX. */
6955 static rtx
6956 inline_expand_builtin_string_cmp (tree exp, rtx target)
6957 {
6958 tree fndecl = get_callee_fndecl (exp);
6959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6960 unsigned HOST_WIDE_INT length = 0;
6961 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6962
6963 /* Do NOT apply this inlining expansion when optimizing for size or when
6964 the optimization level is below 2. */
6965 if (optimize < 2 || optimize_insn_for_size_p ())
6966 return NULL_RTX;
6967
6968 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6969 || fcode == BUILT_IN_STRNCMP
6970 || fcode == BUILT_IN_MEMCMP);
6971
6972 /* On a target where the type of the call (int) has the same or narrower
6973 precision than unsigned char, give up on the inlining expansion. */
6974 if (TYPE_PRECISION (unsigned_char_type_node)
6975 >= TYPE_PRECISION (TREE_TYPE (exp)))
6976 return NULL_RTX;
6977
6978 tree arg1 = CALL_EXPR_ARG (exp, 0);
6979 tree arg2 = CALL_EXPR_ARG (exp, 1);
6980 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6981
6982 unsigned HOST_WIDE_INT len1 = 0;
6983 unsigned HOST_WIDE_INT len2 = 0;
6984 unsigned HOST_WIDE_INT len3 = 0;
6985
6986 const char *src_str1 = c_getstr (arg1, &len1);
6987 const char *src_str2 = c_getstr (arg2, &len2);
6988
6989 /* If neither string is a constant string, the call does not qualify. */
6990 if (!src_str1 && !src_str2)
6991 return NULL_RTX;
6992
6993 /* For strncmp, if the length is not a constant, the call does not qualify. */
6994 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6995 return NULL_RTX;
6996
6997 int const_str_n = 0;
6998 if (!len1)
6999 const_str_n = 2;
7000 else if (!len2)
7001 const_str_n = 1;
7002 else if (len2 > len1)
7003 const_str_n = 1;
7004 else
7005 const_str_n = 2;
7006
7007 gcc_checking_assert (const_str_n > 0);
7008 length = (const_str_n == 1) ? len1 : len2;
7009
7010 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7011 length = len3;
7012
7013 /* If the length of the comparison is larger than the threshold,
7014 do nothing. */
7015 if (length > (unsigned HOST_WIDE_INT)
7016 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7017 return NULL_RTX;
7018
7019 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7020
7021 /* Now, start the inline expansion of the call. */
7022 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7023 (const_str_n == 1) ? src_str1 : src_str2, length,
7024 const_str_n, mode);
7025 }
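
/* As a concrete sketch of what qualifies: at -O2, when the constant string is
   shorter than the builtin-string-cmp-inline-length parameter (an assumption
   about the tuning in effect), a call such as

     int cmp (const char *s) { return strcmp (s, "hi"); }

   is expanded by inline_string_cmp above into a short run of byte
   subtractions with early exits, while longer comparisons or calls with two
   non-constant arguments are left as library calls.  */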
7026
7027 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7028 represents the size of the first argument to that call, or VOIDmode
7029 if the argument is a pointer. IGNORE will be true if the result
7030 isn't used. */
7031 static rtx
7032 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7033 bool ignore)
7034 {
7035 rtx val, failsafe;
7036 unsigned nargs = call_expr_nargs (exp);
7037
7038 tree arg0 = CALL_EXPR_ARG (exp, 0);
7039
7040 if (mode == VOIDmode)
7041 {
7042 mode = TYPE_MODE (TREE_TYPE (arg0));
7043 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7044 }
7045
7046 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7047
7048 /* An optional second argument can be used as a failsafe value on
7049 some machines. If it isn't present, then the failsafe value is
7050 assumed to be 0. */
7051 if (nargs > 1)
7052 {
7053 tree arg1 = CALL_EXPR_ARG (exp, 1);
7054 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7055 }
7056 else
7057 failsafe = const0_rtx;
7058
7059 /* If the result isn't used, the behavior is undefined. It would be
7060 nice to emit a warning here, but path splitting means this might
7061 happen with legitimate code. So simply drop the builtin
7062 expansion in that case; we've handled any side-effects above. */
7063 if (ignore)
7064 return const0_rtx;
7065
7066 /* If we don't have a suitable target, create one to hold the result. */
7067 if (target == NULL || GET_MODE (target) != mode)
7068 target = gen_reg_rtx (mode);
7069
7070 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7071 val = convert_modes (mode, VOIDmode, val, false);
7072
7073 return targetm.speculation_safe_value (mode, target, val, failsafe);
7074 }
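
/* A sketch of the source pattern this expander supports (array and bound are
   illustrative):

     int load_guarded (int *a, unsigned idx, unsigned len)
     {
       if (idx < len)
         return a[__builtin_speculation_safe_value (idx, 0)];
       return 0;
     }

   The optional second argument is the failsafe value; on a target whose hook
   provides no speculation barrier, VAL is simply passed through.  */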
7075
7076 /* Expand an expression EXP that calls a built-in function,
7077 with result going to TARGET if that's convenient
7078 (and in mode MODE if that's convenient).
7079 SUBTARGET may be used as the target for computing one of EXP's operands.
7080 IGNORE is nonzero if the value is to be ignored. */
7081
7082 rtx
7083 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7084 int ignore)
7085 {
7086 tree fndecl = get_callee_fndecl (exp);
7087 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7088 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7089 int flags;
7090
7091 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7092 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7093
7094 /* When ASan is enabled, we don't want to expand some memory/string
7095 builtins and rely on libsanitizer's hooks. This allows us to avoid
7096 redundant checks and be sure that possible overflow will be detected
7097 by ASan. */
7098
7099 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7100 return expand_call (exp, target, ignore);
7101
7102 /* When not optimizing, generate calls to library functions for a certain
7103 set of builtins. */
7104 if (!optimize
7105 && !called_as_built_in (fndecl)
7106 && fcode != BUILT_IN_FORK
7107 && fcode != BUILT_IN_EXECL
7108 && fcode != BUILT_IN_EXECV
7109 && fcode != BUILT_IN_EXECLP
7110 && fcode != BUILT_IN_EXECLE
7111 && fcode != BUILT_IN_EXECVP
7112 && fcode != BUILT_IN_EXECVE
7113 && !ALLOCA_FUNCTION_CODE_P (fcode)
7114 && fcode != BUILT_IN_FREE)
7115 return expand_call (exp, target, ignore);
7116
7117 /* The built-in function expanders test for target == const0_rtx
7118 to determine whether the function's result will be ignored. */
7119 if (ignore)
7120 target = const0_rtx;
7121
7122 /* If the result of a pure or const built-in function is ignored, and
7123 none of its arguments are volatile, we can avoid expanding the
7124 built-in call and just evaluate the arguments for side-effects. */
7125 if (target == const0_rtx
7126 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7127 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7128 {
7129 bool volatilep = false;
7130 tree arg;
7131 call_expr_arg_iterator iter;
7132
7133 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7134 if (TREE_THIS_VOLATILE (arg))
7135 {
7136 volatilep = true;
7137 break;
7138 }
7139
7140 if (! volatilep)
7141 {
7142 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7143 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7144 return const0_rtx;
7145 }
7146 }
7147
7148 switch (fcode)
7149 {
7150 CASE_FLT_FN (BUILT_IN_FABS):
7151 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7152 case BUILT_IN_FABSD32:
7153 case BUILT_IN_FABSD64:
7154 case BUILT_IN_FABSD128:
7155 target = expand_builtin_fabs (exp, target, subtarget);
7156 if (target)
7157 return target;
7158 break;
7159
7160 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7161 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7162 target = expand_builtin_copysign (exp, target, subtarget);
7163 if (target)
7164 return target;
7165 break;
7166
7167 /* Just do a normal library call if we were unable to fold
7168 the values. */
7169 CASE_FLT_FN (BUILT_IN_CABS):
7170 break;
7171
7172 CASE_FLT_FN (BUILT_IN_FMA):
7173 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7174 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7175 if (target)
7176 return target;
7177 break;
7178
7179 CASE_FLT_FN (BUILT_IN_ILOGB):
7180 if (! flag_unsafe_math_optimizations)
7181 break;
7182 gcc_fallthrough ();
7183 CASE_FLT_FN (BUILT_IN_ISINF):
7184 CASE_FLT_FN (BUILT_IN_FINITE):
7185 case BUILT_IN_ISFINITE:
7186 case BUILT_IN_ISNORMAL:
7187 target = expand_builtin_interclass_mathfn (exp, target);
7188 if (target)
7189 return target;
7190 break;
7191
7192 CASE_FLT_FN (BUILT_IN_ICEIL):
7193 CASE_FLT_FN (BUILT_IN_LCEIL):
7194 CASE_FLT_FN (BUILT_IN_LLCEIL):
7195 CASE_FLT_FN (BUILT_IN_LFLOOR):
7196 CASE_FLT_FN (BUILT_IN_IFLOOR):
7197 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7198 target = expand_builtin_int_roundingfn (exp, target);
7199 if (target)
7200 return target;
7201 break;
7202
7203 CASE_FLT_FN (BUILT_IN_IRINT):
7204 CASE_FLT_FN (BUILT_IN_LRINT):
7205 CASE_FLT_FN (BUILT_IN_LLRINT):
7206 CASE_FLT_FN (BUILT_IN_IROUND):
7207 CASE_FLT_FN (BUILT_IN_LROUND):
7208 CASE_FLT_FN (BUILT_IN_LLROUND):
7209 target = expand_builtin_int_roundingfn_2 (exp, target);
7210 if (target)
7211 return target;
7212 break;
7213
7214 CASE_FLT_FN (BUILT_IN_POWI):
7215 target = expand_builtin_powi (exp, target);
7216 if (target)
7217 return target;
7218 break;
7219
7220 CASE_FLT_FN (BUILT_IN_CEXPI):
7221 target = expand_builtin_cexpi (exp, target);
7222 gcc_assert (target);
7223 return target;
7224
7225 CASE_FLT_FN (BUILT_IN_SIN):
7226 CASE_FLT_FN (BUILT_IN_COS):
7227 if (! flag_unsafe_math_optimizations)
7228 break;
7229 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7230 if (target)
7231 return target;
7232 break;
7233
7234 CASE_FLT_FN (BUILT_IN_SINCOS):
7235 if (! flag_unsafe_math_optimizations)
7236 break;
7237 target = expand_builtin_sincos (exp);
7238 if (target)
7239 return target;
7240 break;
7241
7242 case BUILT_IN_APPLY_ARGS:
7243 return expand_builtin_apply_args ();
7244
7245 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7246 FUNCTION with a copy of the parameters described by
7247 ARGUMENTS, and ARGSIZE. It returns a block of memory
7248 allocated on the stack into which is stored all the registers
7249 that might possibly be used for returning the result of a
7250 function. ARGUMENTS is the value returned by
7251 __builtin_apply_args. ARGSIZE is the number of bytes of
7252 arguments that must be copied. ??? How should this value be
7253 computed? We'll also need a safe worst case value for varargs
7254 functions. */
7255 case BUILT_IN_APPLY:
7256 if (!validate_arglist (exp, POINTER_TYPE,
7257 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7258 && !validate_arglist (exp, REFERENCE_TYPE,
7259 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7260 return const0_rtx;
7261 else
7262 {
7263 rtx ops[3];
7264
7265 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7266 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7267 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7268
7269 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7270 }
7271
7272 /* __builtin_return (RESULT) causes the function to return the
7273 value described by RESULT. RESULT is address of the block of
7274 memory returned by __builtin_apply. */
7275 case BUILT_IN_RETURN:
7276 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7277 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7278 return const0_rtx;
7279
7280 case BUILT_IN_SAVEREGS:
7281 return expand_builtin_saveregs ();
7282
7283 case BUILT_IN_VA_ARG_PACK:
7284 /* All valid uses of __builtin_va_arg_pack () are removed during
7285 inlining. */
7286 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7287 return const0_rtx;
7288
7289 case BUILT_IN_VA_ARG_PACK_LEN:
7290 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7291 inlining. */
7292 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7293 return const0_rtx;
7294
7295 /* Return the address of the first anonymous stack arg. */
7296 case BUILT_IN_NEXT_ARG:
7297 if (fold_builtin_next_arg (exp, false))
7298 return const0_rtx;
7299 return expand_builtin_next_arg ();
7300
7301 case BUILT_IN_CLEAR_CACHE:
7302 target = expand_builtin___clear_cache (exp);
7303 if (target)
7304 return target;
7305 break;
7306
7307 case BUILT_IN_CLASSIFY_TYPE:
7308 return expand_builtin_classify_type (exp);
7309
7310 case BUILT_IN_CONSTANT_P:
7311 return const0_rtx;
7312
7313 case BUILT_IN_FRAME_ADDRESS:
7314 case BUILT_IN_RETURN_ADDRESS:
7315 return expand_builtin_frame_address (fndecl, exp);
7316
7317 /* Returns the address of the area where the structure is returned.
7318 0 otherwise. */
7319 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7320 if (call_expr_nargs (exp) != 0
7321 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7322 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7323 return const0_rtx;
7324 else
7325 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7326
7327 CASE_BUILT_IN_ALLOCA:
7328 target = expand_builtin_alloca (exp);
7329 if (target)
7330 return target;
7331 break;
7332
7333 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7334 return expand_asan_emit_allocas_unpoison (exp);
7335
7336 case BUILT_IN_STACK_SAVE:
7337 return expand_stack_save ();
7338
7339 case BUILT_IN_STACK_RESTORE:
7340 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7341 return const0_rtx;
7342
7343 case BUILT_IN_BSWAP16:
7344 case BUILT_IN_BSWAP32:
7345 case BUILT_IN_BSWAP64:
7346 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7347 if (target)
7348 return target;
7349 break;
7350
7351 CASE_INT_FN (BUILT_IN_FFS):
7352 target = expand_builtin_unop (target_mode, exp, target,
7353 subtarget, ffs_optab);
7354 if (target)
7355 return target;
7356 break;
7357
7358 CASE_INT_FN (BUILT_IN_CLZ):
7359 target = expand_builtin_unop (target_mode, exp, target,
7360 subtarget, clz_optab);
7361 if (target)
7362 return target;
7363 break;
7364
7365 CASE_INT_FN (BUILT_IN_CTZ):
7366 target = expand_builtin_unop (target_mode, exp, target,
7367 subtarget, ctz_optab);
7368 if (target)
7369 return target;
7370 break;
7371
7372 CASE_INT_FN (BUILT_IN_CLRSB):
7373 target = expand_builtin_unop (target_mode, exp, target,
7374 subtarget, clrsb_optab);
7375 if (target)
7376 return target;
7377 break;
7378
7379 CASE_INT_FN (BUILT_IN_POPCOUNT):
7380 target = expand_builtin_unop (target_mode, exp, target,
7381 subtarget, popcount_optab);
7382 if (target)
7383 return target;
7384 break;
7385
7386 CASE_INT_FN (BUILT_IN_PARITY):
7387 target = expand_builtin_unop (target_mode, exp, target,
7388 subtarget, parity_optab);
7389 if (target)
7390 return target;
7391 break;
7392
7393 case BUILT_IN_STRLEN:
7394 target = expand_builtin_strlen (exp, target, target_mode);
7395 if (target)
7396 return target;
7397 break;
7398
7399 case BUILT_IN_STRNLEN:
7400 target = expand_builtin_strnlen (exp, target, target_mode);
7401 if (target)
7402 return target;
7403 break;
7404
7405 case BUILT_IN_STRCAT:
7406 target = expand_builtin_strcat (exp, target);
7407 if (target)
7408 return target;
7409 break;
7410
7411 case BUILT_IN_STRCPY:
7412 target = expand_builtin_strcpy (exp, target);
7413 if (target)
7414 return target;
7415 break;
7416
7417 case BUILT_IN_STRNCAT:
7418 target = expand_builtin_strncat (exp, target);
7419 if (target)
7420 return target;
7421 break;
7422
7423 case BUILT_IN_STRNCPY:
7424 target = expand_builtin_strncpy (exp, target);
7425 if (target)
7426 return target;
7427 break;
7428
7429 case BUILT_IN_STPCPY:
7430 target = expand_builtin_stpcpy (exp, target, mode);
7431 if (target)
7432 return target;
7433 break;
7434
7435 case BUILT_IN_STPNCPY:
7436 target = expand_builtin_stpncpy (exp, target);
7437 if (target)
7438 return target;
7439 break;
7440
7441 case BUILT_IN_MEMCHR:
7442 target = expand_builtin_memchr (exp, target);
7443 if (target)
7444 return target;
7445 break;
7446
7447 case BUILT_IN_MEMCPY:
7448 target = expand_builtin_memcpy (exp, target);
7449 if (target)
7450 return target;
7451 break;
7452
7453 case BUILT_IN_MEMMOVE:
7454 target = expand_builtin_memmove (exp, target);
7455 if (target)
7456 return target;
7457 break;
7458
7459 case BUILT_IN_MEMPCPY:
7460 target = expand_builtin_mempcpy (exp, target);
7461 if (target)
7462 return target;
7463 break;
7464
7465 case BUILT_IN_MEMSET:
7466 target = expand_builtin_memset (exp, target, mode);
7467 if (target)
7468 return target;
7469 break;
7470
7471 case BUILT_IN_BZERO:
7472 target = expand_builtin_bzero (exp);
7473 if (target)
7474 return target;
7475 break;
7476
7477 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7478 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7479 when changing it to a strcmp call. */
7480 case BUILT_IN_STRCMP_EQ:
7481 target = expand_builtin_memcmp (exp, target, true);
7482 if (target)
7483 return target;
7484
7485 /* Change this call back to a BUILT_IN_STRCMP. */
7486 TREE_OPERAND (exp, 1)
7487 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7488
7489 /* Delete the last parameter. */
7490 unsigned int i;
7491 vec<tree, va_gc> *arg_vec;
7492 vec_alloc (arg_vec, 2);
7493 for (i = 0; i < 2; i++)
7494 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7495 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7496 /* FALLTHROUGH */
7497
7498 case BUILT_IN_STRCMP:
7499 target = expand_builtin_strcmp (exp, target);
7500 if (target)
7501 return target;
7502 break;
7503
7504 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7505 back to a BUILT_IN_STRNCMP. */
7506 case BUILT_IN_STRNCMP_EQ:
7507 target = expand_builtin_memcmp (exp, target, true);
7508 if (target)
7509 return target;
7510
7511 /* Change it back to a BUILT_IN_STRNCMP. */
7512 TREE_OPERAND (exp, 1)
7513 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7514 /* FALLTHROUGH */
7515
7516 case BUILT_IN_STRNCMP:
7517 target = expand_builtin_strncmp (exp, target, mode);
7518 if (target)
7519 return target;
7520 break;
7521
7522 case BUILT_IN_BCMP:
7523 case BUILT_IN_MEMCMP:
7524 case BUILT_IN_MEMCMP_EQ:
7525 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7526 if (target)
7527 return target;
7528 if (fcode == BUILT_IN_MEMCMP_EQ)
7529 {
7530 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7531 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7532 }
7533 break;
7534
7535 case BUILT_IN_SETJMP:
7536 /* This should have been lowered to the builtins below. */
7537 gcc_unreachable ();
7538
7539 case BUILT_IN_SETJMP_SETUP:
7540 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7541 and the receiver label. */
7542 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7543 {
7544 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7545 VOIDmode, EXPAND_NORMAL);
7546 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7547 rtx_insn *label_r = label_rtx (label);
7548
7549 /* This is copied from the handling of non-local gotos. */
7550 expand_builtin_setjmp_setup (buf_addr, label_r);
7551 nonlocal_goto_handler_labels
7552 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7553 nonlocal_goto_handler_labels);
7554 /* ??? Do not let expand_label treat us as such since we would
7555 not want to be both on the list of non-local labels and on
7556 the list of forced labels. */
7557 FORCED_LABEL (label) = 0;
7558 return const0_rtx;
7559 }
7560 break;
7561
7562 case BUILT_IN_SETJMP_RECEIVER:
7563 /* __builtin_setjmp_receiver is passed the receiver label. */
7564 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7565 {
7566 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7567 rtx_insn *label_r = label_rtx (label);
7568
7569 expand_builtin_setjmp_receiver (label_r);
7570 return const0_rtx;
7571 }
7572 break;
7573
7574 /* __builtin_longjmp is passed a pointer to an array of five words.
7575 It's similar to the C library longjmp function but works with
7576 __builtin_setjmp above. */
7577 case BUILT_IN_LONGJMP:
7578 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7579 {
7580 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7581 VOIDmode, EXPAND_NORMAL);
7582 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7583
7584 if (value != const1_rtx)
7585 {
7586 error ("%<__builtin_longjmp%> second argument must be 1");
7587 return const0_rtx;
7588 }
7589
7590 expand_builtin_longjmp (buf_addr, value);
7591 return const0_rtx;
7592 }
7593 break;
7594
7595 case BUILT_IN_NONLOCAL_GOTO:
7596 target = expand_builtin_nonlocal_goto (exp);
7597 if (target)
7598 return target;
7599 break;
7600
7601 /* This updates the setjmp buffer that is its argument with the value
7602 of the current stack pointer. */
7603 case BUILT_IN_UPDATE_SETJMP_BUF:
7604 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7605 {
7606 rtx buf_addr
7607 = expand_normal (CALL_EXPR_ARG (exp, 0));
7608
7609 expand_builtin_update_setjmp_buf (buf_addr);
7610 return const0_rtx;
7611 }
7612 break;
7613
7614 case BUILT_IN_TRAP:
7615 expand_builtin_trap ();
7616 return const0_rtx;
7617
7618 case BUILT_IN_UNREACHABLE:
7619 expand_builtin_unreachable ();
7620 return const0_rtx;
7621
7622 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7623 case BUILT_IN_SIGNBITD32:
7624 case BUILT_IN_SIGNBITD64:
7625 case BUILT_IN_SIGNBITD128:
7626 target = expand_builtin_signbit (exp, target);
7627 if (target)
7628 return target;
7629 break;
7630
7631 /* Various hooks for the DWARF 2 __throw routine. */
7632 case BUILT_IN_UNWIND_INIT:
7633 expand_builtin_unwind_init ();
7634 return const0_rtx;
7635 case BUILT_IN_DWARF_CFA:
7636 return virtual_cfa_rtx;
7637 #ifdef DWARF2_UNWIND_INFO
7638 case BUILT_IN_DWARF_SP_COLUMN:
7639 return expand_builtin_dwarf_sp_column ();
7640 case BUILT_IN_INIT_DWARF_REG_SIZES:
7641 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7642 return const0_rtx;
7643 #endif
7644 case BUILT_IN_FROB_RETURN_ADDR:
7645 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7646 case BUILT_IN_EXTRACT_RETURN_ADDR:
7647 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7648 case BUILT_IN_EH_RETURN:
7649 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7650 CALL_EXPR_ARG (exp, 1));
7651 return const0_rtx;
7652 case BUILT_IN_EH_RETURN_DATA_REGNO:
7653 return expand_builtin_eh_return_data_regno (exp);
7654 case BUILT_IN_EXTEND_POINTER:
7655 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7656 case BUILT_IN_EH_POINTER:
7657 return expand_builtin_eh_pointer (exp);
7658 case BUILT_IN_EH_FILTER:
7659 return expand_builtin_eh_filter (exp);
7660 case BUILT_IN_EH_COPY_VALUES:
7661 return expand_builtin_eh_copy_values (exp);
7662
7663 case BUILT_IN_VA_START:
7664 return expand_builtin_va_start (exp);
7665 case BUILT_IN_VA_END:
7666 return expand_builtin_va_end (exp);
7667 case BUILT_IN_VA_COPY:
7668 return expand_builtin_va_copy (exp);
7669 case BUILT_IN_EXPECT:
7670 return expand_builtin_expect (exp, target);
7671 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7672 return expand_builtin_expect_with_probability (exp, target);
7673 case BUILT_IN_ASSUME_ALIGNED:
7674 return expand_builtin_assume_aligned (exp, target);
7675 case BUILT_IN_PREFETCH:
7676 expand_builtin_prefetch (exp);
7677 return const0_rtx;
7678
7679 case BUILT_IN_INIT_TRAMPOLINE:
7680 return expand_builtin_init_trampoline (exp, true);
7681 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7682 return expand_builtin_init_trampoline (exp, false);
7683 case BUILT_IN_ADJUST_TRAMPOLINE:
7684 return expand_builtin_adjust_trampoline (exp);
7685
7686 case BUILT_IN_INIT_DESCRIPTOR:
7687 return expand_builtin_init_descriptor (exp);
7688 case BUILT_IN_ADJUST_DESCRIPTOR:
7689 return expand_builtin_adjust_descriptor (exp);
7690
7691 case BUILT_IN_FORK:
7692 case BUILT_IN_EXECL:
7693 case BUILT_IN_EXECV:
7694 case BUILT_IN_EXECLP:
7695 case BUILT_IN_EXECLE:
7696 case BUILT_IN_EXECVP:
7697 case BUILT_IN_EXECVE:
7698 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7699 if (target)
7700 return target;
7701 break;
7702
7703 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7704 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7705 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7706 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7707 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7708 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7709 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7710 if (target)
7711 return target;
7712 break;
7713
7714 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7715 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7716 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7717 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7718 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7719 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7720 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7721 if (target)
7722 return target;
7723 break;
7724
7725 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7726 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7727 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7728 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7729 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7730 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7731 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7732 if (target)
7733 return target;
7734 break;
7735
7736 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7737 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7738 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7739 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7740 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7741 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7742 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7743 if (target)
7744 return target;
7745 break;
7746
7747 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7748 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7749 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7750 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7751 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7752 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7753 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7754 if (target)
7755 return target;
7756 break;
7757
7758 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7759 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7760 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7761 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7762 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7763 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7764 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7765 if (target)
7766 return target;
7767 break;
7768
7769 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7770 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7771 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7772 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7773 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7774 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7775 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7776 if (target)
7777 return target;
7778 break;
7779
7780 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7781 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7782 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7783 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7784 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7785 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7786 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7787 if (target)
7788 return target;
7789 break;
7790
7791 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7792 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7793 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7794 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7795 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7796 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7797 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7798 if (target)
7799 return target;
7800 break;
7801
7802 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7803 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7804 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7805 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7806 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7807 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7808 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7809 if (target)
7810 return target;
7811 break;
7812
7813 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7814 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7815 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7816 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7817 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7818 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7819 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7820 if (target)
7821 return target;
7822 break;
7823
7824 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7825 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7826 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7827 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7828 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7829 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7830 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7831 if (target)
7832 return target;
7833 break;
7834
7835 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7836 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7837 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7838 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7839 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7840 if (mode == VOIDmode)
7841 mode = TYPE_MODE (boolean_type_node);
7842 if (!target || !register_operand (target, mode))
7843 target = gen_reg_rtx (mode);
7844
7845 mode = get_builtin_sync_mode
7846 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7847 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7848 if (target)
7849 return target;
7850 break;
7851
7852 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7853 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7854 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7855 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7856 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7857 mode = get_builtin_sync_mode
7858 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7859 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7860 if (target)
7861 return target;
7862 break;
7863
7864 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7865 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7866 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7867 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7868 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7869 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7870 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7871 if (target)
7872 return target;
7873 break;
7874
7875 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7876 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7877 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7878 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7879 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7880 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7881 expand_builtin_sync_lock_release (mode, exp);
7882 return const0_rtx;
7883
7884 case BUILT_IN_SYNC_SYNCHRONIZE:
7885 expand_builtin_sync_synchronize ();
7886 return const0_rtx;
7887
7888 case BUILT_IN_ATOMIC_EXCHANGE_1:
7889 case BUILT_IN_ATOMIC_EXCHANGE_2:
7890 case BUILT_IN_ATOMIC_EXCHANGE_4:
7891 case BUILT_IN_ATOMIC_EXCHANGE_8:
7892 case BUILT_IN_ATOMIC_EXCHANGE_16:
7893 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7894 target = expand_builtin_atomic_exchange (mode, exp, target);
7895 if (target)
7896 return target;
7897 break;
7898
7899 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7900 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7901 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7902 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7903 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7904 {
7905 unsigned int nargs, z;
7906 vec<tree, va_gc> *vec;
7907
7908 mode =
7909 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7910 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7911 if (target)
7912 return target;
7913
7914 /* If this is turned into an external library call, the weak parameter
7915 must be dropped to match the expected parameter list. */
7916 nargs = call_expr_nargs (exp);
7917 vec_alloc (vec, nargs - 1);
7918 for (z = 0; z < 3; z++)
7919 vec->quick_push (CALL_EXPR_ARG (exp, z));
7920 /* Skip the boolean weak parameter. */
7921 for (z = 4; z < 6; z++)
7922 vec->quick_push (CALL_EXPR_ARG (exp, z));
7923 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7924 break;
7925 }
7926
7927 case BUILT_IN_ATOMIC_LOAD_1:
7928 case BUILT_IN_ATOMIC_LOAD_2:
7929 case BUILT_IN_ATOMIC_LOAD_4:
7930 case BUILT_IN_ATOMIC_LOAD_8:
7931 case BUILT_IN_ATOMIC_LOAD_16:
7932 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7933 target = expand_builtin_atomic_load (mode, exp, target);
7934 if (target)
7935 return target;
7936 break;
7937
7938 case BUILT_IN_ATOMIC_STORE_1:
7939 case BUILT_IN_ATOMIC_STORE_2:
7940 case BUILT_IN_ATOMIC_STORE_4:
7941 case BUILT_IN_ATOMIC_STORE_8:
7942 case BUILT_IN_ATOMIC_STORE_16:
7943 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7944 target = expand_builtin_atomic_store (mode, exp);
7945 if (target)
7946 return const0_rtx;
7947 break;
7948
7949 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7950 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7951 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7952 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7953 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7954 {
7955 enum built_in_function lib;
7956 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7957 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7958 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7959 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7960 ignore, lib);
7961 if (target)
7962 return target;
7963 break;
7964 }
7965 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7966 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7967 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7968 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7969 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7970 {
7971 enum built_in_function lib;
7972 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7973 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7974 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7975 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7976 ignore, lib);
7977 if (target)
7978 return target;
7979 break;
7980 }
7981 case BUILT_IN_ATOMIC_AND_FETCH_1:
7982 case BUILT_IN_ATOMIC_AND_FETCH_2:
7983 case BUILT_IN_ATOMIC_AND_FETCH_4:
7984 case BUILT_IN_ATOMIC_AND_FETCH_8:
7985 case BUILT_IN_ATOMIC_AND_FETCH_16:
7986 {
7987 enum built_in_function lib;
7988 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7989 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7990 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7991 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7992 ignore, lib);
7993 if (target)
7994 return target;
7995 break;
7996 }
7997 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7998 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7999 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8000 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8001 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8002 {
8003 enum built_in_function lib;
8004 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8005 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8006 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8007 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8008 ignore, lib);
8009 if (target)
8010 return target;
8011 break;
8012 }
8013 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8014 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8015 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8016 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8017 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8018 {
8019 enum built_in_function lib;
8020 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8021 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8022 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8023 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8024 ignore, lib);
8025 if (target)
8026 return target;
8027 break;
8028 }
8029 case BUILT_IN_ATOMIC_OR_FETCH_1:
8030 case BUILT_IN_ATOMIC_OR_FETCH_2:
8031 case BUILT_IN_ATOMIC_OR_FETCH_4:
8032 case BUILT_IN_ATOMIC_OR_FETCH_8:
8033 case BUILT_IN_ATOMIC_OR_FETCH_16:
8034 {
8035 enum built_in_function lib;
8036 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8037 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8038 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8039 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8040 ignore, lib);
8041 if (target)
8042 return target;
8043 break;
8044 }
8045 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8046 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8047 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8048 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8049 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8050 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8051 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8052 ignore, BUILT_IN_NONE);
8053 if (target)
8054 return target;
8055 break;
8056
8057 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8058 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8059 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8060 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8061 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8062 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8063 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8064 ignore, BUILT_IN_NONE);
8065 if (target)
8066 return target;
8067 break;
8068
8069 case BUILT_IN_ATOMIC_FETCH_AND_1:
8070 case BUILT_IN_ATOMIC_FETCH_AND_2:
8071 case BUILT_IN_ATOMIC_FETCH_AND_4:
8072 case BUILT_IN_ATOMIC_FETCH_AND_8:
8073 case BUILT_IN_ATOMIC_FETCH_AND_16:
8074 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8075 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8076 ignore, BUILT_IN_NONE);
8077 if (target)
8078 return target;
8079 break;
8080
8081 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8082 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8083 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8084 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8085 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8086 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8087 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8088 ignore, BUILT_IN_NONE);
8089 if (target)
8090 return target;
8091 break;
8092
8093 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8094 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8095 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8096 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8097 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8098 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8099 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8100 ignore, BUILT_IN_NONE);
8101 if (target)
8102 return target;
8103 break;
8104
8105 case BUILT_IN_ATOMIC_FETCH_OR_1:
8106 case BUILT_IN_ATOMIC_FETCH_OR_2:
8107 case BUILT_IN_ATOMIC_FETCH_OR_4:
8108 case BUILT_IN_ATOMIC_FETCH_OR_8:
8109 case BUILT_IN_ATOMIC_FETCH_OR_16:
8110 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8111 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8112 ignore, BUILT_IN_NONE);
8113 if (target)
8114 return target;
8115 break;
8116
8117 case BUILT_IN_ATOMIC_TEST_AND_SET:
8118 return expand_builtin_atomic_test_and_set (exp, target);
8119
8120 case BUILT_IN_ATOMIC_CLEAR:
8121 return expand_builtin_atomic_clear (exp);
8122
8123 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8124 return expand_builtin_atomic_always_lock_free (exp);
8125
8126 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8127 target = expand_builtin_atomic_is_lock_free (exp);
8128 if (target)
8129 return target;
8130 break;
8131
8132 case BUILT_IN_ATOMIC_THREAD_FENCE:
8133 expand_builtin_atomic_thread_fence (exp);
8134 return const0_rtx;
8135
8136 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8137 expand_builtin_atomic_signal_fence (exp);
8138 return const0_rtx;
8139
8140 case BUILT_IN_OBJECT_SIZE:
8141 return expand_builtin_object_size (exp);
8142
8143 case BUILT_IN_MEMCPY_CHK:
8144 case BUILT_IN_MEMPCPY_CHK:
8145 case BUILT_IN_MEMMOVE_CHK:
8146 case BUILT_IN_MEMSET_CHK:
8147 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8148 if (target)
8149 return target;
8150 break;
8151
8152 case BUILT_IN_STRCPY_CHK:
8153 case BUILT_IN_STPCPY_CHK:
8154 case BUILT_IN_STRNCPY_CHK:
8155 case BUILT_IN_STPNCPY_CHK:
8156 case BUILT_IN_STRCAT_CHK:
8157 case BUILT_IN_STRNCAT_CHK:
8158 case BUILT_IN_SNPRINTF_CHK:
8159 case BUILT_IN_VSNPRINTF_CHK:
8160 maybe_emit_chk_warning (exp, fcode);
8161 break;
8162
8163 case BUILT_IN_SPRINTF_CHK:
8164 case BUILT_IN_VSPRINTF_CHK:
8165 maybe_emit_sprintf_chk_warning (exp, fcode);
8166 break;
8167
8168 case BUILT_IN_FREE:
8169 if (warn_free_nonheap_object)
8170 maybe_emit_free_warning (exp);
8171 break;
8172
8173 case BUILT_IN_THREAD_POINTER:
8174 return expand_builtin_thread_pointer (exp, target);
8175
8176 case BUILT_IN_SET_THREAD_POINTER:
8177 expand_builtin_set_thread_pointer (exp);
8178 return const0_rtx;
8179
8180 case BUILT_IN_ACC_ON_DEVICE:
8181 /* Do library call, if we failed to expand the builtin when
8182 folding. */
8183 break;
8184
8185 case BUILT_IN_GOACC_PARLEVEL_ID:
8186 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8187 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8188
8189 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8190 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8191
8192 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8193 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8194 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8195 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8196 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8197 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8198 return expand_speculation_safe_value (mode, exp, target, ignore);
8199
8200 default: /* just do library call, if unknown builtin */
8201 break;
8202 }
8203
8204 /* The switch statement above can drop through to cause the function
8205 to be called normally. */
8206 return expand_call (exp, target, ignore);
8207 }
8208
8209 /* Determine whether a tree node represents a call to a built-in
8210 function. If the tree T is a call to a built-in function with
8211 the right number of arguments of the appropriate types, return
8212 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8213 Otherwise the return value is END_BUILTINS. */
8214
8215 enum built_in_function
8216 builtin_mathfn_code (const_tree t)
8217 {
8218 const_tree fndecl, arg, parmlist;
8219 const_tree argtype, parmtype;
8220 const_call_expr_arg_iterator iter;
8221
8222 if (TREE_CODE (t) != CALL_EXPR)
8223 return END_BUILTINS;
8224
8225 fndecl = get_callee_fndecl (t);
8226 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8227 return END_BUILTINS;
8228
8229 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8230 init_const_call_expr_arg_iterator (t, &iter);
8231 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8232 {
8233 /* If a function doesn't take a variable number of arguments,
8234 the last element in the list will have type `void'. */
8235 parmtype = TREE_VALUE (parmlist);
8236 if (VOID_TYPE_P (parmtype))
8237 {
8238 if (more_const_call_expr_args_p (&iter))
8239 return END_BUILTINS;
8240 return DECL_FUNCTION_CODE (fndecl);
8241 }
8242
8243 if (! more_const_call_expr_args_p (&iter))
8244 return END_BUILTINS;
8245
8246 arg = next_const_call_expr_arg (&iter);
8247 argtype = TREE_TYPE (arg);
8248
8249 if (SCALAR_FLOAT_TYPE_P (parmtype))
8250 {
8251 if (! SCALAR_FLOAT_TYPE_P (argtype))
8252 return END_BUILTINS;
8253 }
8254 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8255 {
8256 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8257 return END_BUILTINS;
8258 }
8259 else if (POINTER_TYPE_P (parmtype))
8260 {
8261 if (! POINTER_TYPE_P (argtype))
8262 return END_BUILTINS;
8263 }
8264 else if (INTEGRAL_TYPE_P (parmtype))
8265 {
8266 if (! INTEGRAL_TYPE_P (argtype))
8267 return END_BUILTINS;
8268 }
8269 else
8270 return END_BUILTINS;
8271 }
8272
8273 /* Variable-length argument list. */
8274 return DECL_FUNCTION_CODE (fndecl);
8275 }
8276
8277 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8278 evaluate to a constant. */
8279
8280 static tree
8281 fold_builtin_constant_p (tree arg)
8282 {
8283 /* We return 1 for a numeric type that's known to be a constant
8284 value at compile-time or for an aggregate type that's a
8285 literal constant. */
8286 STRIP_NOPS (arg);
8287
8288 /* If we know this is a constant, emit the constant of one. */
8289 if (CONSTANT_CLASS_P (arg)
8290 || (TREE_CODE (arg) == CONSTRUCTOR
8291 && TREE_CONSTANT (arg)))
8292 return integer_one_node;
8293 if (TREE_CODE (arg) == ADDR_EXPR)
8294 {
8295 tree op = TREE_OPERAND (arg, 0);
8296 if (TREE_CODE (op) == STRING_CST
8297 || (TREE_CODE (op) == ARRAY_REF
8298 && integer_zerop (TREE_OPERAND (op, 1))
8299 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8300 return integer_one_node;
8301 }
8302
8303 /* If this expression has side effects, show we don't know it to be a
8304 constant. Likewise if it's a pointer or aggregate type since in
8305 those cases we only want literals, since those are only optimized
8306 when generating RTL, not later.
8307 And finally, if we are compiling an initializer, not code, we
8308 need to return a definite result now; there's not going to be any
8309 more optimization done. */
8310 if (TREE_SIDE_EFFECTS (arg)
8311 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8312 || POINTER_TYPE_P (TREE_TYPE (arg))
8313 || cfun == 0
8314 || folding_initializer
8315 || force_folding_builtin_constant_p)
8316 return integer_zero_node;
8317
8318 return NULL_TREE;
8319 }
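
/* Illustrative folds under the rules above, assuming the arguments have no
   side effects and folding happens inside a function body (cfun != 0):

     __builtin_constant_p (42)        -> 1  (constant class node)
     __builtin_constant_p ("abc")     -> 1  (address of a STRING_CST)
     __builtin_constant_p (ptr_var)   -> 0  (pointer type; only literals count)
     __builtin_constant_p (x + y)     -> NULL_TREE, i.e. deferred until later
                                         folding can decide either way  */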
8320
8321 /* Create builtin_expect or builtin_expect_with_probability
8322 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8323 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
8324 argument; builtin_expect_with_probability instead uses the third argument
8325 as the PROBABILITY value. */
8326
8327 static tree
8328 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8329 tree predictor, tree probability)
8330 {
8331 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8332
8333 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8334 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8335 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8336 ret_type = TREE_TYPE (TREE_TYPE (fn));
8337 pred_type = TREE_VALUE (arg_types);
8338 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8339
8340 pred = fold_convert_loc (loc, pred_type, pred);
8341 expected = fold_convert_loc (loc, expected_type, expected);
8342
8343 if (probability)
8344 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8345 else
8346 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8347 predictor);
8348
8349 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8350 build_int_cst (ret_type, 0));
8351 }
8352
8353 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8354 NULL_TREE if no simplification is possible. */
8355
8356 tree
8357 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8358 tree arg3)
8359 {
8360 tree inner, fndecl, inner_arg0;
8361 enum tree_code code;
8362
8363 /* Distribute the expected value over short-circuiting operators.
8364 See through the cast from truthvalue_type_node to long. */
8365 inner_arg0 = arg0;
8366 while (CONVERT_EXPR_P (inner_arg0)
8367 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8368 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8369 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8370
8371 /* If this is a builtin_expect within a builtin_expect keep the
8372 inner one. See through a comparison against a constant. It
8373 might have been added to create a truthvalue. */
8374 inner = inner_arg0;
8375
8376 if (COMPARISON_CLASS_P (inner)
8377 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8378 inner = TREE_OPERAND (inner, 0);
8379
8380 if (TREE_CODE (inner) == CALL_EXPR
8381 && (fndecl = get_callee_fndecl (inner))
8382 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8383 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8384 return arg0;
8385
8386 inner = inner_arg0;
8387 code = TREE_CODE (inner);
8388 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8389 {
8390 tree op0 = TREE_OPERAND (inner, 0);
8391 tree op1 = TREE_OPERAND (inner, 1);
8392 arg1 = save_expr (arg1);
8393
8394 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8395 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8396 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8397
8398 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8399 }
8400
8401 /* If the argument isn't invariant then there's nothing else we can do. */
8402 if (!TREE_CONSTANT (inner_arg0))
8403 return NULL_TREE;
8404
8405 /* If we expect that a comparison against the argument will fold to
8406 a constant return the constant. In practice, this means a true
8407 constant or the address of a non-weak symbol. */
8408 inner = inner_arg0;
8409 STRIP_NOPS (inner);
8410 if (TREE_CODE (inner) == ADDR_EXPR)
8411 {
8412 do
8413 {
8414 inner = TREE_OPERAND (inner, 0);
8415 }
8416 while (TREE_CODE (inner) == COMPONENT_REF
8417 || TREE_CODE (inner) == ARRAY_REF);
8418 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8419 return NULL_TREE;
8420 }
8421
8422 /* Otherwise, ARG0 already has the proper type for the return value. */
8423 return arg0;
8424 }
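
/* The short-circuit distribution above means that, for example,

     __builtin_expect (a && b, 1)

   is rewritten (modulo conversions) into the equivalent of

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so the prediction attaches to each arm of the short-circuit rather than to
   the combined truth value.  */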
8425
8426 /* Fold a call to __builtin_classify_type with argument ARG. */
8427
8428 static tree
8429 fold_builtin_classify_type (tree arg)
8430 {
8431 if (arg == 0)
8432 return build_int_cst (integer_type_node, no_type_class);
8433
8434 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8435 }
8436
8437 /* Fold a call to __builtin_strlen with argument ARG. */
8438
8439 static tree
8440 fold_builtin_strlen (location_t loc, tree type, tree arg)
8441 {
8442 if (!validate_arg (arg, POINTER_TYPE))
8443 return NULL_TREE;
8444 else
8445 {
8446 tree nonstr = NULL_TREE;
8447 tree len = c_strlen (arg, 0, &nonstr);
8448
8449 if (len)
8450 return fold_convert_loc (loc, type, len);
8451
8452 if (!nonstr)
8453 c_strlen (arg, 1, &nonstr);
8454
8455 if (nonstr)
8456 {
8457 if (EXPR_HAS_LOCATION (arg))
8458 loc = EXPR_LOCATION (arg);
8459 else if (loc == UNKNOWN_LOCATION)
8460 loc = input_location;
8461 warn_string_no_nul (loc, "strlen", arg, nonstr);
8462 }
8463
8464 return NULL_TREE;
8465 }
8466 }
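
/* For example, strlen ("abc") folds to the constant 3 via c_strlen, while
   strlen of a character array whose visible initializer lacks a terminating
   nul is left unfolded and triggers the warning from warn_string_no_nul
   above.  */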
8467
8468 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8469
8470 static tree
8471 fold_builtin_inf (location_t loc, tree type, int warn)
8472 {
8473 REAL_VALUE_TYPE real;
8474
8475 /* __builtin_inff is intended to be usable to define INFINITY on all
8476 targets. If an infinity is not available, INFINITY expands "to a
8477 positive constant of type float that overflows at translation
8478 time", footnote "In this case, using INFINITY will violate the
8479 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8480 Thus we pedwarn to ensure this constraint violation is
8481 diagnosed. */
8482 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8483 pedwarn (loc, 0, "target format does not support infinity");
8484
8485 real_inf (&real);
8486 return build_real (type, real);
8487 }
8488
8489 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8490 NULL_TREE if no simplification can be made. */
8491
8492 static tree
8493 fold_builtin_sincos (location_t loc,
8494 tree arg0, tree arg1, tree arg2)
8495 {
8496 tree type;
8497 tree fndecl, call = NULL_TREE;
8498
8499 if (!validate_arg (arg0, REAL_TYPE)
8500 || !validate_arg (arg1, POINTER_TYPE)
8501 || !validate_arg (arg2, POINTER_TYPE))
8502 return NULL_TREE;
8503
8504 type = TREE_TYPE (arg0);
8505
8506 	  /* Canonicalize sincos to cexpi; look up the matching cexpi builtin.  */
8507 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8508 if (fn == END_BUILTINS)
8509 return NULL_TREE;
8510
8511 	  /* Calculate the result when the argument is a constant.  */
8512 if (TREE_CODE (arg0) == REAL_CST)
8513 {
8514 tree complex_type = build_complex_type (type);
8515 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8516 }
8517 if (!call)
8518 {
8519 if (!targetm.libc_has_function (function_c99_math_complex)
8520 || !builtin_decl_implicit_p (fn))
8521 return NULL_TREE;
8522 fndecl = builtin_decl_explicit (fn);
8523 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8524 call = builtin_save_expr (call);
8525 }
8526
8527 tree ptype = build_pointer_type (type);
8528 arg1 = fold_convert (ptype, arg1);
8529 arg2 = fold_convert (ptype, arg2);
8530 return build2 (COMPOUND_EXPR, void_type_node,
8531 build2 (MODIFY_EXPR, void_type_node,
8532 build_fold_indirect_ref_loc (loc, arg1),
8533 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8534 build2 (MODIFY_EXPR, void_type_node,
8535 build_fold_indirect_ref_loc (loc, arg2),
8536 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8537 }
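
/* For illustration, the effect of the canonicalization above on
   hypothetical user code (a sketch; cexpi is the internal builtin named
   in the code above, and the rewrite assumes the target libc provides
   C99 complex math):

     void f (double x, double *s, double *c)
     {
       sincos (x, s, c);
       // Rewritten roughly into
       //   tmp = __builtin_cexpi (x);   // wrapped in a save_expr so X
       //   *s  = __imag__ tmp;          // is evaluated only once
       //   *c  = __real__ tmp;
     }
  */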
8538
8539 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8540 Return NULL_TREE if no simplification can be made. */
8541
8542 static tree
8543 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8544 {
8545 if (!validate_arg (arg1, POINTER_TYPE)
8546 || !validate_arg (arg2, POINTER_TYPE)
8547 || !validate_arg (len, INTEGER_TYPE))
8548 return NULL_TREE;
8549
8550 /* If the LEN parameter is zero, return zero. */
8551 if (integer_zerop (len))
8552 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8553 arg1, arg2);
8554
8555 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8556 if (operand_equal_p (arg1, arg2, 0))
8557 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8558
8559 	  /* If the LEN parameter is one, return an expression corresponding to
8560 	     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8561 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8562 {
8563 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8564 tree cst_uchar_ptr_node
8565 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8566
8567 tree ind1
8568 = fold_convert_loc (loc, integer_type_node,
8569 build1 (INDIRECT_REF, cst_uchar_node,
8570 fold_convert_loc (loc,
8571 cst_uchar_ptr_node,
8572 arg1)));
8573 tree ind2
8574 = fold_convert_loc (loc, integer_type_node,
8575 build1 (INDIRECT_REF, cst_uchar_node,
8576 fold_convert_loc (loc,
8577 cst_uchar_ptr_node,
8578 arg2)));
8579 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8580 }
8581
8582 return NULL_TREE;
8583 }
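
/* For illustration, hypothetical user code for the folds above (a
   sketch; assumes standard memcmp semantics):

     #include <string.h>

     int f0 (const char *p, const char *q)
     {
       return memcmp (p, q, 0);   // LEN == 0: folds to 0, while still
     }                            // evaluating P and Q

     int f1 (const char *p, const char *q)
     {
       return memcmp (p, q, 1);   // LEN == 1: folds to the byte difference
     }                            // *(const unsigned char *) p
                                  //   - *(const unsigned char *) q

     int f2 (const char *p, size_t n)
     {
       return memcmp (p, p, n);   // identical operands: folds to 0,
     }                            // still evaluating N
  */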
8584
8585 /* Fold a call to builtin isascii with argument ARG. */
8586
8587 static tree
8588 fold_builtin_isascii (location_t loc, tree arg)
8589 {
8590 if (!validate_arg (arg, INTEGER_TYPE))
8591 return NULL_TREE;
8592 else
8593 {
8594 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8595 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8596 build_int_cst (integer_type_node,
8597 ~ (unsigned HOST_WIDE_INT) 0x7f));
8598 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8599 arg, integer_zero_node);
8600 }
8601 }
8602
8603 /* Fold a call to builtin toascii with argument ARG. */
8604
8605 static tree
8606 fold_builtin_toascii (location_t loc, tree arg)
8607 {
8608 if (!validate_arg (arg, INTEGER_TYPE))
8609 return NULL_TREE;
8610
8611 /* Transform toascii(c) -> (c & 0x7f). */
8612 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8613 build_int_cst (integer_type_node, 0x7f));
8614 }
8615
8616 /* Fold a call to builtin isdigit with argument ARG. */
8617
8618 static tree
8619 fold_builtin_isdigit (location_t loc, tree arg)
8620 {
8621 if (!validate_arg (arg, INTEGER_TYPE))
8622 return NULL_TREE;
8623 else
8624 {
8625 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8626 /* According to the C standard, isdigit is unaffected by locale.
8627 However, it definitely is affected by the target character set. */
8628 unsigned HOST_WIDE_INT target_digit0
8629 = lang_hooks.to_target_charset ('0');
8630
8631 if (target_digit0 == 0)
8632 return NULL_TREE;
8633
8634 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8635 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8636 build_int_cst (unsigned_type_node, target_digit0));
8637 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8638 build_int_cst (unsigned_type_node, 9));
8639 }
8640 }
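
/* For illustration, why the single unsigned comparison above is a
   correct range test (a worked sketch, assuming an execution character
   set where '0' is 48):

     isdigit (c)  ->  (unsigned) c - 48 <= 9

     c == '7' (55):  55 - 48 == 7, which is <= 9            -> true
     c == '/' (47):  47 - 48 wraps around to UINT_MAX, > 9  -> false
     c == 'A' (65):  65 - 48 == 17, which is > 9            -> false

   Characters below '0' wrap to huge unsigned values, so one compare
   replaces the usual pair of range checks.  */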
8641
8642 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8643
8644 static tree
8645 fold_builtin_fabs (location_t loc, tree arg, tree type)
8646 {
8647 if (!validate_arg (arg, REAL_TYPE))
8648 return NULL_TREE;
8649
8650 arg = fold_convert_loc (loc, type, arg);
8651 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8652 }
8653
8654 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8655
8656 static tree
8657 fold_builtin_abs (location_t loc, tree arg, tree type)
8658 {
8659 if (!validate_arg (arg, INTEGER_TYPE))
8660 return NULL_TREE;
8661
8662 arg = fold_convert_loc (loc, type, arg);
8663 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8664 }
8665
8666 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8667
8668 static tree
8669 fold_builtin_carg (location_t loc, tree arg, tree type)
8670 {
8671 if (validate_arg (arg, COMPLEX_TYPE)
8672 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8673 {
8674 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8675
8676 if (atan2_fn)
8677 {
8678 tree new_arg = builtin_save_expr (arg);
8679 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8680 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8681 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8682 }
8683 }
8684
8685 return NULL_TREE;
8686 }
8687
8688 /* Fold a call to builtin frexp; we can assume the base is 2.  */
8689
8690 static tree
8691 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8692 {
8693 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8694 return NULL_TREE;
8695
8696 STRIP_NOPS (arg0);
8697
8698 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8699 return NULL_TREE;
8700
8701 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8702
8703 /* Proceed if a valid pointer type was passed in. */
8704 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8705 {
8706 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8707 tree frac, exp;
8708
8709 switch (value->cl)
8710 {
8711 case rvc_zero:
8712 /* For +-0, return (*exp = 0, +-0). */
8713 exp = integer_zero_node;
8714 frac = arg0;
8715 break;
8716 case rvc_nan:
8717 case rvc_inf:
8718 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8719 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8720 case rvc_normal:
8721 {
8722 /* Since the frexp function always expects base 2, and in
8723 GCC normalized significands are already in the range
8724 [0.5, 1.0), we have exactly what frexp wants. */
8725 REAL_VALUE_TYPE frac_rvt = *value;
8726 SET_REAL_EXP (&frac_rvt, 0);
8727 frac = build_real (rettype, frac_rvt);
8728 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8729 }
8730 break;
8731 default:
8732 gcc_unreachable ();
8733 }
8734
8735 	      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8736 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8737 TREE_SIDE_EFFECTS (arg1) = 1;
8738 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8739 }
8740
8741 return NULL_TREE;
8742 }
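
/* For illustration, a worked constant case for the folding above (a
   sketch following the usual frexp contract):

     #include <math.h>

     int e;                             // hypothetical user code
     double m = frexp (12.0, &e);
     // 12.0 == 0.75 * 2**4 and GCC keeps significands in [0.5, 1.0),
     // so the call folds to the pair (e = 4, 0.75), i.e. m == 0.75.
     // frexp (0.0, &e) folds to (e = 0, 0.0); for +-Inf and NaN only the
     // return value is folded, since the stored exponent is unspecified.
  */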
8743
8744 /* Fold a call to builtin modf. */
8745
8746 static tree
8747 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8748 {
8749 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8750 return NULL_TREE;
8751
8752 STRIP_NOPS (arg0);
8753
8754 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8755 return NULL_TREE;
8756
8757 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8758
8759 /* Proceed if a valid pointer type was passed in. */
8760 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8761 {
8762 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8763 REAL_VALUE_TYPE trunc, frac;
8764
8765 switch (value->cl)
8766 {
8767 case rvc_nan:
8768 case rvc_zero:
8769 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8770 trunc = frac = *value;
8771 break;
8772 case rvc_inf:
8773 /* For +-Inf, return (*arg1 = arg0, +-0). */
8774 frac = dconst0;
8775 frac.sign = value->sign;
8776 trunc = *value;
8777 break;
8778 case rvc_normal:
8779 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8780 real_trunc (&trunc, VOIDmode, value);
8781 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8782 /* If the original number was negative and already
8783 integral, then the fractional part is -0.0. */
8784 if (value->sign && frac.cl == rvc_zero)
8785 frac.sign = value->sign;
8786 break;
8787 }
8788
8789 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8790 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8791 build_real (rettype, trunc));
8792 TREE_SIDE_EFFECTS (arg1) = 1;
8793 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8794 build_real (rettype, frac));
8795 }
8796
8797 return NULL_TREE;
8798 }
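
/* For illustration, worked constant cases for the folding above (a
   sketch following the modf contract):

     #include <math.h>

     double ip;                          // hypothetical user code
     modf (2.75, &ip);                   // folds to (ip = 2.0, 0.75)
     modf (-3.0, &ip);                   // folds to (ip = -3.0, -0.0):
                                         //   negative and already integral,
                                         //   so the fraction keeps the sign
     modf (-__builtin_inf (), &ip);      // folds to (ip = -Inf, -0.0)
  */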
8799
8800 /* Given a location LOC, an interclass builtin function decl FNDECL
8801    and its single argument ARG, return a folded expression computing
8802    the same, or NULL_TREE if we either couldn't or didn't want to fold
8803    (the latter happens if there's an RTL instruction available).  */
8804
8805 static tree
8806 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8807 {
8808 machine_mode mode;
8809
8810 if (!validate_arg (arg, REAL_TYPE))
8811 return NULL_TREE;
8812
8813 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8814 return NULL_TREE;
8815
8816 mode = TYPE_MODE (TREE_TYPE (arg));
8817
8818 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8819
8820 /* If there is no optab, try generic code. */
8821 switch (DECL_FUNCTION_CODE (fndecl))
8822 {
8823 tree result;
8824
8825 CASE_FLT_FN (BUILT_IN_ISINF):
8826 {
8827 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8828 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8829 tree type = TREE_TYPE (arg);
8830 REAL_VALUE_TYPE r;
8831 char buf[128];
8832
8833 if (is_ibm_extended)
8834 {
8835 /* NaN and Inf are encoded in the high-order double value
8836 only. The low-order value is not significant. */
8837 type = double_type_node;
8838 mode = DFmode;
8839 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8840 }
8841 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8842 real_from_string (&r, buf);
8843 result = build_call_expr (isgr_fn, 2,
8844 fold_build1_loc (loc, ABS_EXPR, type, arg),
8845 build_real (type, r));
8846 return result;
8847 }
8848 CASE_FLT_FN (BUILT_IN_FINITE):
8849 case BUILT_IN_ISFINITE:
8850 {
8851 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8852 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8853 tree type = TREE_TYPE (arg);
8854 REAL_VALUE_TYPE r;
8855 char buf[128];
8856
8857 if (is_ibm_extended)
8858 {
8859 /* NaN and Inf are encoded in the high-order double value
8860 only. The low-order value is not significant. */
8861 type = double_type_node;
8862 mode = DFmode;
8863 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8864 }
8865 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8866 real_from_string (&r, buf);
8867 result = build_call_expr (isle_fn, 2,
8868 fold_build1_loc (loc, ABS_EXPR, type, arg),
8869 build_real (type, r));
8870 /*result = fold_build2_loc (loc, UNGT_EXPR,
8871 TREE_TYPE (TREE_TYPE (fndecl)),
8872 fold_build1_loc (loc, ABS_EXPR, type, arg),
8873 build_real (type, r));
8874 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8875 TREE_TYPE (TREE_TYPE (fndecl)),
8876 result);*/
8877 return result;
8878 }
8879 case BUILT_IN_ISNORMAL:
8880 {
8881 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8882 islessequal(fabs(x),DBL_MAX). */
8883 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8884 tree type = TREE_TYPE (arg);
8885 tree orig_arg, max_exp, min_exp;
8886 machine_mode orig_mode = mode;
8887 REAL_VALUE_TYPE rmax, rmin;
8888 char buf[128];
8889
8890 orig_arg = arg = builtin_save_expr (arg);
8891 if (is_ibm_extended)
8892 {
8893 /* Use double to test the normal range of IBM extended
8894 precision. Emin for IBM extended precision is
8895 different to emin for IEEE double, being 53 higher
8896 since the low double exponent is at least 53 lower
8897 than the high double exponent. */
8898 type = double_type_node;
8899 mode = DFmode;
8900 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8901 }
8902 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8903
8904 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8905 real_from_string (&rmax, buf);
8906 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8907 real_from_string (&rmin, buf);
8908 max_exp = build_real (type, rmax);
8909 min_exp = build_real (type, rmin);
8910
8911 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8912 if (is_ibm_extended)
8913 {
8914 /* Testing the high end of the range is done just using
8915 the high double, using the same test as isfinite().
8916 For the subnormal end of the range we first test the
8917 high double, then if its magnitude is equal to the
8918 limit of 0x1p-969, we test whether the low double is
8919 non-zero and opposite sign to the high double. */
8920 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8921 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8922 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8923 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8924 arg, min_exp);
8925 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8926 complex_double_type_node, orig_arg);
8927 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8928 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8929 tree zero = build_real (type, dconst0);
8930 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8931 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8932 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8933 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8934 fold_build3 (COND_EXPR,
8935 integer_type_node,
8936 hilt, logt, lolt));
8937 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8938 eq_min, ok_lo);
8939 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8940 gt_min, eq_min);
8941 }
8942 else
8943 {
8944 tree const isge_fn
8945 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8946 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8947 }
8948 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8949 max_exp, min_exp);
8950 return result;
8951 }
8952 default:
8953 break;
8954 }
8955
8956 return NULL_TREE;
8957 }
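
/* For illustration, the generic expansions produced above when no optab
   is available, as hypothetical user code (a sketch; DBL_MAX and DBL_MIN
   stand for the mode's largest finite and smallest normal values, and
   the IBM double-double special casing is left out):

     int f (double x) { return __builtin_isinf (x); }
     //   -> isgreater (fabs (x), DBL_MAX)

     int g (double x) { return __builtin_isfinite (x); }
     //   -> islessequal (fabs (x), DBL_MAX)

     int h (double x) { return __builtin_isnormal (x); }
     //   -> islessequal (fabs (x), DBL_MAX)
     //      & isgreaterequal (fabs (x), DBL_MIN)
  */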
8958
8959 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8960 ARG is the argument for the call. */
8961
8962 static tree
8963 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8964 {
8965 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8966
8967 if (!validate_arg (arg, REAL_TYPE))
8968 return NULL_TREE;
8969
8970 switch (builtin_index)
8971 {
8972 case BUILT_IN_ISINF:
8973 if (!HONOR_INFINITIES (arg))
8974 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8975
8976 return NULL_TREE;
8977
8978 case BUILT_IN_ISINF_SIGN:
8979 {
8980 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8981 /* In a boolean context, GCC will fold the inner COND_EXPR to
8982 1. So e.g. "if (isinf_sign(x))" would be folded to just
8983 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8984 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8985 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8986 tree tmp = NULL_TREE;
8987
8988 arg = builtin_save_expr (arg);
8989
8990 if (signbit_fn && isinf_fn)
8991 {
8992 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8993 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8994
8995 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8996 signbit_call, integer_zero_node);
8997 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8998 isinf_call, integer_zero_node);
8999
9000 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9001 integer_minus_one_node, integer_one_node);
9002 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9003 isinf_call, tmp,
9004 integer_zero_node);
9005 }
9006
9007 return tmp;
9008 }
9009
9010 case BUILT_IN_ISFINITE:
9011 if (!HONOR_NANS (arg)
9012 && !HONOR_INFINITIES (arg))
9013 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9014
9015 return NULL_TREE;
9016
9017 case BUILT_IN_ISNAN:
9018 if (!HONOR_NANS (arg))
9019 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9020
9021 {
9022 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9023 if (is_ibm_extended)
9024 {
9025 /* NaN and Inf are encoded in the high-order double value
9026 only. The low-order value is not significant. */
9027 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9028 }
9029 }
9030 arg = builtin_save_expr (arg);
9031 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9032
9033 default:
9034 gcc_unreachable ();
9035 }
9036 }
9037
9038 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9039 This builtin will generate code to return the appropriate floating
9040 point classification depending on the value of the floating point
9041 number passed in. The possible return values must be supplied as
9042 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9043 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9044 one floating point argument which is "type generic". */
9045
9046 static tree
9047 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9048 {
9049 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9050 arg, type, res, tmp;
9051 machine_mode mode;
9052 REAL_VALUE_TYPE r;
9053 char buf[128];
9054
9055 /* Verify the required arguments in the original call. */
9056 if (nargs != 6
9057 || !validate_arg (args[0], INTEGER_TYPE)
9058 || !validate_arg (args[1], INTEGER_TYPE)
9059 || !validate_arg (args[2], INTEGER_TYPE)
9060 || !validate_arg (args[3], INTEGER_TYPE)
9061 || !validate_arg (args[4], INTEGER_TYPE)
9062 || !validate_arg (args[5], REAL_TYPE))
9063 return NULL_TREE;
9064
9065 fp_nan = args[0];
9066 fp_infinite = args[1];
9067 fp_normal = args[2];
9068 fp_subnormal = args[3];
9069 fp_zero = args[4];
9070 arg = args[5];
9071 type = TREE_TYPE (arg);
9072 mode = TYPE_MODE (type);
9073 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9074
9075 /* fpclassify(x) ->
9076 isnan(x) ? FP_NAN :
9077 (fabs(x) == Inf ? FP_INFINITE :
9078 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9079 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9080
9081 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9082 build_real (type, dconst0));
9083 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9084 tmp, fp_zero, fp_subnormal);
9085
9086 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9087 real_from_string (&r, buf);
9088 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9089 arg, build_real (type, r));
9090 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9091
9092 if (HONOR_INFINITIES (mode))
9093 {
9094 real_inf (&r);
9095 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9096 build_real (type, r));
9097 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9098 fp_infinite, res);
9099 }
9100
9101 if (HONOR_NANS (mode))
9102 {
9103 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9104 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9105 }
9106
9107 return res;
9108 }
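
/* For illustration, the shape of the expansion above for hypothetical
   user code (a sketch; the FP_* values are whatever the caller passed,
   typically the <math.h> macros, and DBL_MIN stands for the smallest
   normal value of the argument's mode):

     #include <math.h>

     int f (double x)
     {
       return fpclassify (x);
       // With y = fabs (x), expands roughly to the nested conditional
       //   x unordered with itself ? FP_NAN
       //   : y == INFINITY         ? FP_INFINITE
       //   : y >= DBL_MIN          ? FP_NORMAL
       //   : y == 0.0              ? FP_ZERO
       //   :                         FP_SUBNORMAL
     }
  */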
9109
9110 /* Fold a call to an unordered comparison function such as
9111 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9112 being called and ARG0 and ARG1 are the arguments for the call.
9113 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9114 the opposite of the desired result. UNORDERED_CODE is used
9115 for modes that can hold NaNs and ORDERED_CODE is used for
9116 the rest. */
9117
9118 static tree
9119 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9120 enum tree_code unordered_code,
9121 enum tree_code ordered_code)
9122 {
9123 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9124 enum tree_code code;
9125 tree type0, type1;
9126 enum tree_code code0, code1;
9127 tree cmp_type = NULL_TREE;
9128
9129 type0 = TREE_TYPE (arg0);
9130 type1 = TREE_TYPE (arg1);
9131
9132 code0 = TREE_CODE (type0);
9133 code1 = TREE_CODE (type1);
9134
9135 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9136 /* Choose the wider of two real types. */
9137 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9138 ? type0 : type1;
9139 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9140 cmp_type = type0;
9141 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9142 cmp_type = type1;
9143
9144 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9145 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9146
9147 if (unordered_code == UNORDERED_EXPR)
9148 {
9149 if (!HONOR_NANS (arg0))
9150 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9151 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9152 }
9153
9154 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9155 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9156 fold_build2_loc (loc, code, type, arg0, arg1));
9157 }
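
/* For illustration, the inversion described above applied to
   hypothetical user code with double operands (a sketch):

     int f (double x, double y)
     {
       return isgreater (x, y);
       // When NaNs are honored this folds to the negation of the
       // unordered-or-less-equal compare, !(x UNLE y): true only when
       // both operands are ordered and x > y, without the signaling
       // behavior of a plain x > y.  With -ffinite-math-only it folds
       // to !(x <= y) instead.
     }
  */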
9158
9159 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9160 arithmetics if it can never overflow, or into internal functions that
9161 return both result of arithmetics and overflowed boolean flag in
9162 a complex integer result, or some other check for overflow.
9163 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9164 checking part of that. */
9165
9166 static tree
9167 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9168 tree arg0, tree arg1, tree arg2)
9169 {
9170 enum internal_fn ifn = IFN_LAST;
9171 /* The code of the expression corresponding to the type-generic
9172 built-in, or ERROR_MARK for the type-specific ones. */
9173 enum tree_code opcode = ERROR_MARK;
9174 bool ovf_only = false;
9175
9176 switch (fcode)
9177 {
9178 case BUILT_IN_ADD_OVERFLOW_P:
9179 ovf_only = true;
9180 /* FALLTHRU */
9181 case BUILT_IN_ADD_OVERFLOW:
9182 opcode = PLUS_EXPR;
9183 /* FALLTHRU */
9184 case BUILT_IN_SADD_OVERFLOW:
9185 case BUILT_IN_SADDL_OVERFLOW:
9186 case BUILT_IN_SADDLL_OVERFLOW:
9187 case BUILT_IN_UADD_OVERFLOW:
9188 case BUILT_IN_UADDL_OVERFLOW:
9189 case BUILT_IN_UADDLL_OVERFLOW:
9190 ifn = IFN_ADD_OVERFLOW;
9191 break;
9192 case BUILT_IN_SUB_OVERFLOW_P:
9193 ovf_only = true;
9194 /* FALLTHRU */
9195 case BUILT_IN_SUB_OVERFLOW:
9196 opcode = MINUS_EXPR;
9197 /* FALLTHRU */
9198 case BUILT_IN_SSUB_OVERFLOW:
9199 case BUILT_IN_SSUBL_OVERFLOW:
9200 case BUILT_IN_SSUBLL_OVERFLOW:
9201 case BUILT_IN_USUB_OVERFLOW:
9202 case BUILT_IN_USUBL_OVERFLOW:
9203 case BUILT_IN_USUBLL_OVERFLOW:
9204 ifn = IFN_SUB_OVERFLOW;
9205 break;
9206 case BUILT_IN_MUL_OVERFLOW_P:
9207 ovf_only = true;
9208 /* FALLTHRU */
9209 case BUILT_IN_MUL_OVERFLOW:
9210 opcode = MULT_EXPR;
9211 /* FALLTHRU */
9212 case BUILT_IN_SMUL_OVERFLOW:
9213 case BUILT_IN_SMULL_OVERFLOW:
9214 case BUILT_IN_SMULLL_OVERFLOW:
9215 case BUILT_IN_UMUL_OVERFLOW:
9216 case BUILT_IN_UMULL_OVERFLOW:
9217 case BUILT_IN_UMULLL_OVERFLOW:
9218 ifn = IFN_MUL_OVERFLOW;
9219 break;
9220 default:
9221 gcc_unreachable ();
9222 }
9223
9224 /* For the "generic" overloads, the first two arguments can have different
9225 types and the last argument determines the target type to use to check
9226 for overflow. The arguments of the other overloads all have the same
9227 type. */
9228 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9229
9230 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9231 arguments are constant, attempt to fold the built-in call into a constant
9232 expression indicating whether or not it detected an overflow. */
9233 if (ovf_only
9234 && TREE_CODE (arg0) == INTEGER_CST
9235 && TREE_CODE (arg1) == INTEGER_CST)
9236 /* Perform the computation in the target type and check for overflow. */
9237 return omit_one_operand_loc (loc, boolean_type_node,
9238 arith_overflowed_p (opcode, type, arg0, arg1)
9239 ? boolean_true_node : boolean_false_node,
9240 arg2);
9241
9242 tree ctype = build_complex_type (type);
9243 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9244 2, arg0, arg1);
9245 tree tgt = save_expr (call);
9246 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9247 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9248 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9249
9250 if (ovf_only)
9251 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9252
9253 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9254 tree store
9255 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9256 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9257 }
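
/* For illustration, hypothetical user code showing both folding paths
   above (a sketch):

     #include <limits.h>

     _Bool f (int a, int b, int *res)
     {
       return __builtin_add_overflow (a, b, res);
       // Lowered to the internal IFN_ADD_OVERFLOW, which yields a complex
       // integer: *res is assigned the real part, and the return value is
       // the imaginary part converted to _Bool.
     }

     _Bool g (void)
     {
       return __builtin_add_overflow_p (INT_MAX, 1, (int) 0);
       // Both operands are integer constants, so this folds directly to
       // the constant 1: the addition overflows in type int.
     }
  */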
9258
9259 /* Fold a call to __builtin_FILE to a constant string. */
9260
9261 static inline tree
9262 fold_builtin_FILE (location_t loc)
9263 {
9264 if (const char *fname = LOCATION_FILE (loc))
9265 {
9266 /* The documentation says this builtin is equivalent to the preprocessor
9267 __FILE__ macro so it appears appropriate to use the same file prefix
9268 mappings. */
9269 fname = remap_macro_filename (fname);
9270 return build_string_literal (strlen (fname) + 1, fname);
9271 }
9272
9273 return build_string_literal (1, "");
9274 }
9275
9276 /* Fold a call to __builtin_FUNCTION to a constant string. */
9277
9278 static inline tree
9279 fold_builtin_FUNCTION ()
9280 {
9281 const char *name = "";
9282
9283 if (current_function_decl)
9284 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9285
9286 return build_string_literal (strlen (name) + 1, name);
9287 }
9288
9289 /* Fold a call to __builtin_LINE to an integer constant. */
9290
9291 static inline tree
9292 fold_builtin_LINE (location_t loc, tree type)
9293 {
9294 return build_int_cst (type, LOCATION_LINE (loc));
9295 }
9296
9297 /* Fold a call to built-in function FNDECL with 0 arguments.
9298 This function returns NULL_TREE if no simplification was possible. */
9299
9300 static tree
9301 fold_builtin_0 (location_t loc, tree fndecl)
9302 {
9303 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9304 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9305 switch (fcode)
9306 {
9307 case BUILT_IN_FILE:
9308 return fold_builtin_FILE (loc);
9309
9310 case BUILT_IN_FUNCTION:
9311 return fold_builtin_FUNCTION ();
9312
9313 case BUILT_IN_LINE:
9314 return fold_builtin_LINE (loc, type);
9315
9316 CASE_FLT_FN (BUILT_IN_INF):
9317 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9318 case BUILT_IN_INFD32:
9319 case BUILT_IN_INFD64:
9320 case BUILT_IN_INFD128:
9321 return fold_builtin_inf (loc, type, true);
9322
9323 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9324 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9325 return fold_builtin_inf (loc, type, false);
9326
9327 case BUILT_IN_CLASSIFY_TYPE:
9328 return fold_builtin_classify_type (NULL_TREE);
9329
9330 default:
9331 break;
9332 }
9333 return NULL_TREE;
9334 }
9335
9336 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9337 This function returns NULL_TREE if no simplification was possible. */
9338
9339 static tree
9340 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9341 {
9342 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9343 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9344
9345 if (TREE_CODE (arg0) == ERROR_MARK)
9346 return NULL_TREE;
9347
9348 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9349 return ret;
9350
9351 switch (fcode)
9352 {
9353 case BUILT_IN_CONSTANT_P:
9354 {
9355 tree val = fold_builtin_constant_p (arg0);
9356
9357 /* Gimplification will pull the CALL_EXPR for the builtin out of
9358 an if condition. When not optimizing, we'll not CSE it back.
9359 	   To avoid regressions such as link errors, return false now.  */
9360 if (!val && !optimize)
9361 val = integer_zero_node;
9362
9363 return val;
9364 }
9365
9366 case BUILT_IN_CLASSIFY_TYPE:
9367 return fold_builtin_classify_type (arg0);
9368
9369 case BUILT_IN_STRLEN:
9370 return fold_builtin_strlen (loc, type, arg0);
9371
9372 CASE_FLT_FN (BUILT_IN_FABS):
9373 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9374 case BUILT_IN_FABSD32:
9375 case BUILT_IN_FABSD64:
9376 case BUILT_IN_FABSD128:
9377 return fold_builtin_fabs (loc, arg0, type);
9378
9379 case BUILT_IN_ABS:
9380 case BUILT_IN_LABS:
9381 case BUILT_IN_LLABS:
9382 case BUILT_IN_IMAXABS:
9383 return fold_builtin_abs (loc, arg0, type);
9384
9385 CASE_FLT_FN (BUILT_IN_CONJ):
9386 if (validate_arg (arg0, COMPLEX_TYPE)
9387 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9388 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9389 break;
9390
9391 CASE_FLT_FN (BUILT_IN_CREAL):
9392 if (validate_arg (arg0, COMPLEX_TYPE)
9393 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9394 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9395 break;
9396
9397 CASE_FLT_FN (BUILT_IN_CIMAG):
9398 if (validate_arg (arg0, COMPLEX_TYPE)
9399 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9400 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9401 break;
9402
9403 CASE_FLT_FN (BUILT_IN_CARG):
9404 return fold_builtin_carg (loc, arg0, type);
9405
9406 case BUILT_IN_ISASCII:
9407 return fold_builtin_isascii (loc, arg0);
9408
9409 case BUILT_IN_TOASCII:
9410 return fold_builtin_toascii (loc, arg0);
9411
9412 case BUILT_IN_ISDIGIT:
9413 return fold_builtin_isdigit (loc, arg0);
9414
9415 CASE_FLT_FN (BUILT_IN_FINITE):
9416 case BUILT_IN_FINITED32:
9417 case BUILT_IN_FINITED64:
9418 case BUILT_IN_FINITED128:
9419 case BUILT_IN_ISFINITE:
9420 {
9421 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9422 if (ret)
9423 return ret;
9424 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9425 }
9426
9427 CASE_FLT_FN (BUILT_IN_ISINF):
9428 case BUILT_IN_ISINFD32:
9429 case BUILT_IN_ISINFD64:
9430 case BUILT_IN_ISINFD128:
9431 {
9432 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9433 if (ret)
9434 return ret;
9435 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9436 }
9437
9438 case BUILT_IN_ISNORMAL:
9439 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9440
9441 case BUILT_IN_ISINF_SIGN:
9442 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9443
9444 CASE_FLT_FN (BUILT_IN_ISNAN):
9445 case BUILT_IN_ISNAND32:
9446 case BUILT_IN_ISNAND64:
9447 case BUILT_IN_ISNAND128:
9448 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9449
9450 case BUILT_IN_FREE:
9451 if (integer_zerop (arg0))
9452 return build_empty_stmt (loc);
9453 break;
9454
9455 default:
9456 break;
9457 }
9458
9459 return NULL_TREE;
9460
9461 }
9462
9463 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9464 This function returns NULL_TREE if no simplification was possible. */
9465
9466 static tree
9467 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9468 {
9469 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9470 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9471
9472 if (TREE_CODE (arg0) == ERROR_MARK
9473 || TREE_CODE (arg1) == ERROR_MARK)
9474 return NULL_TREE;
9475
9476 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9477 return ret;
9478
9479 switch (fcode)
9480 {
9481 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9482 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9483 if (validate_arg (arg0, REAL_TYPE)
9484 && validate_arg (arg1, POINTER_TYPE))
9485 return do_mpfr_lgamma_r (arg0, arg1, type);
9486 break;
9487
9488 CASE_FLT_FN (BUILT_IN_FREXP):
9489 return fold_builtin_frexp (loc, arg0, arg1, type);
9490
9491 CASE_FLT_FN (BUILT_IN_MODF):
9492 return fold_builtin_modf (loc, arg0, arg1, type);
9493
9494 case BUILT_IN_STRSPN:
9495 return fold_builtin_strspn (loc, arg0, arg1);
9496
9497 case BUILT_IN_STRCSPN:
9498 return fold_builtin_strcspn (loc, arg0, arg1);
9499
9500 case BUILT_IN_STRPBRK:
9501 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9502
9503 case BUILT_IN_EXPECT:
9504 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9505
9506 case BUILT_IN_ISGREATER:
9507 return fold_builtin_unordered_cmp (loc, fndecl,
9508 arg0, arg1, UNLE_EXPR, LE_EXPR);
9509 case BUILT_IN_ISGREATEREQUAL:
9510 return fold_builtin_unordered_cmp (loc, fndecl,
9511 arg0, arg1, UNLT_EXPR, LT_EXPR);
9512 case BUILT_IN_ISLESS:
9513 return fold_builtin_unordered_cmp (loc, fndecl,
9514 arg0, arg1, UNGE_EXPR, GE_EXPR);
9515 case BUILT_IN_ISLESSEQUAL:
9516 return fold_builtin_unordered_cmp (loc, fndecl,
9517 arg0, arg1, UNGT_EXPR, GT_EXPR);
9518 case BUILT_IN_ISLESSGREATER:
9519 return fold_builtin_unordered_cmp (loc, fndecl,
9520 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9521 case BUILT_IN_ISUNORDERED:
9522 return fold_builtin_unordered_cmp (loc, fndecl,
9523 arg0, arg1, UNORDERED_EXPR,
9524 NOP_EXPR);
9525
9526 /* We do the folding for va_start in the expander. */
9527 case BUILT_IN_VA_START:
9528 break;
9529
9530 case BUILT_IN_OBJECT_SIZE:
9531 return fold_builtin_object_size (arg0, arg1);
9532
9533 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9534 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9535
9536 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9537 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9538
9539 default:
9540 break;
9541 }
9542 return NULL_TREE;
9543 }
9544
9545 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9546 and ARG2.
9547 This function returns NULL_TREE if no simplification was possible. */
9548
9549 static tree
9550 fold_builtin_3 (location_t loc, tree fndecl,
9551 tree arg0, tree arg1, tree arg2)
9552 {
9553 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9554 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9555
9556 if (TREE_CODE (arg0) == ERROR_MARK
9557 || TREE_CODE (arg1) == ERROR_MARK
9558 || TREE_CODE (arg2) == ERROR_MARK)
9559 return NULL_TREE;
9560
9561 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9562 arg0, arg1, arg2))
9563 return ret;
9564
9565 switch (fcode)
9566 {
9567
9568 CASE_FLT_FN (BUILT_IN_SINCOS):
9569 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9570
9571 CASE_FLT_FN (BUILT_IN_REMQUO):
9572 if (validate_arg (arg0, REAL_TYPE)
9573 && validate_arg (arg1, REAL_TYPE)
9574 && validate_arg (arg2, POINTER_TYPE))
9575 return do_mpfr_remquo (arg0, arg1, arg2);
9576 break;
9577
9578 case BUILT_IN_MEMCMP:
9579 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9580
9581 case BUILT_IN_EXPECT:
9582 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9583
9584 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9585 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9586
9587 case BUILT_IN_ADD_OVERFLOW:
9588 case BUILT_IN_SUB_OVERFLOW:
9589 case BUILT_IN_MUL_OVERFLOW:
9590 case BUILT_IN_ADD_OVERFLOW_P:
9591 case BUILT_IN_SUB_OVERFLOW_P:
9592 case BUILT_IN_MUL_OVERFLOW_P:
9593 case BUILT_IN_SADD_OVERFLOW:
9594 case BUILT_IN_SADDL_OVERFLOW:
9595 case BUILT_IN_SADDLL_OVERFLOW:
9596 case BUILT_IN_SSUB_OVERFLOW:
9597 case BUILT_IN_SSUBL_OVERFLOW:
9598 case BUILT_IN_SSUBLL_OVERFLOW:
9599 case BUILT_IN_SMUL_OVERFLOW:
9600 case BUILT_IN_SMULL_OVERFLOW:
9601 case BUILT_IN_SMULLL_OVERFLOW:
9602 case BUILT_IN_UADD_OVERFLOW:
9603 case BUILT_IN_UADDL_OVERFLOW:
9604 case BUILT_IN_UADDLL_OVERFLOW:
9605 case BUILT_IN_USUB_OVERFLOW:
9606 case BUILT_IN_USUBL_OVERFLOW:
9607 case BUILT_IN_USUBLL_OVERFLOW:
9608 case BUILT_IN_UMUL_OVERFLOW:
9609 case BUILT_IN_UMULL_OVERFLOW:
9610 case BUILT_IN_UMULLL_OVERFLOW:
9611 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9612
9613 default:
9614 break;
9615 }
9616 return NULL_TREE;
9617 }
9618
9619 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9620 arguments. IGNORE is true if the result of the
9621 function call is ignored. This function returns NULL_TREE if no
9622 simplification was possible. */
9623
9624 tree
9625 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9626 {
9627 tree ret = NULL_TREE;
9628
9629 switch (nargs)
9630 {
9631 case 0:
9632 ret = fold_builtin_0 (loc, fndecl);
9633 break;
9634 case 1:
9635 ret = fold_builtin_1 (loc, fndecl, args[0]);
9636 break;
9637 case 2:
9638 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9639 break;
9640 case 3:
9641 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9642 break;
9643 default:
9644 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9645 break;
9646 }
9647 if (ret)
9648 {
9649 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9650 SET_EXPR_LOCATION (ret, loc);
9651 return ret;
9652 }
9653 return NULL_TREE;
9654 }
9655
9656 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9657 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9658 of arguments in ARGS to be omitted. OLDNARGS is the number of
9659 elements in ARGS. */
9660
9661 static tree
9662 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9663 int skip, tree fndecl, int n, va_list newargs)
9664 {
9665 int nargs = oldnargs - skip + n;
9666 tree *buffer;
9667
9668 if (n > 0)
9669 {
9670 int i, j;
9671
9672 buffer = XALLOCAVEC (tree, nargs);
9673 for (i = 0; i < n; i++)
9674 buffer[i] = va_arg (newargs, tree);
9675 for (j = skip; j < oldnargs; j++, i++)
9676 buffer[i] = args[j];
9677 }
9678 else
9679 buffer = args + skip;
9680
9681 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9682 }
9683
9684 /* Return true if FNDECL shouldn't be folded right now.
9685 If a built-in function has an inline attribute always_inline
9686 wrapper, defer folding it after always_inline functions have
9687 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9688 might not be performed. */
9689
9690 bool
9691 avoid_folding_inline_builtin (tree fndecl)
9692 {
9693 return (DECL_DECLARED_INLINE_P (fndecl)
9694 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9695 && cfun
9696 && !cfun->always_inline_functions_inlined
9697 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9698 }
9699
9700 /* A wrapper function for builtin folding that prevents warnings for
9701 "statement without effect" and the like, caused by removing the
9702 call node earlier than the warning is generated. */
9703
9704 tree
9705 fold_call_expr (location_t loc, tree exp, bool ignore)
9706 {
9707 tree ret = NULL_TREE;
9708 tree fndecl = get_callee_fndecl (exp);
9709 if (fndecl && fndecl_built_in_p (fndecl)
9710 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9711 yet. Defer folding until we see all the arguments
9712 (after inlining). */
9713 && !CALL_EXPR_VA_ARG_PACK (exp))
9714 {
9715 int nargs = call_expr_nargs (exp);
9716
9717 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9718 instead last argument is __builtin_va_arg_pack (). Defer folding
9719 even in that case, until arguments are finalized. */
9720 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9721 {
9722 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9723 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9724 return NULL_TREE;
9725 }
9726
9727 if (avoid_folding_inline_builtin (fndecl))
9728 return NULL_TREE;
9729
9730 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9731 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9732 CALL_EXPR_ARGP (exp), ignore);
9733 else
9734 {
9735 tree *args = CALL_EXPR_ARGP (exp);
9736 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9737 if (ret)
9738 return ret;
9739 }
9740 }
9741 return NULL_TREE;
9742 }
9743
9744 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9745 N arguments are passed in the array ARGARRAY. Return a folded
9746 expression or NULL_TREE if no simplification was possible. */
9747
9748 tree
9749 fold_builtin_call_array (location_t loc, tree,
9750 tree fn,
9751 int n,
9752 tree *argarray)
9753 {
9754 if (TREE_CODE (fn) != ADDR_EXPR)
9755 return NULL_TREE;
9756
9757 tree fndecl = TREE_OPERAND (fn, 0);
9758 if (TREE_CODE (fndecl) == FUNCTION_DECL
9759 && fndecl_built_in_p (fndecl))
9760 {
9761 /* If last argument is __builtin_va_arg_pack (), arguments to this
9762 function are not finalized yet. Defer folding until they are. */
9763 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9764 {
9765 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9766 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9767 return NULL_TREE;
9768 }
9769 if (avoid_folding_inline_builtin (fndecl))
9770 return NULL_TREE;
9771 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9772 return targetm.fold_builtin (fndecl, n, argarray, false);
9773 else
9774 return fold_builtin_n (loc, fndecl, argarray, n, false);
9775 }
9776
9777 return NULL_TREE;
9778 }
9779
9780 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9781 along with N new arguments specified as the "..." parameters. SKIP
9782 is the number of arguments in EXP to be omitted. This function is used
9783 to do varargs-to-varargs transformations. */
9784
9785 static tree
9786 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9787 {
9788 va_list ap;
9789 tree t;
9790
9791 va_start (ap, n);
9792 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9793 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9794 va_end (ap);
9795
9796 return t;
9797 }
9798
9799 /* Validate a single argument ARG against a tree code CODE representing
9800 a type. Return true when argument is valid. */
9801
9802 static bool
9803 validate_arg (const_tree arg, enum tree_code code)
9804 {
9805 if (!arg)
9806 return false;
9807 else if (code == POINTER_TYPE)
9808 return POINTER_TYPE_P (TREE_TYPE (arg));
9809 else if (code == INTEGER_TYPE)
9810 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9811 return code == TREE_CODE (TREE_TYPE (arg));
9812 }
9813
9814 /* This function validates the types of a function call argument list
9815 against a specified list of tree_codes. If the last specifier is a 0,
9816    that represents an ellipsis, otherwise the last specifier must be a
9817 VOID_TYPE.
9818
9819 This is the GIMPLE version of validate_arglist. Eventually we want to
9820 completely convert builtins.c to work from GIMPLEs and the tree based
9821 validate_arglist will then be removed. */
9822
9823 bool
9824 validate_gimple_arglist (const gcall *call, ...)
9825 {
9826 enum tree_code code;
9827 bool res = 0;
9828 va_list ap;
9829 const_tree arg;
9830 size_t i;
9831
9832 va_start (ap, call);
9833 i = 0;
9834
9835 do
9836 {
9837 code = (enum tree_code) va_arg (ap, int);
9838 switch (code)
9839 {
9840 case 0:
9841 	/* This signifies an ellipsis; any further arguments are all ok.  */
9842 res = true;
9843 goto end;
9844 case VOID_TYPE:
9845 /* This signifies an endlink, if no arguments remain, return
9846 true, otherwise return false. */
9847 res = (i == gimple_call_num_args (call));
9848 goto end;
9849 default:
9850 /* If no parameters remain or the parameter's code does not
9851 match the specified code, return false. Otherwise continue
9852 checking any remaining arguments. */
9853 arg = gimple_call_arg (call, i++);
9854 if (!validate_arg (arg, code))
9855 goto end;
9856 break;
9857 }
9858 }
9859 while (1);
9860
9861 /* We need gotos here since we can only have one VA_CLOSE in a
9862 function. */
9863 end: ;
9864 va_end (ap);
9865
9866 return res;
9867 }
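
/* For illustration, a typical use of the checker above (a sketch; STMT
   stands for a hypothetical gcall, here a sincos-style call taking a
   real argument and two pointers):

     if (!validate_gimple_arglist (stmt, REAL_TYPE, POINTER_TYPE,
                                   POINTER_TYPE, VOID_TYPE))
       return false;
     // The trailing VOID_TYPE means "no further arguments"; ending the
     // list with 0 instead would accept any extra trailing arguments.
  */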
9868
9869 /* Default target-specific builtin expander that does nothing. */
9870
9871 rtx
9872 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9873 rtx target ATTRIBUTE_UNUSED,
9874 rtx subtarget ATTRIBUTE_UNUSED,
9875 machine_mode mode ATTRIBUTE_UNUSED,
9876 int ignore ATTRIBUTE_UNUSED)
9877 {
9878 return NULL_RTX;
9879 }
9880
9881 /* Returns true is EXP represents data that would potentially reside
9882 in a readonly section. */
9883
9884 bool
9885 readonly_data_expr (tree exp)
9886 {
9887 STRIP_NOPS (exp);
9888
9889 if (TREE_CODE (exp) != ADDR_EXPR)
9890 return false;
9891
9892 exp = get_base_address (TREE_OPERAND (exp, 0));
9893 if (!exp)
9894 return false;
9895
9896 /* Make sure we call decl_readonly_section only for trees it
9897 can handle (since it returns true for everything it doesn't
9898 understand). */
9899 if (TREE_CODE (exp) == STRING_CST
9900 || TREE_CODE (exp) == CONSTRUCTOR
9901 || (VAR_P (exp) && TREE_STATIC (exp)))
9902 return decl_readonly_section (exp, 0);
9903 else
9904 return false;
9905 }
9906
9907 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9908 to the call, and TYPE is its return type.
9909
9910 Return NULL_TREE if no simplification was possible, otherwise return the
9911 simplified form of the call as a tree.
9912
9913 The simplified form may be a constant or other expression which
9914 computes the same value, but in a more efficient manner (including
9915 calls to other builtin functions).
9916
9917 The call may contain arguments which need to be evaluated, but
9918 which are not useful to determine the result of the call. In
9919 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9920 COMPOUND_EXPR will be an argument which must be evaluated.
9921 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9922 COMPOUND_EXPR in the chain will contain the tree for the simplified
9923 form of the builtin function call. */
9924
9925 static tree
9926 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9927 {
9928 if (!validate_arg (s1, POINTER_TYPE)
9929 || !validate_arg (s2, POINTER_TYPE))
9930 return NULL_TREE;
9931 else
9932 {
9933 tree fn;
9934 const char *p1, *p2;
9935
9936 p2 = c_getstr (s2);
9937 if (p2 == NULL)
9938 return NULL_TREE;
9939
9940 p1 = c_getstr (s1);
9941 if (p1 != NULL)
9942 {
9943 const char *r = strpbrk (p1, p2);
9944 tree tem;
9945
9946 if (r == NULL)
9947 return build_int_cst (TREE_TYPE (s1), 0);
9948
9949 /* Return an offset into the constant string argument. */
9950 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9951 return fold_convert_loc (loc, type, tem);
9952 }
9953
9954 if (p2[0] == '\0')
9955 /* strpbrk(x, "") == NULL.
9956 Evaluate and ignore s1 in case it had side-effects. */
9957 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9958
9959 if (p2[1] != '\0')
9960 return NULL_TREE; /* Really call strpbrk. */
9961
9962 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9963 if (!fn)
9964 return NULL_TREE;
9965
9966 /* New argument list transforming strpbrk(s1, s2) to
9967 strchr(s1, s2[0]). */
9968 return build_call_expr_loc (loc, fn, 2, s1,
9969 build_int_cst (integer_type_node, p2[0]));
9970 }
9971 }
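
/* For illustration, the three outcomes above on hypothetical user code
   (a sketch; assumes standard strpbrk semantics):

     #include <string.h>

     char *f (char *s)
     {
       return strpbrk (s, "");     // folds to a null pointer; S is still
     }                             // evaluated for side effects

     char *g (char *s)
     {
       return strpbrk (s, "-");    // single-character set: folds to
     }                             // strchr (s, '-')

     char *h (void)
     {
       return strpbrk ("a-b", "-+");  // both strings constant: folds to a
     }                                // pointer one past the start of "a-b"
  */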
9972
9973 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9974 to the call.
9975
9976 Return NULL_TREE if no simplification was possible, otherwise return the
9977 simplified form of the call as a tree.
9978
9979 The simplified form may be a constant or other expression which
9980 computes the same value, but in a more efficient manner (including
9981 calls to other builtin functions).
9982
9983 The call may contain arguments which need to be evaluated, but
9984 which are not useful to determine the result of the call. In
9985 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9986 COMPOUND_EXPR will be an argument which must be evaluated.
9987 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9988 COMPOUND_EXPR in the chain will contain the tree for the simplified
9989 form of the builtin function call. */
9990
9991 static tree
9992 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9993 {
9994 if (!validate_arg (s1, POINTER_TYPE)
9995 || !validate_arg (s2, POINTER_TYPE))
9996 return NULL_TREE;
9997 else
9998 {
9999 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10000
10001       /* If either argument is "", the result is zero.  */
10002 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10003 /* Evaluate and ignore both arguments in case either one has
10004 side-effects. */
10005 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10006 s1, s2);
10007 return NULL_TREE;
10008 }
10009 }
10010
10011 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10012 to the call.
10013
10014 Return NULL_TREE if no simplification was possible, otherwise return the
10015 simplified form of the call as a tree.
10016
10017 The simplified form may be a constant or other expression which
10018 computes the same value, but in a more efficient manner (including
10019 calls to other builtin functions).
10020
10021 The call may contain arguments which need to be evaluated, but
10022 which are not useful to determine the result of the call. In
10023 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10024 COMPOUND_EXPR will be an argument which must be evaluated.
10025 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10026 COMPOUND_EXPR in the chain will contain the tree for the simplified
10027 form of the builtin function call. */
10028
10029 static tree
10030 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10031 {
10032 if (!validate_arg (s1, POINTER_TYPE)
10033 || !validate_arg (s2, POINTER_TYPE))
10034 return NULL_TREE;
10035 else
10036 {
10037       /* If the first argument is "", the result is zero.  */
10038 const char *p1 = c_getstr (s1);
10039 if (p1 && *p1 == '\0')
10040 {
10041 /* Evaluate and ignore argument s2 in case it has
10042 side-effects. */
10043 return omit_one_operand_loc (loc, size_type_node,
10044 size_zero_node, s2);
10045 }
10046
10047 /* If the second argument is "", return __builtin_strlen(s1). */
10048 const char *p2 = c_getstr (s2);
10049 if (p2 && *p2 == '\0')
10050 {
10051 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10052
10053 /* If the replacement _DECL isn't initialized, don't do the
10054 transformation. */
10055 if (!fn)
10056 return NULL_TREE;
10057
10058 return build_call_expr_loc (loc, fn, 1, s1);
10059 }
10060 return NULL_TREE;
10061 }
10062 }
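
/* For illustration, the folds implemented by the two functions above on
   hypothetical user code (a sketch; assumes standard strspn/strcspn
   semantics):

     #include <string.h>

     size_t f (const char *s)
     {
       return strspn (s, "");      // folds to 0; S and "" still evaluated
     }

     size_t g (const char *s)
     {
       return strcspn ("", s);     // folds to 0; S still evaluated
     }

     size_t h (const char *s)
     {
       return strcspn (s, "");     // folds to strlen (s)
     }
  */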
10063
10064 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10065 produced. False otherwise. This is done so that we don't output the error
10066 or warning twice or three times. */
10067
10068 bool
10069 fold_builtin_next_arg (tree exp, bool va_start_p)
10070 {
10071 tree fntype = TREE_TYPE (current_function_decl);
10072 int nargs = call_expr_nargs (exp);
10073 tree arg;
10074   /* There is a good chance the current input_location points inside the
10075 definition of the va_start macro (perhaps on the token for
10076 builtin) in a system header, so warnings will not be emitted.
10077 Use the location in real source code. */
10078 source_location current_location =
10079 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10080 NULL);
10081
10082 if (!stdarg_p (fntype))
10083 {
10084 error ("%<va_start%> used in function with fixed args");
10085 return true;
10086 }
10087
10088 if (va_start_p)
10089 {
10090 if (va_start_p && (nargs != 2))
10091 {
10092 error ("wrong number of arguments to function %<va_start%>");
10093 return true;
10094 }
10095 arg = CALL_EXPR_ARG (exp, 1);
10096 }
10097 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10098 when we checked the arguments and if needed issued a warning. */
10099 else
10100 {
10101 if (nargs == 0)
10102 {
10103 /* Evidently an out of date version of <stdarg.h>; can't validate
10104 va_start's second argument, but can still work as intended. */
10105 warning_at (current_location,
10106 OPT_Wvarargs,
10107 "%<__builtin_next_arg%> called without an argument");
10108 return true;
10109 }
10110 else if (nargs > 1)
10111 {
10112 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10113 return true;
10114 }
10115 arg = CALL_EXPR_ARG (exp, 0);
10116 }
10117
10118 if (TREE_CODE (arg) == SSA_NAME)
10119 arg = SSA_NAME_VAR (arg);
10120
10121 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10122 or __builtin_next_arg (0) the first time we see it, after checking
10123 the arguments and if needed issuing a warning. */
10124 if (!integer_zerop (arg))
10125 {
10126 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10127
10128 /* Strip off all nops for the sake of the comparison. This
10129 is not quite the same as STRIP_NOPS. It does more.
10130 We must also strip off INDIRECT_EXPR for C++ reference
10131 parameters. */
10132 while (CONVERT_EXPR_P (arg)
10133 || TREE_CODE (arg) == INDIRECT_REF)
10134 arg = TREE_OPERAND (arg, 0);
10135 if (arg != last_parm)
10136 {
10137 	  /* FIXME: Sometimes with the tree optimizers we can get something
10138 	     other than the last argument even though the user used the last
10139 argument. We just warn and set the arg to be the last
10140 argument so that we will get wrong-code because of
10141 it. */
10142 warning_at (current_location,
10143 OPT_Wvarargs,
10144 "second parameter of %<va_start%> not last named argument");
10145 }
10146
10147 /* Undefined by C99 7.15.1.4p4 (va_start):
10148 "If the parameter parmN is declared with the register storage
10149 class, with a function or array type, or with a type that is
10150 not compatible with the type that results after application of
10151 the default argument promotions, the behavior is undefined."
10152 */
10153 else if (DECL_REGISTER (arg))
10154 {
10155 warning_at (current_location,
10156 OPT_Wvarargs,
10157 "undefined behavior when second parameter of "
10158 "%<va_start%> is declared with %<register%> storage");
10159 }
10160
10161 /* We want to verify the second parameter just once before the tree
10162 optimizers are run and then avoid keeping it in the tree,
10163 as otherwise we could warn even for correct code like:
10164 void foo (int i, ...)
10165 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10166 if (va_start_p)
10167 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10168 else
10169 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10170 }
10171 return false;
10172 }
10173
10174
10175 /* Expand a call EXP to __builtin_object_size. */
10176
10177 static rtx
10178 expand_builtin_object_size (tree exp)
10179 {
10180 tree ost;
10181 int object_size_type;
10182 tree fndecl = get_callee_fndecl (exp);
10183
10184 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10185 {
10186 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10187 exp, fndecl);
10188 expand_builtin_trap ();
10189 return const0_rtx;
10190 }
10191
10192 ost = CALL_EXPR_ARG (exp, 1);
10193 STRIP_NOPS (ost);
10194
10195 if (TREE_CODE (ost) != INTEGER_CST
10196 || tree_int_cst_sgn (ost) < 0
10197 || compare_tree_int (ost, 3) > 0)
10198 {
10199 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10200 exp, fndecl);
10201 expand_builtin_trap ();
10202 return const0_rtx;
10203 }
10204
10205 object_size_type = tree_to_shwi (ost);
10206
10207 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10208 }
10209
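/* For illustration, what reaching this expander means at the source level
   (a sketch; calls whose result the earlier tree passes could determine
   are folded to constants long before RTL expansion):

     #include <stddef.h>

     size_t
     remaining (char *p)
     {
       // Nothing is known about the object P points to, so the call
       // survives to expansion: types 0 and 1 yield (size_t) -1 here,
       // types 2 and 3 yield 0.
       return __builtin_object_size (p, 0);
     }  */
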
10210 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10211 FCODE is the BUILT_IN_* to use.
10212 Return NULL_RTX if we failed; the caller should emit a normal call,
10213 otherwise try to get the result in TARGET, if convenient (and in
10214 mode MODE if that's convenient). */
10215
10216 static rtx
10217 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10218 enum built_in_function fcode)
10219 {
10220 if (!validate_arglist (exp,
10221 POINTER_TYPE,
10222 fcode == BUILT_IN_MEMSET_CHK
10223 ? INTEGER_TYPE : POINTER_TYPE,
10224 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10225 return NULL_RTX;
10226
10227 tree dest = CALL_EXPR_ARG (exp, 0);
10228 tree src = CALL_EXPR_ARG (exp, 1);
10229 tree len = CALL_EXPR_ARG (exp, 2);
10230 tree size = CALL_EXPR_ARG (exp, 3);
10231
10232 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10233 /*str=*/NULL_TREE, size);
10234
10235 if (!tree_fits_uhwi_p (size))
10236 return NULL_RTX;
10237
10238 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10239 {
10240 /* Avoid transforming the checking call to an ordinary one when
10241 an overflow has been detected or when the call couldn't be
10242 validated because the size is not constant. */
10243 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10244 return NULL_RTX;
10245
10246 tree fn = NULL_TREE;
10247 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10248 mem{cpy,pcpy,move,set} is available. */
10249 switch (fcode)
10250 {
10251 case BUILT_IN_MEMCPY_CHK:
10252 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10253 break;
10254 case BUILT_IN_MEMPCPY_CHK:
10255 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10256 break;
10257 case BUILT_IN_MEMMOVE_CHK:
10258 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10259 break;
10260 case BUILT_IN_MEMSET_CHK:
10261 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10262 break;
10263 default:
10264 break;
10265 }
10266
10267 if (! fn)
10268 return NULL_RTX;
10269
10270 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10271 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10272 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10273 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10274 }
10275 else if (fcode == BUILT_IN_MEMSET_CHK)
10276 return NULL_RTX;
10277 else
10278 {
10279 unsigned int dest_align = get_pointer_alignment (dest);
10280
10281 /* If DEST is not a pointer type, call the normal function. */
10282 if (dest_align == 0)
10283 return NULL_RTX;
10284
10285 /* If SRC and DEST are the same (and not volatile), do nothing. */
10286 if (operand_equal_p (src, dest, 0))
10287 {
10288 tree expr;
10289
10290 if (fcode != BUILT_IN_MEMPCPY_CHK)
10291 {
10292 /* Evaluate and ignore LEN in case it has side-effects. */
10293 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10294 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10295 }
10296
10297 expr = fold_build_pointer_plus (dest, len);
10298 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10299 }
10300
10301 /* __memmove_chk special case. */
10302 if (fcode == BUILT_IN_MEMMOVE_CHK)
10303 {
10304 unsigned int src_align = get_pointer_alignment (src);
10305
10306 if (src_align == 0)
10307 return NULL_RTX;
10308
10309 /* If src is categorized for a readonly section we can use
10310 normal __memcpy_chk. */
10311 if (readonly_data_expr (src))
10312 {
10313 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10314 if (!fn)
10315 return NULL_RTX;
10316 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10317 dest, src, len, size);
10318 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10319 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10320 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10321 }
10322 }
10323 return NULL_RTX;
10324 }
10325 }
10326
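/* For illustration, the transformation performed above on a typical
   fortified call (a sketch; with _FORTIFY_SOURCE, glibc's headers expand
   memcpy into a call of this shape):

     char dst[16];

     void
     copy (const char *src)
     {
       // LEN is the constant 8 and the object size folds to 16, so the
       // check succeeds and the call is emitted as plain memcpy (dst, src, 8).
       __builtin___memcpy_chk (dst, src, 8, __builtin_object_size (dst, 0));
     }  */
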
10327 /* Emit warning if a buffer overflow is detected at compile time. */
10328
10329 static void
10330 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10331 {
10332 /* The source string. */
10333 tree srcstr = NULL_TREE;
10334 /* The size of the destination object. */
10335 tree objsize = NULL_TREE;
10336 /* The string that is being concatenated with (as in __strcat_chk)
10337 or null if it isn't. */
10338 tree catstr = NULL_TREE;
10339 /* The maximum length of the source sequence in a bounded operation
10340 (such as __strncat_chk) or null if the operation isn't bounded
10341 (such as __strcat_chk). */
10342 tree maxread = NULL_TREE;
10343 /* The exact size of the access (such as in __strncpy_chk). */
10344 tree size = NULL_TREE;
10345
10346 switch (fcode)
10347 {
10348 case BUILT_IN_STRCPY_CHK:
10349 case BUILT_IN_STPCPY_CHK:
10350 srcstr = CALL_EXPR_ARG (exp, 1);
10351 objsize = CALL_EXPR_ARG (exp, 2);
10352 break;
10353
10354 case BUILT_IN_STRCAT_CHK:
10355 /* For __strcat_chk the warning will be emitted only if overflowing
10356 by at least strlen (dest) + 1 bytes. */
10357 catstr = CALL_EXPR_ARG (exp, 0);
10358 srcstr = CALL_EXPR_ARG (exp, 1);
10359 objsize = CALL_EXPR_ARG (exp, 2);
10360 break;
10361
10362 case BUILT_IN_STRNCAT_CHK:
10363 catstr = CALL_EXPR_ARG (exp, 0);
10364 srcstr = CALL_EXPR_ARG (exp, 1);
10365 maxread = CALL_EXPR_ARG (exp, 2);
10366 objsize = CALL_EXPR_ARG (exp, 3);
10367 break;
10368
10369 case BUILT_IN_STRNCPY_CHK:
10370 case BUILT_IN_STPNCPY_CHK:
10371 srcstr = CALL_EXPR_ARG (exp, 1);
10372 size = CALL_EXPR_ARG (exp, 2);
10373 objsize = CALL_EXPR_ARG (exp, 3);
10374 break;
10375
10376 case BUILT_IN_SNPRINTF_CHK:
10377 case BUILT_IN_VSNPRINTF_CHK:
10378 maxread = CALL_EXPR_ARG (exp, 1);
10379 objsize = CALL_EXPR_ARG (exp, 3);
10380 break;
10381 default:
10382 gcc_unreachable ();
10383 }
10384
10385 if (catstr && maxread)
10386 {
10387 /* Check __strncat_chk. There is no way to determine the length
10388 of the string to which the source string is being appended so
10389 just warn when the length of the source string is not known. */
10390 check_strncat_sizes (exp, objsize);
10391 return;
10392 }
10393
10394 /* The destination argument is the first one for all built-ins above. */
10395 tree dst = CALL_EXPR_ARG (exp, 0);
10396
10397 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10398 }
10399
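/* For illustration, the kind of call the checks above diagnose (a sketch;
   the exact -Wstringop-overflow wording depends on what is known about
   the operands):

     char d[4];

     void
     f (void)
     {
       // The source needs 10 bytes including the terminating nul but the
       // destination holds only 4, so a compile-time warning is emitted.
       __builtin___strcpy_chk (d, "too large", __builtin_object_size (d, 1));
     }  */
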
10400 /* Emit warning if a buffer overflow is detected at compile time
10401 in __sprintf_chk/__vsprintf_chk calls. */
10402
10403 static void
10404 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10405 {
10406 tree size, len, fmt;
10407 const char *fmt_str;
10408 int nargs = call_expr_nargs (exp);
10409
10410 /* Verify the required arguments in the original call. */
10411
10412 if (nargs < 4)
10413 return;
10414 size = CALL_EXPR_ARG (exp, 2);
10415 fmt = CALL_EXPR_ARG (exp, 3);
10416
10417 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10418 return;
10419
10420 /* Check whether the format is a literal string constant. */
10421 fmt_str = c_getstr (fmt);
10422 if (fmt_str == NULL)
10423 return;
10424
10425 if (!init_target_chars ())
10426 return;
10427
10428 /* If the format doesn't contain % args or %%, we know its size. */
10429 if (strchr (fmt_str, target_percent) == 0)
10430 len = build_int_cstu (size_type_node, strlen (fmt_str));
10431 /* If the format is "%s" and first ... argument is a string literal,
10432 we know it too. */
10433 else if (fcode == BUILT_IN_SPRINTF_CHK
10434 && strcmp (fmt_str, target_percent_s) == 0)
10435 {
10436 tree arg;
10437
10438 if (nargs < 5)
10439 return;
10440 arg = CALL_EXPR_ARG (exp, 4);
10441 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10442 return;
10443
10444 len = c_strlen (arg, 1);
10445 if (!len || ! tree_fits_uhwi_p (len))
10446 return;
10447 }
10448 else
10449 return;
10450
10451 /* Add one for the terminating nul. */
10452 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10453
10454 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10455 /*maxread=*/NULL_TREE, len, size);
10456 }
10457
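/* For illustration, a case the format analysis above can handle (a sketch;
   only formats without '%' and a lone "%s" with a known string argument
   are checked):

     char d[4];

     void
     f (void)
     {
       // The format is the literal "%s" and the argument is a known
       // 6-character string, so the 7-byte result (including the nul) is
       // compared against the 4-byte destination and a warning is emitted.
       __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1),
                                "%s", "abcdef");
     }  */
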
10458 /* Emit a warning if free is called with the address of a variable. */
10459
10460 static void
10461 maybe_emit_free_warning (tree exp)
10462 {
10463 tree arg = CALL_EXPR_ARG (exp, 0);
10464
10465 STRIP_NOPS (arg);
10466 if (TREE_CODE (arg) != ADDR_EXPR)
10467 return;
10468
10469 arg = get_base_address (TREE_OPERAND (arg, 0));
10470 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10471 return;
10472
10473 if (SSA_VAR_P (arg))
10474 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10475 "%Kattempt to free a non-heap object %qD", exp, arg);
10476 else
10477 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10478 "%Kattempt to free a non-heap object", exp);
10479 }
10480
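/* For illustration, the kind of call diagnosed above (a sketch):

     #include <stdlib.h>

     void
     f (void)
     {
       int i;
       free (&i);   // -Wfree-nonheap-object: I is not heap-allocated.
     }  */
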
10481 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10482 if possible. */
10483
10484 static tree
10485 fold_builtin_object_size (tree ptr, tree ost)
10486 {
10487 unsigned HOST_WIDE_INT bytes;
10488 int object_size_type;
10489
10490 if (!validate_arg (ptr, POINTER_TYPE)
10491 || !validate_arg (ost, INTEGER_TYPE))
10492 return NULL_TREE;
10493
10494 STRIP_NOPS (ost);
10495
10496 if (TREE_CODE (ost) != INTEGER_CST
10497 || tree_int_cst_sgn (ost) < 0
10498 || compare_tree_int (ost, 3) > 0)
10499 return NULL_TREE;
10500
10501 object_size_type = tree_to_shwi (ost);
10502
10503 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10504 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10505 and (size_t) 0 for types 2 and 3. */
10506 if (TREE_SIDE_EFFECTS (ptr))
10507 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10508
10509 if (TREE_CODE (ptr) == ADDR_EXPR)
10510 {
10511 compute_builtin_object_size (ptr, object_size_type, &bytes);
10512 if (wi::fits_to_tree_p (bytes, size_type_node))
10513 return build_int_cstu (size_type_node, bytes);
10514 }
10515 else if (TREE_CODE (ptr) == SSA_NAME)
10516 {
10517 /* If object size is not known yet, delay folding until
10518 later. Maybe subsequent passes will help determine
10519 it. */
10520 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10521 && wi::fits_to_tree_p (bytes, size_type_node))
10522 return build_int_cstu (size_type_node, bytes);
10523 }
10524
10525 return NULL_TREE;
10526 }
10527
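/* For illustration, the side-effect rule handled above (a sketch):

     #include <stddef.h>

     size_t
     g (char *p)
     {
       // The argument is not evaluated, so P is left unchanged and the
       // call folds directly to (size_t) -1 for type 0 (or to 0 for
       // types 2 and 3).
       return __builtin_object_size (p++, 0);
     }  */
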
10528 /* Builtins with folding operations that operate on "..." arguments
10529 need special handling; we need to store the arguments in a convenient
10530 data structure before attempting any folding. Fortunately there are
10531 only a few builtins that fall into this category. FNDECL is the
10532 function, EXP is the CALL_EXPR for the call. */
10533
10534 static tree
10535 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10536 {
10537 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10538 tree ret = NULL_TREE;
10539
10540 switch (fcode)
10541 {
10542 case BUILT_IN_FPCLASSIFY:
10543 ret = fold_builtin_fpclassify (loc, args, nargs);
10544 break;
10545
10546 default:
10547 break;
10548 }
10549 if (ret)
10550 {
10551 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10552 SET_EXPR_LOCATION (ret, loc);
10553 TREE_NO_WARNING (ret) = 1;
10554 return ret;
10555 }
10556 return NULL_TREE;
10557 }
10558
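/* For illustration, the varargs builtin folded above (a sketch; the first
   five arguments supply the values returned for NaN, infinity, normal,
   subnormal and zero, respectively):

     int
     classify (void)
     {
       // 1.5 is a compile-time constant normal value, so the whole call
       // folds to the third argument, 2.
       return __builtin_fpclassify (0, 1, 2, 3, 4, 1.5);
     }  */
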
10559 /* Initialize format string characters in the target charset. */
10560
10561 bool
10562 init_target_chars (void)
10563 {
10564 static bool init;
10565 if (!init)
10566 {
10567 target_newline = lang_hooks.to_target_charset ('\n');
10568 target_percent = lang_hooks.to_target_charset ('%');
10569 target_c = lang_hooks.to_target_charset ('c');
10570 target_s = lang_hooks.to_target_charset ('s');
10571 if (target_newline == 0 || target_percent == 0 || target_c == 0
10572 || target_s == 0)
10573 return false;
10574
10575 target_percent_c[0] = target_percent;
10576 target_percent_c[1] = target_c;
10577 target_percent_c[2] = '\0';
10578
10579 target_percent_s[0] = target_percent;
10580 target_percent_s[1] = target_s;
10581 target_percent_s[2] = '\0';
10582
10583 target_percent_s_newline[0] = target_percent;
10584 target_percent_s_newline[1] = target_s;
10585 target_percent_s_newline[2] = target_newline;
10586 target_percent_s_newline[3] = '\0';
10587
10588 init = true;
10589 }
10590 return true;
10591 }
10592
10593 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10594 and no overflow/underflow occurred. INEXACT is true if M was not
10595 exactly calculated. TYPE is the tree type for the result. This
10596 function assumes that the caller cleared the MPFR flags and then
10597 calculated M, so that any flag set before entering this function
10598 reflects that calculation. Return NULL_TREE if any checks fail. */
10599
10600 static tree
10601 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10602 {
10603 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10604 overflow/underflow occurred. If -frounding-math, proceed iff the
10605 result of calling FUNC was exact. */
10606 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10607 && (!flag_rounding_math || !inexact))
10608 {
10609 REAL_VALUE_TYPE rr;
10610
10611 real_from_mpfr (&rr, m, type, GMP_RNDN);
10612 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10613 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10614 but the mpfr_t is not, then we underflowed in the
10615 conversion. */
10616 if (real_isfinite (&rr)
10617 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10618 {
10619 REAL_VALUE_TYPE rmode;
10620
10621 real_convert (&rmode, TYPE_MODE (type), &rr);
10622 /* Proceed iff the specified mode can hold the value. */
10623 if (real_identical (&rmode, &rr))
10624 return build_real (type, rmode);
10625 }
10626 }
10627 return NULL_TREE;
10628 }
10629
10630 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10631 number and no overflow/underflow occurred. INEXACT is true if M
10632 was not exactly calculated. TYPE is the tree type for the result.
10633 This function assumes that the caller cleared the MPFR flags
10634 and then calculated M, so that any flag set before entering
10635 this function reflects that calculation. Return NULL_TREE if any
10636 checks fail; if FORCE_CONVERT is true, bypass the checks. */
10637
10638 static tree
10639 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10640 {
10641 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10642 overflow/underflow occurred. If -frounding-math, proceed iff the
10643 result of calling FUNC was exact. */
10644 if (force_convert
10645 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10646 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10647 && (!flag_rounding_math || !inexact)))
10648 {
10649 REAL_VALUE_TYPE re, im;
10650
10651 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10652 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10653 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10654 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10655 but the mpfr_t is not, then we underflowed in the
10656 conversion. */
10657 if (force_convert
10658 || (real_isfinite (&re) && real_isfinite (&im)
10659 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10660 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10661 {
10662 REAL_VALUE_TYPE re_mode, im_mode;
10663
10664 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10665 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10666 /* Proceed iff the specified mode can hold the value. */
10667 if (force_convert
10668 || (real_identical (&re_mode, &re)
10669 && real_identical (&im_mode, &im)))
10670 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10671 build_real (TREE_TYPE (type), im_mode));
10672 }
10673 }
10674 return NULL_TREE;
10675 }
10676
10677 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10678 the pointer *(ARG_QUO) and return the result. The type is taken
10679 from the type of ARG0 and is used for setting the precision of the
10680 calculation and results. */
10681
10682 static tree
10683 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10684 {
10685 tree const type = TREE_TYPE (arg0);
10686 tree result = NULL_TREE;
10687
10688 STRIP_NOPS (arg0);
10689 STRIP_NOPS (arg1);
10690
10691 /* To proceed, MPFR must exactly represent the target floating point
10692 format, which only happens when the target base equals two. */
10693 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10694 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10695 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10696 {
10697 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10698 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10699
10700 if (real_isfinite (ra0) && real_isfinite (ra1))
10701 {
10702 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10703 const int prec = fmt->p;
10704 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10705 tree result_rem;
10706 long integer_quo;
10707 mpfr_t m0, m1;
10708
10709 mpfr_inits2 (prec, m0, m1, NULL);
10710 mpfr_from_real (m0, ra0, GMP_RNDN);
10711 mpfr_from_real (m1, ra1, GMP_RNDN);
10712 mpfr_clear_flags ();
10713 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10714 /* Remquo is independent of the rounding mode, so pass
10715 inexact=0 to do_mpfr_ckconv(). */
10716 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10717 mpfr_clears (m0, m1, NULL);
10718 if (result_rem)
10719 {
10720 /* MPFR calculates quo in the host's long so it may
10721 return more bits in quo than the target int can hold
10722 if sizeof(host long) > sizeof(target int). This can
10723 happen even for native compilers in LP64 mode. In
10724 these cases, modulo the quo value with the largest
10725 number that the target int can hold while leaving one
10726 bit for the sign. */
10727 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10728 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10729
10730 /* Dereference the quo pointer argument. */
10731 arg_quo = build_fold_indirect_ref (arg_quo);
10732 /* Proceed iff a valid pointer type was passed in. */
10733 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10734 {
10735 /* Set the value. */
10736 tree result_quo
10737 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10738 build_int_cst (TREE_TYPE (arg_quo),
10739 integer_quo));
10740 TREE_SIDE_EFFECTS (result_quo) = 1;
10741 /* Combine the quo assignment with the rem. */
10742 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10743 result_quo, result_rem));
10744 }
10745 }
10746 }
10747 }
10748 return result;
10749 }
10750
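/* For illustration, the constant folding enabled above (a sketch, assuming
   both floating-point arguments are literal constants at fold time):

     void
     f (int *q, double *r)
     {
       // 7.0 / 3.0 rounds to the integer 2, so the remainder is 1.0 and
       // the low-order quotient bits stored through Q are 2; with constant
       // operands the whole expression folds at compile time.
       *r = __builtin_remquo (7.0, 3.0, q);
     }  */
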
10751 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10752 resulting value as a tree with type TYPE. The mpfr precision is
10753 set to the precision of TYPE. We assume that this mpfr function
10754 returns zero if the result could be calculated exactly within the
10755 requested precision. In addition, the integer pointer represented
10756 by ARG_SG will be dereferenced and set to the appropriate signgam
10757 (-1,1) value. */
10758
10759 static tree
10760 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10761 {
10762 tree result = NULL_TREE;
10763
10764 STRIP_NOPS (arg);
10765
10766 /* To proceed, MPFR must exactly represent the target floating point
10767 format, which only happens when the target base equals two. Also
10768 verify ARG is a constant and that ARG_SG is an int pointer. */
10769 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10770 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10771 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10772 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10773 {
10774 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10775
10776 /* In addition to NaN and Inf, the argument cannot be zero or a
10777 negative integer. */
10778 if (real_isfinite (ra)
10779 && ra->cl != rvc_zero
10780 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10781 {
10782 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10783 const int prec = fmt->p;
10784 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10785 int inexact, sg;
10786 mpfr_t m;
10787 tree result_lg;
10788
10789 mpfr_init2 (m, prec);
10790 mpfr_from_real (m, ra, GMP_RNDN);
10791 mpfr_clear_flags ();
10792 inexact = mpfr_lgamma (m, &sg, m, rnd);
10793 result_lg = do_mpfr_ckconv (m, type, inexact);
10794 mpfr_clear (m);
10795 if (result_lg)
10796 {
10797 tree result_sg;
10798
10799 /* Dereference the arg_sg pointer argument. */
10800 arg_sg = build_fold_indirect_ref (arg_sg);
10801 /* Assign the signgam value into *arg_sg. */
10802 result_sg = fold_build2 (MODIFY_EXPR,
10803 TREE_TYPE (arg_sg), arg_sg,
10804 build_int_cst (TREE_TYPE (arg_sg), sg));
10805 TREE_SIDE_EFFECTS (result_sg) = 1;
10806 /* Combine the signgam assignment with the lgamma result. */
10807 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10808 result_sg, result_lg));
10809 }
10810 }
10811 }
10812
10813 return result;
10814 }
10815
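/* For illustration, the folding enabled above (a sketch, assuming a
   constant argument and an int pointer for the sign):

     double
     f (int *sg)
     {
       // Gamma (3.0) == 2.0, so the result is log (2.0) and the sign
       // stored through SG is 1; with a constant argument the call folds.
       return __builtin_lgamma_r (3.0, sg);
     }  */
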
10816 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10817 mpc function FUNC on it and return the resulting value as a tree
10818 with type TYPE. The mpfr precision is set to the precision of
10819 TYPE. We assume that function FUNC returns zero if the result
10820 could be calculated exactly within the requested precision. If
10821 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10822 in the arguments and/or results. */
10823
10824 tree
10825 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10826 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10827 {
10828 tree result = NULL_TREE;
10829
10830 STRIP_NOPS (arg0);
10831 STRIP_NOPS (arg1);
10832
10833 /* To proceed, MPFR must exactly represent the target floating point
10834 format, which only happens when the target base equals two. */
10835 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10836 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10837 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10838 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10839 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10840 {
10841 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10842 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10843 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10844 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10845
10846 if (do_nonfinite
10847 || (real_isfinite (re0) && real_isfinite (im0)
10848 && real_isfinite (re1) && real_isfinite (im1)))
10849 {
10850 const struct real_format *const fmt =
10851 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10852 const int prec = fmt->p;
10853 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10854 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10855 int inexact;
10856 mpc_t m0, m1;
10857
10858 mpc_init2 (m0, prec);
10859 mpc_init2 (m1, prec);
10860 mpfr_from_real (mpc_realref (m0), re0, rnd);
10861 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10862 mpfr_from_real (mpc_realref (m1), re1, rnd);
10863 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10864 mpfr_clear_flags ();
10865 inexact = func (m0, m0, m1, crnd);
10866 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10867 mpc_clear (m0);
10868 mpc_clear (m1);
10869 }
10870 }
10871
10872 return result;
10873 }
10874
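/* For illustration, the kind of call this helper folds (a sketch; mpc_pow
   is the sort of function passed as FUNC, e.g. when folding a constant
   cpow call):

     _Complex double
     f (void)
     {
       // Both operands become complex constants, so the call can fold to
       // (approximately) 8.0 + 0.0i at compile time.
       return __builtin_cpow (2.0, 3.0);
     }  */
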
10875 /* A wrapper function for builtin folding that prevents warnings for
10876 "statement without effect" and the like, caused by removing the
10877 call node before the warning is generated. */
10878
10879 tree
10880 fold_call_stmt (gcall *stmt, bool ignore)
10881 {
10882 tree ret = NULL_TREE;
10883 tree fndecl = gimple_call_fndecl (stmt);
10884 location_t loc = gimple_location (stmt);
10885 if (fndecl && fndecl_built_in_p (fndecl)
10886 && !gimple_call_va_arg_pack_p (stmt))
10887 {
10888 int nargs = gimple_call_num_args (stmt);
10889 tree *args = (nargs > 0
10890 ? gimple_call_arg_ptr (stmt, 0)
10891 : &error_mark_node);
10892
10893 if (avoid_folding_inline_builtin (fndecl))
10894 return NULL_TREE;
10895 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10896 {
10897 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10898 }
10899 else
10900 {
10901 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10902 if (ret)
10903 {
10904 /* Propagate location information from original call to
10905 expansion of builtin. Otherwise things like
10906 maybe_emit_chk_warning, that operate on the expansion
10907 of a builtin, will use the wrong location information. */
10908 if (gimple_has_location (stmt))
10909 {
10910 tree realret = ret;
10911 if (TREE_CODE (ret) == NOP_EXPR)
10912 realret = TREE_OPERAND (ret, 0);
10913 if (CAN_HAVE_LOCATION_P (realret)
10914 && !EXPR_HAS_LOCATION (realret))
10915 SET_EXPR_LOCATION (realret, loc);
10916 return realret;
10917 }
10918 return ret;
10919 }
10920 }
10921 }
10922 return NULL_TREE;
10923 }
10924
10925 /* Look up the function in builtin_decl that corresponds to DECL
10926 and set ASMSPEC as its user assembler name. DECL must be a
10927 function decl that declares a builtin. */
10928
10929 void
10930 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10931 {
10932 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10933 && asmspec != 0);
10934
10935 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10936 set_user_assembler_name (builtin, asmspec);
10937
10938 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10939 && INT_TYPE_SIZE < BITS_PER_WORD)
10940 {
10941 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10942 set_user_assembler_libfunc ("ffs", asmspec);
10943 set_optab_libfunc (ffs_optab, mode, "ffs");
10944 }
10945 }
10946
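/* For illustration, the kind of declaration that leads here (a sketch;
   the assembler name my_ffs is made up):

     extern int ffs (int) __asm__ ("my_ffs");

   After this, expansions of __builtin_ffs that fall back to a library
   call reference my_ffs, and on targets where int is narrower than a
   word the ffs optab libfunc is redirected as well.  */
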
10947 /* Return true if DECL is a builtin that expands to a constant or similarly
10948 simple code. */
10949 bool
10950 is_simple_builtin (tree decl)
10951 {
10952 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10953 switch (DECL_FUNCTION_CODE (decl))
10954 {
10955 /* Builtins that expand to constants. */
10956 case BUILT_IN_CONSTANT_P:
10957 case BUILT_IN_EXPECT:
10958 case BUILT_IN_OBJECT_SIZE:
10959 case BUILT_IN_UNREACHABLE:
10960 /* Simple register moves or loads from stack. */
10961 case BUILT_IN_ASSUME_ALIGNED:
10962 case BUILT_IN_RETURN_ADDRESS:
10963 case BUILT_IN_EXTRACT_RETURN_ADDR:
10964 case BUILT_IN_FROB_RETURN_ADDR:
10965 case BUILT_IN_RETURN:
10966 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10967 case BUILT_IN_FRAME_ADDRESS:
10968 case BUILT_IN_VA_END:
10969 case BUILT_IN_STACK_SAVE:
10970 case BUILT_IN_STACK_RESTORE:
10971 /* Exception state returns or moves registers around. */
10972 case BUILT_IN_EH_FILTER:
10973 case BUILT_IN_EH_POINTER:
10974 case BUILT_IN_EH_COPY_VALUES:
10975 return true;
10976
10977 default:
10978 return false;
10979 }
10980
10981 return false;
10982 }
10983
10984 /* Return true if DECL is a builtin that is not expensive, i.e., one
10985 that is most probably expanded inline into reasonably simple code. This
10986 is a superset of is_simple_builtin. */
10987 bool
10988 is_inexpensive_builtin (tree decl)
10989 {
10990 if (!decl)
10991 return false;
10992 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10993 return true;
10994 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10995 switch (DECL_FUNCTION_CODE (decl))
10996 {
10997 case BUILT_IN_ABS:
10998 CASE_BUILT_IN_ALLOCA:
10999 case BUILT_IN_BSWAP16:
11000 case BUILT_IN_BSWAP32:
11001 case BUILT_IN_BSWAP64:
11002 case BUILT_IN_CLZ:
11003 case BUILT_IN_CLZIMAX:
11004 case BUILT_IN_CLZL:
11005 case BUILT_IN_CLZLL:
11006 case BUILT_IN_CTZ:
11007 case BUILT_IN_CTZIMAX:
11008 case BUILT_IN_CTZL:
11009 case BUILT_IN_CTZLL:
11010 case BUILT_IN_FFS:
11011 case BUILT_IN_FFSIMAX:
11012 case BUILT_IN_FFSL:
11013 case BUILT_IN_FFSLL:
11014 case BUILT_IN_IMAXABS:
11015 case BUILT_IN_FINITE:
11016 case BUILT_IN_FINITEF:
11017 case BUILT_IN_FINITEL:
11018 case BUILT_IN_FINITED32:
11019 case BUILT_IN_FINITED64:
11020 case BUILT_IN_FINITED128:
11021 case BUILT_IN_FPCLASSIFY:
11022 case BUILT_IN_ISFINITE:
11023 case BUILT_IN_ISINF_SIGN:
11024 case BUILT_IN_ISINF:
11025 case BUILT_IN_ISINFF:
11026 case BUILT_IN_ISINFL:
11027 case BUILT_IN_ISINFD32:
11028 case BUILT_IN_ISINFD64:
11029 case BUILT_IN_ISINFD128:
11030 case BUILT_IN_ISNAN:
11031 case BUILT_IN_ISNANF:
11032 case BUILT_IN_ISNANL:
11033 case BUILT_IN_ISNAND32:
11034 case BUILT_IN_ISNAND64:
11035 case BUILT_IN_ISNAND128:
11036 case BUILT_IN_ISNORMAL:
11037 case BUILT_IN_ISGREATER:
11038 case BUILT_IN_ISGREATEREQUAL:
11039 case BUILT_IN_ISLESS:
11040 case BUILT_IN_ISLESSEQUAL:
11041 case BUILT_IN_ISLESSGREATER:
11042 case BUILT_IN_ISUNORDERED:
11043 case BUILT_IN_VA_ARG_PACK:
11044 case BUILT_IN_VA_ARG_PACK_LEN:
11045 case BUILT_IN_VA_COPY:
11046 case BUILT_IN_TRAP:
11047 case BUILT_IN_SAVEREGS:
11048 case BUILT_IN_POPCOUNTL:
11049 case BUILT_IN_POPCOUNTLL:
11050 case BUILT_IN_POPCOUNTIMAX:
11051 case BUILT_IN_POPCOUNT:
11052 case BUILT_IN_PARITYL:
11053 case BUILT_IN_PARITYLL:
11054 case BUILT_IN_PARITYIMAX:
11055 case BUILT_IN_PARITY:
11056 case BUILT_IN_LABS:
11057 case BUILT_IN_LLABS:
11058 case BUILT_IN_PREFETCH:
11059 case BUILT_IN_ACC_ON_DEVICE:
11060 return true;
11061
11062 default:
11063 return is_simple_builtin (decl);
11064 }
11065
11066 return false;
11067 }
11068
11069 /* Return true if T is a constant and the value cast to a target char
11070 can be represented by a host char.
11071 Store the cast char constant in *P if so. */
11072
11073 bool
11074 target_char_cst_p (tree t, char *p)
11075 {
11076 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11077 return false;
11078
11079 *p = (char)tree_to_uhwi (t);
11080 return true;
11081 }
11082
11083 /* Return the maximum object size. */
11084
11085 tree
11086 max_object_size (void)
11087 {
11088 /* To do: Make this a configurable parameter. */
11089 return TYPE_MAX_VALUE (ptrdiff_type_node);
11090 }