[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-ssa.h"
77 #include "tree-ssa-live.h"
78 #include "tree-outof-ssa.h"
79
80 struct target_builtins default_target_builtins;
81 #if SWITCHABLE_TARGET
82 struct target_builtins *this_target_builtins = &default_target_builtins;
83 #endif
84
85 /* Define the names of the builtin function types and codes. */
86 const char *const built_in_class_names[BUILT_IN_LAST]
87 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
88
89 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
90 const char * built_in_names[(int) END_BUILTINS] =
91 {
92 #include "builtins.def"
93 };
94
95 /* Set up an array of builtin_info_type, making sure each element's decl is
96 initialized to NULL_TREE. */
97 builtin_info_type builtin_info[(int)END_BUILTINS];
98
99 /* Non-zero if __builtin_constant_p should be folded right away. */
100 bool force_folding_builtin_constant_p;
101
102 static int target_char_cast (tree, char *);
103 static rtx get_memory_rtx (tree, tree);
104 static int apply_args_size (void);
105 static int apply_result_size (void);
106 static rtx result_vector (int, rtx);
107 static void expand_builtin_prefetch (tree);
108 static rtx expand_builtin_apply_args (void);
109 static rtx expand_builtin_apply_args_1 (void);
110 static rtx expand_builtin_apply (rtx, rtx, rtx);
111 static void expand_builtin_return (rtx);
112 static enum type_class type_to_class (tree);
113 static rtx expand_builtin_classify_type (tree);
114 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
115 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
116 static rtx expand_builtin_interclass_mathfn (tree, rtx);
117 static rtx expand_builtin_sincos (tree);
118 static rtx expand_builtin_cexpi (tree, rtx);
119 static rtx expand_builtin_int_roundingfn (tree, rtx);
120 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
121 static rtx expand_builtin_next_arg (void);
122 static rtx expand_builtin_va_start (tree);
123 static rtx expand_builtin_va_end (tree);
124 static rtx expand_builtin_va_copy (tree);
125 static rtx inline_expand_builtin_bytecmp (tree, rtx);
126 static rtx expand_builtin_strcmp (tree, rtx);
127 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
128 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
129 static rtx expand_builtin_memchr (tree, rtx);
130 static rtx expand_builtin_memcpy (tree, rtx);
131 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
132 rtx target, tree exp,
133 memop_ret retmode,
134 bool might_overlap);
135 static rtx expand_builtin_memmove (tree, rtx);
136 static rtx expand_builtin_mempcpy (tree, rtx);
137 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
138 static rtx expand_builtin_strcat (tree);
139 static rtx expand_builtin_strcpy (tree, rtx);
140 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
141 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
142 static rtx expand_builtin_stpncpy (tree, rtx);
143 static rtx expand_builtin_strncat (tree, rtx);
144 static rtx expand_builtin_strncpy (tree, rtx);
145 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
146 static rtx expand_builtin_memset (tree, rtx, machine_mode);
147 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
148 static rtx expand_builtin_bzero (tree);
149 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
150 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_alloca (tree);
152 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
153 static rtx expand_builtin_frame_address (tree, tree);
154 static tree stabilize_va_list_loc (location_t, tree, int);
155 static rtx expand_builtin_expect (tree, rtx);
156 static rtx expand_builtin_expect_with_probability (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_classify_type (tree);
159 static tree fold_builtin_strlen (location_t, tree, tree);
160 static tree fold_builtin_inf (location_t, tree, int);
161 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
162 static bool validate_arg (const_tree, enum tree_code code);
163 static rtx expand_builtin_fabs (tree, rtx, rtx);
164 static rtx expand_builtin_signbit (tree, rtx);
165 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
166 static tree fold_builtin_isascii (location_t, tree);
167 static tree fold_builtin_toascii (location_t, tree);
168 static tree fold_builtin_isdigit (location_t, tree);
169 static tree fold_builtin_fabs (location_t, tree, tree);
170 static tree fold_builtin_abs (location_t, tree, tree);
171 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
172 enum tree_code);
173 static tree fold_builtin_varargs (location_t, tree, tree*, int);
174
175 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_strspn (location_t, tree, tree, tree);
177 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
178
179 static rtx expand_builtin_object_size (tree);
180 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
181 enum built_in_function);
182 static void maybe_emit_chk_warning (tree, enum built_in_function);
183 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_free_warning (tree);
185 static tree fold_builtin_object_size (tree, tree);
186
187 unsigned HOST_WIDE_INT target_newline;
188 unsigned HOST_WIDE_INT target_percent;
189 static unsigned HOST_WIDE_INT target_c;
190 static unsigned HOST_WIDE_INT target_s;
191 char target_percent_c[3];
192 char target_percent_s[3];
193 char target_percent_s_newline[4];
194 static tree do_mpfr_remquo (tree, tree, tree);
195 static tree do_mpfr_lgamma_r (tree, tree, tree);
196 static void expand_builtin_sync_synchronize (void);
197
198 /* Return true if NAME starts with __builtin_, __sync_, or __atomic_. */
199
200 static bool
201 is_builtin_name (const char *name)
202 {
203 if (strncmp (name, "__builtin_", 10) == 0)
204 return true;
205 if (strncmp (name, "__sync_", 7) == 0)
206 return true;
207 if (strncmp (name, "__atomic_", 9) == 0)
208 return true;
209 return false;
210 }
211
212 /* Return true if NODE should be considered for inline expansion regardless
213 of the optimization level. This means whenever a function is invoked with
214 its "internal" name, which normally contains the prefix "__builtin". */
215
216 bool
217 called_as_built_in (tree node)
218 {
219 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
220 we want the name used to call the function, not the name it
221 will have. */
222 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
223 return is_builtin_name (name);
224 }
225
226 /* Compute values M and N such that M divides (address of EXP - N) and such
227 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
228 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
229 *ALIGNP and any bit-offset to *BITPOSP.
230
231 Note that the address (and thus the alignment) computed here is based
232 on the address to which a symbol resolves, whereas DECL_ALIGN is based
233 on the address at which an object is actually located. These two
234 addresses are not always the same. For example, on ARM targets,
235 the address &foo of a Thumb function foo() has the lowest bit set,
236 whereas foo() itself starts on an even address.
237
238 If ADDR_P is true we are taking the address of the memory reference EXP
239 and thus cannot rely on the access taking place. */
240
241 static bool
242 get_object_alignment_2 (tree exp, unsigned int *alignp,
243 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
244 {
245 poly_int64 bitsize, bitpos;
246 tree offset;
247 machine_mode mode;
248 int unsignedp, reversep, volatilep;
249 unsigned int align = BITS_PER_UNIT;
250 bool known_alignment = false;
251
252 /* Get the innermost object and the constant (bitpos) and possibly
253 variable (offset) offset of the access. */
254 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
255 &unsignedp, &reversep, &volatilep);
256
257 /* Extract alignment information from the innermost object and
258 possibly adjust bitpos and offset. */
259 if (TREE_CODE (exp) == FUNCTION_DECL)
260 {
261 /* Function addresses can encode extra information besides their
262 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
263 allows the low bit to be used as a virtual bit, we know
264 that the address itself must be at least 2-byte aligned. */
265 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
266 align = 2 * BITS_PER_UNIT;
267 }
268 else if (TREE_CODE (exp) == LABEL_DECL)
269 ;
270 else if (TREE_CODE (exp) == CONST_DECL)
271 {
272 /* The alignment of a CONST_DECL is determined by its initializer. */
273 exp = DECL_INITIAL (exp);
274 align = TYPE_ALIGN (TREE_TYPE (exp));
275 if (CONSTANT_CLASS_P (exp))
276 align = targetm.constant_alignment (exp, align);
277
278 known_alignment = true;
279 }
280 else if (DECL_P (exp))
281 {
282 align = DECL_ALIGN (exp);
283 known_alignment = true;
284 }
285 else if (TREE_CODE (exp) == INDIRECT_REF
286 || TREE_CODE (exp) == MEM_REF
287 || TREE_CODE (exp) == TARGET_MEM_REF)
288 {
289 tree addr = TREE_OPERAND (exp, 0);
290 unsigned ptr_align;
291 unsigned HOST_WIDE_INT ptr_bitpos;
292 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
293
294 /* If the address is explicitly aligned, handle that. */
295 if (TREE_CODE (addr) == BIT_AND_EXPR
296 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
297 {
298 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
299 ptr_bitmask *= BITS_PER_UNIT;
300 align = least_bit_hwi (ptr_bitmask);
301 addr = TREE_OPERAND (addr, 0);
302 }
303
304 known_alignment
305 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
306 align = MAX (ptr_align, align);
307
308 /* Re-apply explicit alignment to the bitpos. */
309 ptr_bitpos &= ptr_bitmask;
310
311 /* The alignment of the pointer operand in a TARGET_MEM_REF
312 has to take the variable offset parts into account. */
313 if (TREE_CODE (exp) == TARGET_MEM_REF)
314 {
315 if (TMR_INDEX (exp))
316 {
317 unsigned HOST_WIDE_INT step = 1;
318 if (TMR_STEP (exp))
319 step = TREE_INT_CST_LOW (TMR_STEP (exp));
320 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
321 }
322 if (TMR_INDEX2 (exp))
323 align = BITS_PER_UNIT;
324 known_alignment = false;
325 }
326
327 /* When EXP is an actual memory reference then we can use
328 TYPE_ALIGN of a pointer indirection to derive alignment.
329 Do so only if get_pointer_alignment_1 did not reveal absolute
330 alignment knowledge and if using that alignment would
331 improve the situation. */
332 unsigned int talign;
333 if (!addr_p && !known_alignment
334 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
335 && talign > align)
336 align = talign;
337 else
338 {
339 /* Else adjust bitpos accordingly. */
340 bitpos += ptr_bitpos;
341 if (TREE_CODE (exp) == MEM_REF
342 || TREE_CODE (exp) == TARGET_MEM_REF)
343 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
344 }
345 }
346 else if (TREE_CODE (exp) == STRING_CST)
347 {
348 /* STRING_CST are the only constant objects we allow to be not
349 wrapped inside a CONST_DECL. */
350 align = TYPE_ALIGN (TREE_TYPE (exp));
351 if (CONSTANT_CLASS_P (exp))
352 align = targetm.constant_alignment (exp, align);
353
354 known_alignment = true;
355 }
356
357 /* If there is a non-constant offset part extract the maximum
358 alignment that can prevail. */
359 if (offset)
360 {
361 unsigned int trailing_zeros = tree_ctz (offset);
362 if (trailing_zeros < HOST_BITS_PER_INT)
363 {
364 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
365 if (inner)
366 align = MIN (align, inner);
367 }
368 }
369
370 /* Account for the alignment of runtime coefficients, so that the constant
371 bitpos is guaranteed to be accurate. */
372 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
373 if (alt_align != 0 && alt_align < align)
374 {
375 align = alt_align;
376 known_alignment = false;
377 }
378
379 *alignp = align;
380 *bitposp = bitpos.coeffs[0] & (align - 1);
381 return known_alignment;
382 }
383
384 /* For a memory reference expression EXP compute values M and N such that M
385 divides (&EXP - N) and such that N < M. If these numbers can be determined,
386 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
387 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
388
389 bool
390 get_object_alignment_1 (tree exp, unsigned int *alignp,
391 unsigned HOST_WIDE_INT *bitposp)
392 {
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
394 }
395
396 /* Return the alignment in bits of EXP, an object. */
397
398 unsigned int
399 get_object_alignment (tree exp)
400 {
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
403
404 get_object_alignment_1 (exp, &align, &bitpos);
405
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
408
409 if (bitpos != 0)
410 align = least_bit_hwi (bitpos);
411 return align;
412 }
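
/* Illustrative example of the reduction above (made-up values, not part
   of the original source).  If get_object_alignment_1 reports ALIGN = 64
   and BITPOS = 16 (both in bits), the address is known to be 2 bytes past
   a 64-bit boundary, so the guaranteed alignment is only the lowest set
   bit of BITPOS:

     unsigned HOST_WIDE_INT bitpos = 16;   // hypothetical result
     unsigned int align = 64;              // hypothetical result
     if (bitpos != 0)
       align = least_bit_hwi (bitpos);     // 16 bits, i.e. 2 bytes

   which is exactly the value get_object_alignment returns.  */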
413
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
418
419 If EXP is not a pointer, false is returned too. */
420
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
424 {
425 STRIP_NOPS (exp);
426
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 {
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
439 {
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
442 {
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
446 }
447 }
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
451 }
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 {
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 {
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
466 /* We cannot really tell whether this result is an approximation. */
467 return false;
468 }
469 else
470 {
471 *bitposp = 0;
472 *alignp = BITS_PER_UNIT;
473 return false;
474 }
475 }
476 else if (TREE_CODE (exp) == INTEGER_CST)
477 {
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
482 }
483
484 *bitposp = 0;
485 *alignp = BITS_PER_UNIT;
486 return false;
487 }
488
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
495
496 unsigned int
497 get_pointer_alignment (tree exp)
498 {
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
501
502 get_pointer_alignment_1 (exp, &align, &bitpos);
503
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
506
507 if (bitpos != 0)
508 align = least_bit_hwi (bitpos);
509
510 return align;
511 }
512
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516
517 unsigned
518 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519 {
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521
522 unsigned n;
523
524 if (eltsize == 1)
525 {
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
528 {
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
532 }
533 }
534 else
535 {
536 for (n = 0; n < maxelts; n++)
537 {
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
541 }
542 }
543 return n;
544 }
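
/* Minimal usage sketch for string_length (illustrative only, not part of
   the original source):

     const char buf[] = "ab\0cd";
     unsigned n1 = string_length (buf, 1, sizeof buf);  // 2: stops at NUL

     // One 4-byte element holding 'a', followed by an all-zero element.
     const char wide[8] = { 'a', 0, 0, 0,  0, 0, 0, 0 };
     unsigned n2 = string_length (wide, 4, 2);          // 1

   For ELTSIZE > 1 an element counts as the terminator only when all of
   its ELTSIZE bytes are zero, so the count does not depend on byte
   order.  */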
545
546 /* For a call at LOC to a function FN that expects a string in the argument
547 ARG, issue a diagnostic due to it being called with an argument
548 declared at DECL that is a character array with no terminating NUL. */
549
550 void
551 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
552 {
553 if (TREE_NO_WARNING (arg))
554 return;
555
556 loc = expansion_point_location_if_in_system_header (loc);
557
558 if (warning_at (loc, OPT_Wstringop_overflow_,
559 "%qs argument missing terminating nul", fn))
560 {
561 inform (DECL_SOURCE_LOCATION (decl),
562 "referenced argument declared here");
563 TREE_NO_WARNING (arg) = 1;
564 }
565 }
566
567 /* For a call EXPR (which may be null) that takes the string argument
568 SRC, return false if SRC is a character array
569 with no terminating NUL. When nonnull, BOUND is the number of
570 characters in which to expect the terminating NUL.
571 When EXPR is nonnull also issues a warning. */
572
573 bool
574 check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
575 {
576 tree size;
577 bool exact;
578 tree nonstr = unterminated_array (src, &size, &exact);
579 if (!nonstr)
580 return true;
581
582 /* NONSTR refers to the non-nul terminated constant array and SIZE
583 is the constant size of the array in bytes. EXACT is true when
584 SIZE is exact. */
585
586 if (bound)
587 {
588 wide_int min, max;
589 if (TREE_CODE (bound) == INTEGER_CST)
590 min = max = wi::to_wide (bound);
591 else
592 {
593 value_range_kind rng = get_range_info (bound, &min, &max);
594 if (rng != VR_RANGE)
595 return true;
596 }
597
598 if (wi::leu_p (min, wi::to_wide (size)))
599 return true;
600 }
601
602 if (expr && !TREE_NO_WARNING (expr))
603 {
604 tree fndecl = get_callee_fndecl (expr);
605 const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
606 warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
607 }
608
609 return false;
610 }
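
/* Illustrative source-level case these helpers diagnose (hypothetical
   names, not part of the original source):

     const char a[4] = "abcd";          // no room for a terminating NUL
     size_t n = __builtin_strlen (a);   // warn_string_no_nul: "argument
                                        //   missing terminating nul"

   unterminated_array below identifies A as the unterminated object and
   check_nul_terminated_array then emits the warning unless a BOUND
   argument already limits the access to within the array.  */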
611
612 /* If EXP refers to an unterminated constant character array return
613 the declaration of the object of which the array is a member or
614 element and if SIZE is not null, set *SIZE to the size of
615 the unterminated array and set *EXACT if the size is exact or
616 clear it otherwise. Otherwise return null. */
617
618 tree
619 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
620 {
621 /* C_STRLEN will return NULL and set DECL in the info
622 structure if EXP references an unterminated array. */
623 c_strlen_data lendata = { };
624 tree len = c_strlen (exp, 1, &lendata);
625 if (len == NULL_TREE && lendata.minlen && lendata.decl)
626 {
627 if (size)
628 {
629 len = lendata.minlen;
630 if (lendata.off)
631 {
632 /* Constant offsets are already accounted for in LENDATA.MINLEN,
633 but not in an SSA_NAME + CST expression. */
634 if (TREE_CODE (lendata.off) == INTEGER_CST)
635 *exact = true;
636 else if (TREE_CODE (lendata.off) == PLUS_EXPR
637 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
638 {
639 /* Subtract the offset from the size of the array. */
640 *exact = false;
641 tree temp = TREE_OPERAND (lendata.off, 1);
642 temp = fold_convert (ssizetype, temp);
643 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
644 }
645 else
646 *exact = false;
647 }
648 else
649 *exact = true;
650
651 *size = len;
652 }
653 return lendata.decl;
654 }
655
656 return NULL_TREE;
657 }
658
659 /* Compute the length of a null-terminated character string or wide
660 character string handling character sizes of 1, 2, and 4 bytes.
661 TREE_STRING_LENGTH is not the right way because it evaluates to
662 the size of the character array in bytes (as opposed to characters)
663 and because it can contain a zero byte in the middle.
664
665 ONLY_VALUE should be nonzero if the result is not going to be emitted
666 into the instruction stream and zero if it is going to be expanded.
667 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
668 is returned, otherwise NULL, since
669 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
670 evaluate the side-effects.
671
672 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
673 accesses. Note that this implies the result is not going to be emitted
674 into the instruction stream.
675
676 Additional information about the string accessed may be recorded
677 in DATA. For example, if ARG references an unterminated string,
678 then the declaration will be stored in the DECL field. If the
679 length of the unterminated string can be determined, it'll be
680 stored in the LEN field. Note this length could well be different
681 than what a C strlen call would return.
682
683 ELTSIZE is 1 for normal single byte character strings, and 2 or
684 4 for wide character strings. ELTSIZE is by default 1.
685
686 The value returned is of type `ssizetype'. */
687
688 tree
689 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
690 {
691 /* If we were not passed a DATA pointer, then get one to a local
692 structure. That avoids having to check DATA for NULL before
693 each time we want to use it. */
694 c_strlen_data local_strlen_data = { };
695 if (!data)
696 data = &local_strlen_data;
697
698 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
699
700 tree src = STRIP_NOPS (arg);
701 if (TREE_CODE (src) == COND_EXPR
702 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
703 {
704 tree len1, len2;
705
706 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
707 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
708 if (tree_int_cst_equal (len1, len2))
709 return len1;
710 }
711
712 if (TREE_CODE (src) == COMPOUND_EXPR
713 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
714 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
715
716 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
717
718 /* Offset from the beginning of the string in bytes. */
719 tree byteoff;
720 tree memsize;
721 tree decl;
722 src = string_constant (src, &byteoff, &memsize, &decl);
723 if (src == 0)
724 return NULL_TREE;
725
726 /* Determine the size of the string element. */
727 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
728 return NULL_TREE;
729
730 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
731 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
732 in case the latter is less than the size of the array, such as when
733 SRC refers to a short string literal used to initialize a large array.
734 In that case, the elements of the array after the terminating NUL are
735 all NUL. */
736 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
737 strelts = strelts / eltsize;
738
739 if (!tree_fits_uhwi_p (memsize))
740 return NULL_TREE;
741
742 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
743
744 /* PTR can point to the byte representation of any string type, including
745 char* and wchar_t*. */
746 const char *ptr = TREE_STRING_POINTER (src);
747
748 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
749 {
750 /* The code below works only for single byte character types. */
751 if (eltsize != 1)
752 return NULL_TREE;
753
754 /* If the string has an internal NUL character followed by any
755 non-NUL characters (e.g., "foo\0bar"), we can't compute
756 the offset to the following NUL if we don't know where to
757 start searching for it. */
758 unsigned len = string_length (ptr, eltsize, strelts);
759
760 /* Return when an embedded null character is found or none at all.
761 In the latter case, set the DECL/LEN field in the DATA structure
762 so that callers may examine them. */
763 if (len + 1 < strelts)
764 return NULL_TREE;
765 else if (len >= maxelts)
766 {
767 data->decl = decl;
768 data->off = byteoff;
769 data->minlen = ssize_int (len);
770 return NULL_TREE;
771 }
772
773 /* For empty strings the result should be zero. */
774 if (len == 0)
775 return ssize_int (0);
776
777 /* We don't know the starting offset, but we do know that the string
778 has no internal zero bytes. If the offset falls within the bounds
779 of the string subtract the offset from the length of the string,
780 and return that. Otherwise the length is zero. Take care to
781 use SAVE_EXPR in case the OFFSET has side-effects. */
782 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
783 : byteoff;
784 offsave = fold_convert_loc (loc, sizetype, offsave);
785 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
786 size_int (len));
787 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
788 offsave);
789 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
790 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
791 build_zero_cst (ssizetype));
792 }
793
794 /* Offset from the beginning of the string in elements. */
795 HOST_WIDE_INT eltoff;
796
797 /* We have a known offset into the string. Start searching there for
798 a null character if we can represent it as a single HOST_WIDE_INT. */
799 if (byteoff == 0)
800 eltoff = 0;
801 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
802 eltoff = -1;
803 else
804 eltoff = tree_to_uhwi (byteoff) / eltsize;
805
806 /* If the offset is known to be out of bounds, warn, and call strlen at
807 runtime. */
808 if (eltoff < 0 || eltoff >= maxelts)
809 {
810 /* Suppress multiple warnings for propagated constant strings. */
811 if (only_value != 2
812 && !TREE_NO_WARNING (arg)
813 && warning_at (loc, OPT_Warray_bounds,
814 "offset %qwi outside bounds of constant string",
815 eltoff))
816 {
817 if (decl)
818 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
819 TREE_NO_WARNING (arg) = 1;
820 }
821 return NULL_TREE;
822 }
823
824 /* If eltoff is larger than strelts but less than maxelts the
825 string length is zero, since the excess memory will be zero. */
826 if (eltoff > strelts)
827 return ssize_int (0);
828
829 /* Use strlen to search for the first zero byte. Since any strings
830 constructed with build_string will have nulls appended, we win even
831 if we get handed something like (char[4])"abcd".
832
833 Since ELTOFF is our starting index into the string, no further
834 calculation is needed. */
835 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
836 strelts - eltoff);
837
838 /* Don't know what to return if there was no zero termination.
839 Ideally this would turn into a gcc_checking_assert over time.
840 Set DECL/LEN so callers can examine them. */
841 if (len >= maxelts - eltoff)
842 {
843 data->decl = decl;
844 data->off = byteoff;
845 data->minlen = ssize_int (len);
846 return NULL_TREE;
847 }
848
849 return ssize_int (len);
850 }
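
/* Minimal sketch of the typical calling convention for c_strlen
   (illustrative only; unterminated_array above is a real caller that
   follows this pattern):

     c_strlen_data lendata = { };
     tree len = c_strlen (arg, 1, &lendata);
     if (len)
       ;   // ARG is a constant string and LEN is its length (ssizetype)
     else if (lendata.decl)
       ;   // ARG refers to an unterminated array declared at
           //   lendata.decl, with at least lendata.minlen leading
           //   non-NUL characters

   ARG here stands for whatever tree expression a caller has at hand.  */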
851
852 /* Return a constant integer corresponding to target reading
853 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
854 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
855 are assumed to be zero, otherwise it reads as many characters
856 as needed. */
857
858 rtx
859 c_readstr (const char *str, scalar_int_mode mode,
860 bool null_terminated_p/*=true*/)
861 {
862 HOST_WIDE_INT ch;
863 unsigned int i, j;
864 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
865
866 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
867 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
868 / HOST_BITS_PER_WIDE_INT;
869
870 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
871 for (i = 0; i < len; i++)
872 tmp[i] = 0;
873
874 ch = 1;
875 for (i = 0; i < GET_MODE_SIZE (mode); i++)
876 {
877 j = i;
878 if (WORDS_BIG_ENDIAN)
879 j = GET_MODE_SIZE (mode) - i - 1;
880 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
881 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
882 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
883 j *= BITS_PER_UNIT;
884
885 if (ch || !null_terminated_p)
886 ch = (unsigned char) str[i];
887 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
888 }
889
890 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
891 return immed_wide_int_const (c, mode);
892 }
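
/* Worked example for c_readstr (values assume the noted endianness and
   are illustrative only, not part of the original source).  Reading
   "abc" in SImode picks up the bytes 'a', 'b', 'c', 0:

     rtx x = c_readstr ("abc", SImode);
     // little-endian target: CONST_INT 0x00636261
     // big-endian target:    CONST_INT 0x61626300

   With NULL_TERMINATED_P true (the default) every byte after the first
   '\0' reads as zero, so the result never depends on memory beyond the
   terminator.  */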
893
894 /* Cast a target constant CST to target CHAR and if that value fits into
895 host char type, return zero and put that value into variable pointed to by
896 P. */
897
898 static int
899 target_char_cast (tree cst, char *p)
900 {
901 unsigned HOST_WIDE_INT val, hostval;
902
903 if (TREE_CODE (cst) != INTEGER_CST
904 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
905 return 1;
906
907 /* Do not care if it fits or not right here. */
908 val = TREE_INT_CST_LOW (cst);
909
910 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
911 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
912
913 hostval = val;
914 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
915 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
916
917 if (val != hostval)
918 return 1;
919
920 *p = hostval;
921 return 0;
922 }
923
924 /* Similar to save_expr, but assumes that arbitrary code is not executed
925 in between the multiple evaluations. In particular, we assume that a
926 non-addressable local variable will not be modified. */
927
928 static tree
929 builtin_save_expr (tree exp)
930 {
931 if (TREE_CODE (exp) == SSA_NAME
932 || (TREE_ADDRESSABLE (exp) == 0
933 && (TREE_CODE (exp) == PARM_DECL
934 || (VAR_P (exp) && !TREE_STATIC (exp)))))
935 return exp;
936
937 return save_expr (exp);
938 }
939
940 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
941 times to get the address of either a higher stack frame, or a return
942 address located within it (depending on FNDECL_CODE). */
943
944 static rtx
945 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
946 {
947 int i;
948 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
949 if (tem == NULL_RTX)
950 {
951 /* For a zero count with __builtin_return_address, we don't care what
952 frame address we return, because target-specific definitions will
953 override us. Therefore frame pointer elimination is OK, and using
954 the soft frame pointer is OK.
955
956 For a nonzero count, or a zero count with __builtin_frame_address,
957 we require a stable offset from the current frame pointer to the
958 previous one, so we must use the hard frame pointer, and
959 we must disable frame pointer elimination. */
960 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
961 tem = frame_pointer_rtx;
962 else
963 {
964 tem = hard_frame_pointer_rtx;
965
966 /* Tell reload not to eliminate the frame pointer. */
967 crtl->accesses_prior_frames = 1;
968 }
969 }
970
971 if (count > 0)
972 SETUP_FRAME_ADDRESSES ();
973
974 /* On the SPARC, the return address is not in the frame, it is in a
975 register. There is no way to access it off of the current frame
976 pointer, but it can be accessed off the previous frame pointer by
977 reading the value from the register window save area. */
978 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
979 count--;
980
981 /* Scan back COUNT frames to the specified frame. */
982 for (i = 0; i < count; i++)
983 {
984 /* Assume the dynamic chain pointer is in the word that the
985 frame address points to, unless otherwise specified. */
986 tem = DYNAMIC_CHAIN_ADDRESS (tem);
987 tem = memory_address (Pmode, tem);
988 tem = gen_frame_mem (Pmode, tem);
989 tem = copy_to_reg (tem);
990 }
991
992 /* For __builtin_frame_address, return what we've got. But, on
993 the SPARC for example, we may have to add a bias. */
994 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
995 return FRAME_ADDR_RTX (tem);
996
997 /* For __builtin_return_address, get the return address from that frame. */
998 #ifdef RETURN_ADDR_RTX
999 tem = RETURN_ADDR_RTX (count, tem);
1000 #else
1001 tem = memory_address (Pmode,
1002 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1003 tem = gen_frame_mem (Pmode, tem);
1004 #endif
1005 return tem;
1006 }
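
/* Illustrative user-level uses of the builtins expanded above (not part
   of the original source):

     void *ra = __builtin_return_address (0);   // return address of the
                                                //   current function
     void *fp = __builtin_frame_address (1);    // caller's frame address

   A nonzero COUNT walks the dynamic chain exactly as in the loop above,
   which is only reliable when the intervening frames keep a frame
   pointer.  */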
1007
1008 /* Alias set used for setjmp buffer. */
1009 static alias_set_type setjmp_alias_set = -1;
1010
1011 /* Construct the leading half of a __builtin_setjmp call. Control will
1012 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1013 exception handling code. */
1014
1015 void
1016 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1017 {
1018 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 rtx stack_save;
1020 rtx mem;
1021
1022 if (setjmp_alias_set == -1)
1023 setjmp_alias_set = new_alias_set ();
1024
1025 buf_addr = convert_memory_address (Pmode, buf_addr);
1026
1027 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1028
1029 /* We store the frame pointer and the address of receiver_label in
1030 the buffer and use the rest of it for the stack save area, which
1031 is machine-dependent. */
1032
1033 mem = gen_rtx_MEM (Pmode, buf_addr);
1034 set_mem_alias_set (mem, setjmp_alias_set);
1035 emit_move_insn (mem, hard_frame_pointer_rtx);
1036
1037 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1038 GET_MODE_SIZE (Pmode)));
1039 set_mem_alias_set (mem, setjmp_alias_set);
1040
1041 emit_move_insn (validize_mem (mem),
1042 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1043
1044 stack_save = gen_rtx_MEM (sa_mode,
1045 plus_constant (Pmode, buf_addr,
1046 2 * GET_MODE_SIZE (Pmode)));
1047 set_mem_alias_set (stack_save, setjmp_alias_set);
1048 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1049
1050 /* If there is further processing to do, do it. */
1051 if (targetm.have_builtin_setjmp_setup ())
1052 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1053
1054 /* We have a nonlocal label. */
1055 cfun->has_nonlocal_label = 1;
1056 }
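
/* Sketch of the buffer layout established above, in Pmode-sized words
   (a reading of the code, not an additional guarantee):

     buf[0]    hard frame pointer
     buf[1]    address of RECEIVER_LABEL
     buf[2]..  stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   the slots back at the same offsets.  */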
1057
1058 /* Construct the trailing part of a __builtin_setjmp call. This is
1059 also called directly by the SJLJ exception handling code.
1060 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1061
1062 void
1063 expand_builtin_setjmp_receiver (rtx receiver_label)
1064 {
1065 rtx chain;
1066
1067 /* Mark the FP as used when we get here, so we have to make sure it's
1068 marked as used by this function. */
1069 emit_use (hard_frame_pointer_rtx);
1070
1071 /* Mark the static chain as clobbered here so life information
1072 doesn't get messed up for it. */
1073 chain = rtx_for_static_chain (current_function_decl, true);
1074 if (chain && REG_P (chain))
1075 emit_clobber (chain);
1076
1077 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1078 {
1079 /* If the argument pointer can be eliminated in favor of the
1080 frame pointer, we don't need to restore it. We assume here
1081 that if such an elimination is present, it can always be used.
1082 This is the case on all known machines; if we don't make this
1083 assumption, we do unnecessary saving on many machines. */
1084 size_t i;
1085 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1086
1087 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1088 if (elim_regs[i].from == ARG_POINTER_REGNUM
1089 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1090 break;
1091
1092 if (i == ARRAY_SIZE (elim_regs))
1093 {
1094 /* Now restore our arg pointer from the address at which it
1095 was saved in our stack frame. */
1096 emit_move_insn (crtl->args.internal_arg_pointer,
1097 copy_to_reg (get_arg_pointer_save_area ()));
1098 }
1099 }
1100
1101 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1102 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1103 else if (targetm.have_nonlocal_goto_receiver ())
1104 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1105 else
1106 { /* Nothing */ }
1107
1108 /* We must not allow the code we just generated to be reordered by
1109 scheduling. Specifically, the update of the frame pointer must
1110 happen immediately, not later. */
1111 emit_insn (gen_blockage ());
1112 }
1113
1114 /* __builtin_longjmp is passed a pointer to an array of five words (not
1115 all will be used on all machines). It operates similarly to the C
1116 library function of the same name, but is more efficient. Much of
1117 the code below is copied from the handling of non-local gotos. */
1118
1119 static void
1120 expand_builtin_longjmp (rtx buf_addr, rtx value)
1121 {
1122 rtx fp, lab, stack;
1123 rtx_insn *insn, *last;
1124 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1125
1126 /* DRAP is needed for stack realignment if longjmp is expanded in the
1127 current function. */
1128 if (SUPPORTS_STACK_ALIGNMENT)
1129 crtl->need_drap = true;
1130
1131 if (setjmp_alias_set == -1)
1132 setjmp_alias_set = new_alias_set ();
1133
1134 buf_addr = convert_memory_address (Pmode, buf_addr);
1135
1136 buf_addr = force_reg (Pmode, buf_addr);
1137
1138 /* We require that the user pass a second argument of 1, because
1139 that is what builtin_setjmp will return. */
1140 gcc_assert (value == const1_rtx);
1141
1142 last = get_last_insn ();
1143 if (targetm.have_builtin_longjmp ())
1144 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1145 else
1146 {
1147 fp = gen_rtx_MEM (Pmode, buf_addr);
1148 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1149 GET_MODE_SIZE (Pmode)));
1150
1151 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1152 2 * GET_MODE_SIZE (Pmode)));
1153 set_mem_alias_set (fp, setjmp_alias_set);
1154 set_mem_alias_set (lab, setjmp_alias_set);
1155 set_mem_alias_set (stack, setjmp_alias_set);
1156
1157 /* Pick up FP, label, and SP from the block and jump. This code is
1158 from expand_goto in stmt.c; see there for detailed comments. */
1159 if (targetm.have_nonlocal_goto ())
1160 /* We have to pass a value to the nonlocal_goto pattern that will
1161 get copied into the static_chain pointer, but it does not matter
1162 what that value is, because builtin_setjmp does not use it. */
1163 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1164 else
1165 {
1166 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1167 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1168
1169 lab = copy_to_reg (lab);
1170
1171 /* Restore the frame pointer and stack pointer. We must use a
1172 temporary since the setjmp buffer may be a local. */
1173 fp = copy_to_reg (fp);
1174 emit_stack_restore (SAVE_NONLOCAL, stack);
1175
1176 /* Ensure the frame pointer move is not optimized. */
1177 emit_insn (gen_blockage ());
1178 emit_clobber (hard_frame_pointer_rtx);
1179 emit_clobber (frame_pointer_rtx);
1180 emit_move_insn (hard_frame_pointer_rtx, fp);
1181
1182 emit_use (hard_frame_pointer_rtx);
1183 emit_use (stack_pointer_rtx);
1184 emit_indirect_jump (lab);
1185 }
1186 }
1187
1188 /* Search backwards and mark the jump insn as a non-local goto.
1189 Note that this precludes the use of __builtin_longjmp to a
1190 __builtin_setjmp target in the same function. However, we've
1191 already cautioned the user that these functions are for
1192 internal exception handling use only. */
1193 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1194 {
1195 gcc_assert (insn != last);
1196
1197 if (JUMP_P (insn))
1198 {
1199 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1200 break;
1201 }
1202 else if (CALL_P (insn))
1203 break;
1204 }
1205 }
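
/* Illustrative user-level pairing of the two builtins (hypothetical code,
   not from the original source; as cautioned above they are meant for
   internal exception-handling use, and the jump must not target a
   __builtin_setjmp in the same function):

     void *buf[5];

     void unwind (void)
     {
       __builtin_longjmp (buf, 1);     // second argument must be 1
     }

     int f (void)
     {
       if (__builtin_setjmp (buf) == 0)
         return do_work ();            // may call unwind () somewhere
       return -1;                      // reached via __builtin_longjmp
     }

   do_work is a hypothetical helper.  */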
1206
1207 static inline bool
1208 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1209 {
1210 return (iter->i < iter->n);
1211 }
1212
1213 /* This function validates the types of a function call argument list
1214 against a specified list of tree_codes. If the last specifier is a 0,
1215 that represents an ellipsis, otherwise the last specifier must be a
1216 VOID_TYPE. */
1217
1218 static bool
1219 validate_arglist (const_tree callexpr, ...)
1220 {
1221 enum tree_code code;
1222 bool res = false;
1223 va_list ap;
1224 const_call_expr_arg_iterator iter;
1225 const_tree arg;
1226
1227 va_start (ap, callexpr);
1228 init_const_call_expr_arg_iterator (callexpr, &iter);
1229
1230 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1231 tree fn = CALL_EXPR_FN (callexpr);
1232 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1233
1234 for (unsigned argno = 1; ; ++argno)
1235 {
1236 code = (enum tree_code) va_arg (ap, int);
1237
1238 switch (code)
1239 {
1240 case 0:
1241 /* This signifies an ellipsis; any further arguments are all ok. */
1242 res = true;
1243 goto end;
1244 case VOID_TYPE:
1245 /* This signifies an endlink, if no arguments remain, return
1246 true, otherwise return false. */
1247 res = !more_const_call_expr_args_p (&iter);
1248 goto end;
1249 case POINTER_TYPE:
1250 /* The actual argument must be nonnull when either the whole
1251 called function has been declared nonnull, or when the formal
1252 argument corresponding to the actual argument has been. */
1253 if (argmap
1254 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1255 {
1256 arg = next_const_call_expr_arg (&iter);
1257 if (!validate_arg (arg, code) || integer_zerop (arg))
1258 goto end;
1259 break;
1260 }
1261 /* FALLTHRU */
1262 default:
1263 /* If no parameters remain or the parameter's code does not
1264 match the specified code, return false. Otherwise continue
1265 checking any remaining arguments. */
1266 arg = next_const_call_expr_arg (&iter);
1267 if (!validate_arg (arg, code))
1268 goto end;
1269 break;
1270 }
1271 }
1272
1273 /* We use gotos here so that every path funnels into the single
1274 va_end call below. */
1275 end: ;
1276 va_end (ap);
1277
1278 BITMAP_FREE (argmap);
1279
1280 return res;
1281 }
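
/* Minimal usage sketch, mirroring the call in expand_builtin_nonlocal_goto
   just below (illustrative only):

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;    // wrong number or kinds of arguments

   A trailing VOID_TYPE means the argument list must end there, whereas a
   trailing 0 would accept any further arguments.  */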
1282
1283 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1284 and the address of the save area. */
1285
1286 static rtx
1287 expand_builtin_nonlocal_goto (tree exp)
1288 {
1289 tree t_label, t_save_area;
1290 rtx r_label, r_save_area, r_fp, r_sp;
1291 rtx_insn *insn;
1292
1293 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1294 return NULL_RTX;
1295
1296 t_label = CALL_EXPR_ARG (exp, 0);
1297 t_save_area = CALL_EXPR_ARG (exp, 1);
1298
1299 r_label = expand_normal (t_label);
1300 r_label = convert_memory_address (Pmode, r_label);
1301 r_save_area = expand_normal (t_save_area);
1302 r_save_area = convert_memory_address (Pmode, r_save_area);
1303 /* Copy the address of the save location to a register just in case it was
1304 based on the frame pointer. */
1305 r_save_area = copy_to_reg (r_save_area);
1306 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1307 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1308 plus_constant (Pmode, r_save_area,
1309 GET_MODE_SIZE (Pmode)));
1310
1311 crtl->has_nonlocal_goto = 1;
1312
1313 /* ??? We no longer need to pass the static chain value, afaik. */
1314 if (targetm.have_nonlocal_goto ())
1315 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1316 else
1317 {
1318 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1319 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1320
1321 r_label = copy_to_reg (r_label);
1322
1323 /* Restore the frame pointer and stack pointer. We must use a
1324 temporary since the setjmp buffer may be a local. */
1325 r_fp = copy_to_reg (r_fp);
1326 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1327
1328 /* Ensure the frame pointer move is not optimized. */
1329 emit_insn (gen_blockage ());
1330 emit_clobber (hard_frame_pointer_rtx);
1331 emit_clobber (frame_pointer_rtx);
1332 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1333
1334 /* USE of hard_frame_pointer_rtx added for consistency;
1335 not clear if really needed. */
1336 emit_use (hard_frame_pointer_rtx);
1337 emit_use (stack_pointer_rtx);
1338
1339 /* If the architecture is using a GP register, we must
1340 conservatively assume that the target function makes use of it.
1341 The prologue of functions with nonlocal gotos must therefore
1342 initialize the GP register to the appropriate value, and we
1343 must then make sure that this value is live at the point
1344 of the jump. (Note that this doesn't necessarily apply
1345 to targets with a nonlocal_goto pattern; they are free
1346 to implement it in their own way. Note also that this is
1347 a no-op if the GP register is a global invariant.) */
1348 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1349 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1350 emit_use (pic_offset_table_rtx);
1351
1352 emit_indirect_jump (r_label);
1353 }
1354
1355 /* Search backwards to the jump insn and mark it as a
1356 non-local goto. */
1357 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1358 {
1359 if (JUMP_P (insn))
1360 {
1361 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1362 break;
1363 }
1364 else if (CALL_P (insn))
1365 break;
1366 }
1367
1368 return const0_rtx;
1369 }
1370
1371 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1372 (not all will be used on all machines) that was passed to __builtin_setjmp.
1373 It updates the stack pointer in that block to the current value. This is
1374 also called directly by the SJLJ exception handling code. */
1375
1376 void
1377 expand_builtin_update_setjmp_buf (rtx buf_addr)
1378 {
1379 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1380 buf_addr = convert_memory_address (Pmode, buf_addr);
1381 rtx stack_save
1382 = gen_rtx_MEM (sa_mode,
1383 memory_address
1384 (sa_mode,
1385 plus_constant (Pmode, buf_addr,
1386 2 * GET_MODE_SIZE (Pmode))));
1387
1388 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1389 }
1390
1391 /* Expand a call to __builtin_prefetch. For a target that does not support
1392 data prefetch, evaluate the memory address argument in case it has side
1393 effects. */
1394
1395 static void
1396 expand_builtin_prefetch (tree exp)
1397 {
1398 tree arg0, arg1, arg2;
1399 int nargs;
1400 rtx op0, op1, op2;
1401
1402 if (!validate_arglist (exp, POINTER_TYPE, 0))
1403 return;
1404
1405 arg0 = CALL_EXPR_ARG (exp, 0);
1406
1407 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1408 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1409 locality). */
1410 nargs = call_expr_nargs (exp);
1411 if (nargs > 1)
1412 arg1 = CALL_EXPR_ARG (exp, 1);
1413 else
1414 arg1 = integer_zero_node;
1415 if (nargs > 2)
1416 arg2 = CALL_EXPR_ARG (exp, 2);
1417 else
1418 arg2 = integer_three_node;
1419
1420 /* Argument 0 is an address. */
1421 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1422
1423 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1424 if (TREE_CODE (arg1) != INTEGER_CST)
1425 {
1426 error ("second argument to %<__builtin_prefetch%> must be a constant");
1427 arg1 = integer_zero_node;
1428 }
1429 op1 = expand_normal (arg1);
1430 /* Argument 1 must be either zero or one. */
1431 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1432 {
1433 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1434 " using zero");
1435 op1 = const0_rtx;
1436 }
1437
1438 /* Argument 2 (locality) must be a compile-time constant int. */
1439 if (TREE_CODE (arg2) != INTEGER_CST)
1440 {
1441 error ("third argument to %<__builtin_prefetch%> must be a constant");
1442 arg2 = integer_zero_node;
1443 }
1444 op2 = expand_normal (arg2);
1445 /* Argument 2 must be 0, 1, 2, or 3. */
1446 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1447 {
1448 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1449 op2 = const0_rtx;
1450 }
1451
1452 if (targetm.have_prefetch ())
1453 {
1454 class expand_operand ops[3];
1455
1456 create_address_operand (&ops[0], op0);
1457 create_integer_operand (&ops[1], INTVAL (op1));
1458 create_integer_operand (&ops[2], INTVAL (op2));
1459 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1460 return;
1461 }
1462
1463 /* Don't do anything with direct references to volatile memory, but
1464 generate code to handle other side effects. */
1465 if (!MEM_P (op0) && side_effects_p (op0))
1466 emit_insn (op0);
1467 }
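
/* Illustrative user-level calls of the builtin expanded above
   (hypothetical pointer P, not from the original source):

     __builtin_prefetch (p);           // rw = 0 (read), locality = 3
     __builtin_prefetch (p, 1, 1);     // prefetch for write, low locality

   The second and third arguments must be integer constants, as enforced
   above.  */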
1468
1469 /* Get a MEM rtx for expression EXP which is the address of an operand
1470 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1471 the maximum length of the block of memory that might be accessed or
1472 NULL if unknown. */
1473
1474 static rtx
1475 get_memory_rtx (tree exp, tree len)
1476 {
1477 tree orig_exp = exp;
1478 rtx addr, mem;
1479
1480 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1481 from its operand; e.g. for expr->a.b only <variable>.a.b is recorded. */
1482 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1483 exp = TREE_OPERAND (exp, 0);
1484
1485 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1486 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1487
1488 /* Get an expression we can use to find the attributes to assign to MEM.
1489 First remove any nops. */
1490 while (CONVERT_EXPR_P (exp)
1491 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1492 exp = TREE_OPERAND (exp, 0);
1493
1494 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1495 (as builtin stringops may alias with anything). */
1496 exp = fold_build2 (MEM_REF,
1497 build_array_type (char_type_node,
1498 build_range_type (sizetype,
1499 size_one_node, len)),
1500 exp, build_int_cst (ptr_type_node, 0));
1501
1502 /* If the MEM_REF has no acceptable address, try to get the base object
1503 from the original address we got, and build an all-aliasing
1504 unknown-sized access to that one. */
1505 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1506 set_mem_attributes (mem, exp, 0);
1507 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1508 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1509 0))))
1510 {
1511 exp = build_fold_addr_expr (exp);
1512 exp = fold_build2 (MEM_REF,
1513 build_array_type (char_type_node,
1514 build_range_type (sizetype,
1515 size_zero_node,
1516 NULL)),
1517 exp, build_int_cst (ptr_type_node, 0));
1518 set_mem_attributes (mem, exp, 0);
1519 }
1520 set_mem_alias_set (mem, 0);
1521 return mem;
1522 }
1523 \f
1524 /* Built-in functions to perform an untyped call and return. */
1525
1526 #define apply_args_mode \
1527 (this_target_builtins->x_apply_args_mode)
1528 #define apply_result_mode \
1529 (this_target_builtins->x_apply_result_mode)
1530
1531 /* Return the size required for the block returned by __builtin_apply_args,
1532 and initialize apply_args_mode. */
1533
1534 static int
1535 apply_args_size (void)
1536 {
1537 static int size = -1;
1538 int align;
1539 unsigned int regno;
1540
1541 /* The values computed by this function never change. */
1542 if (size < 0)
1543 {
1544 /* The first value is the incoming arg-pointer. */
1545 size = GET_MODE_SIZE (Pmode);
1546
1547 /* The second value is the structure value address unless this is
1548 passed as an "invisible" first argument. */
1549 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1550 size += GET_MODE_SIZE (Pmode);
1551
1552 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1553 if (FUNCTION_ARG_REGNO_P (regno))
1554 {
1555 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1556
1557 gcc_assert (mode != VOIDmode);
1558
1559 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1560 if (size % align != 0)
1561 size = CEIL (size, align) * align;
1562 size += GET_MODE_SIZE (mode);
1563 apply_args_mode[regno] = mode;
1564 }
1565 else
1566 {
1567 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1568 }
1569 }
1570 return size;
1571 }
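
/* Layout of the block whose size is computed above (a reading of the
   code, not an additional guarantee); expand_builtin_apply_args_1 below
   stores into the same offsets:

     offset 0                      incoming arg pointer (Pmode)
     +GET_MODE_SIZE (Pmode)        structure value address, only when it
                                     is not passed as an invisible first
                                     argument
     then, for every regno with
     FUNCTION_ARG_REGNO_P (regno)  that register, aligned to its mode

   apply_args_mode[] remembers the mode used for each register slot so
   the block can be reloaded when performing the untyped call.  */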
1572
1573 /* Return the size required for the block returned by __builtin_apply,
1574 and initialize apply_result_mode. */
1575
1576 static int
1577 apply_result_size (void)
1578 {
1579 static int size = -1;
1580 int align, regno;
1581
1582 /* The values computed by this function never change. */
1583 if (size < 0)
1584 {
1585 size = 0;
1586
1587 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1588 if (targetm.calls.function_value_regno_p (regno))
1589 {
1590 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1591
1592 gcc_assert (mode != VOIDmode);
1593
1594 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1595 if (size % align != 0)
1596 size = CEIL (size, align) * align;
1597 size += GET_MODE_SIZE (mode);
1598 apply_result_mode[regno] = mode;
1599 }
1600 else
1601 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1602
1603 /* Allow targets that use untyped_call and untyped_return to override
1604 the size so that machine-specific information can be stored here. */
1605 #ifdef APPLY_RESULT_SIZE
1606 size = APPLY_RESULT_SIZE;
1607 #endif
1608 }
1609 return size;
1610 }
1611
1612 /* Create a vector describing the result block RESULT. If SAVEP is true,
1613 the result block is used to save the values; otherwise it is used to
1614 restore the values. */
1615
1616 static rtx
1617 result_vector (int savep, rtx result)
1618 {
1619 int regno, size, align, nelts;
1620 fixed_size_mode mode;
1621 rtx reg, mem;
1622 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1623
1624 size = nelts = 0;
1625 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1626 if ((mode = apply_result_mode[regno]) != VOIDmode)
1627 {
1628 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1629 if (size % align != 0)
1630 size = CEIL (size, align) * align;
1631 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1632 mem = adjust_address (result, mode, size);
1633 savevec[nelts++] = (savep
1634 ? gen_rtx_SET (mem, reg)
1635 : gen_rtx_SET (reg, mem));
1636 size += GET_MODE_SIZE (mode);
1637 }
1638 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1639 }
1640
1641 /* Save the state required to perform an untyped call with the same
1642 arguments as were passed to the current function. */
1643
1644 static rtx
1645 expand_builtin_apply_args_1 (void)
1646 {
1647 rtx registers, tem;
1648 int size, align, regno;
1649 fixed_size_mode mode;
1650 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1651
1652 /* Create a block where the arg-pointer, structure value address,
1653 and argument registers can be saved. */
1654 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1655
1656 /* Walk past the arg-pointer and structure value address. */
1657 size = GET_MODE_SIZE (Pmode);
1658 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1659 size += GET_MODE_SIZE (Pmode);
1660
1661 /* Save each register used in calling a function to the block. */
1662 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1663 if ((mode = apply_args_mode[regno]) != VOIDmode)
1664 {
1665 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1666 if (size % align != 0)
1667 size = CEIL (size, align) * align;
1668
1669 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1670
1671 emit_move_insn (adjust_address (registers, mode, size), tem);
1672 size += GET_MODE_SIZE (mode);
1673 }
1674
1675 /* Save the arg pointer to the block. */
1676 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1677 /* We need the arg pointer as the caller actually passed the arguments
1678 to us, not as we might have pretended they were passed. Make sure it's
1679 a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
1680 if (STACK_GROWS_DOWNWARD)
1681 tem
1682 = force_operand (plus_constant (Pmode, tem,
1683 crtl->args.pretend_args_size),
1684 NULL_RTX);
1685 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1686
1687 size = GET_MODE_SIZE (Pmode);
1688
1689 /* Save the structure value address unless this is passed as an
1690 "invisible" first argument. */
1691 if (struct_incoming_value)
1692 emit_move_insn (adjust_address (registers, Pmode, size),
1693 copy_to_reg (struct_incoming_value));
1694
1695 /* Return the address of the block. */
1696 return copy_addr_to_reg (XEXP (registers, 0));
1697 }
1698
1699 /* __builtin_apply_args returns a block of memory allocated on
1700 the stack into which are stored the arg pointer, structure
1701 value address, static chain, and all the registers that might
1702 possibly be used in performing a function call. The code is
1703 moved to the start of the function so the incoming values are
1704 saved. */
1705
1706 static rtx
1707 expand_builtin_apply_args (void)
1708 {
1709 /* Don't do __builtin_apply_args more than once in a function.
1710 Save the result of the first call and reuse it. */
1711 if (apply_args_value != 0)
1712 return apply_args_value;
1713 {
1714 /* When this function is called, it means that registers must be
1715 saved on entry to this function. So we migrate the
1716 call to the first insn of this function. */
1717 rtx temp;
1718
1719 start_sequence ();
1720 temp = expand_builtin_apply_args_1 ();
1721 rtx_insn *seq = get_insns ();
1722 end_sequence ();
1723
1724 apply_args_value = temp;
1725
1726 /* Put the insns after the NOTE that starts the function.
1727 If this is inside a start_sequence, make the outer-level insn
1728 chain current, so the code is placed at the start of the
1729 function. If internal_arg_pointer is a non-virtual pseudo,
1730 it needs to be placed after the function that initializes
1731 that pseudo. */
1732 push_topmost_sequence ();
1733 if (REG_P (crtl->args.internal_arg_pointer)
1734 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1735 emit_insn_before (seq, parm_birth_insn);
1736 else
1737 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1738 pop_topmost_sequence ();
1739 return temp;
1740 }
1741 }
1742
1743 /* Perform an untyped call and save the state required to perform an
1744 untyped return of whatever value was returned by the given function. */
1745
1746 static rtx
1747 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1748 {
1749 int size, align, regno;
1750 fixed_size_mode mode;
1751 rtx incoming_args, result, reg, dest, src;
1752 rtx_call_insn *call_insn;
1753 rtx old_stack_level = 0;
1754 rtx call_fusage = 0;
1755 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1756
1757 arguments = convert_memory_address (Pmode, arguments);
1758
1759 /* Create a block where the return registers can be saved. */
1760 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1761
1762 /* Fetch the arg pointer from the ARGUMENTS block. */
1763 incoming_args = gen_reg_rtx (Pmode);
1764 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1765 if (!STACK_GROWS_DOWNWARD)
1766 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1767 incoming_args, 0, OPTAB_LIB_WIDEN);
1768
1769 /* Push a new argument block and copy the arguments. Do not allow
1770 the (potential) memcpy call below to interfere with our stack
1771 manipulations. */
1772 do_pending_stack_adjust ();
1773 NO_DEFER_POP;
1774
1775 /* Save the stack with nonlocal if available. */
1776 if (targetm.have_save_stack_nonlocal ())
1777 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1778 else
1779 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1780
1781 /* Allocate a block of memory onto the stack and copy the memory
1782 arguments to the outgoing arguments address. We can pass TRUE
1783 as the 4th argument because we just saved the stack pointer
1784 and will restore it right after the call. */
1785 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1786
1787 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1788 may have already set current_function_calls_alloca to true.
1789 current_function_calls_alloca won't be set if argsize is zero,
1790 so we have to guarantee need_drap is true here. */
1791 if (SUPPORTS_STACK_ALIGNMENT)
1792 crtl->need_drap = true;
1793
1794 dest = virtual_outgoing_args_rtx;
1795 if (!STACK_GROWS_DOWNWARD)
1796 {
1797 if (CONST_INT_P (argsize))
1798 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1799 else
1800 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1801 }
1802 dest = gen_rtx_MEM (BLKmode, dest);
1803 set_mem_align (dest, PARM_BOUNDARY);
1804 src = gen_rtx_MEM (BLKmode, incoming_args);
1805 set_mem_align (src, PARM_BOUNDARY);
1806 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1807
1808 /* Refer to the argument block. */
1809 apply_args_size ();
1810 arguments = gen_rtx_MEM (BLKmode, arguments);
1811 set_mem_align (arguments, PARM_BOUNDARY);
1812
1813 /* Walk past the arg-pointer and structure value address. */
1814 size = GET_MODE_SIZE (Pmode);
1815 if (struct_value)
1816 size += GET_MODE_SIZE (Pmode);
1817
1818 /* Restore each of the registers previously saved. Make USE insns
1819 for each of these registers for use in making the call. */
1820 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1821 if ((mode = apply_args_mode[regno]) != VOIDmode)
1822 {
1823 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1824 if (size % align != 0)
1825 size = CEIL (size, align) * align;
1826 reg = gen_rtx_REG (mode, regno);
1827 emit_move_insn (reg, adjust_address (arguments, mode, size));
1828 use_reg (&call_fusage, reg);
1829 size += GET_MODE_SIZE (mode);
1830 }
1831
1832 /* Restore the structure value address unless this is passed as an
1833 "invisible" first argument. */
1834 size = GET_MODE_SIZE (Pmode);
1835 if (struct_value)
1836 {
1837 rtx value = gen_reg_rtx (Pmode);
1838 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1839 emit_move_insn (struct_value, value);
1840 if (REG_P (struct_value))
1841 use_reg (&call_fusage, struct_value);
1842 }
1843
1844 /* All arguments and registers used for the call are set up by now! */
1845 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1846
1847 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1848 work is needed, and we don't want to load it into a register as an
1849 optimization, because prepare_call_address already did so if needed. */
1850 if (GET_CODE (function) != SYMBOL_REF)
1851 function = memory_address (FUNCTION_MODE, function);
1852
1853 /* Generate the actual call instruction and save the return value. */
1854 if (targetm.have_untyped_call ())
1855 {
1856 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1857 emit_call_insn (targetm.gen_untyped_call (mem, result,
1858 result_vector (1, result)));
1859 }
1860 else if (targetm.have_call_value ())
1861 {
1862 rtx valreg = 0;
1863
1864 /* Locate the unique return register. It is not possible to
1865 express a call that sets more than one return register using
1866 call_value; use untyped_call for that. In fact, untyped_call
1867 only needs to save the return registers in the given block. */
1868 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1869 if ((mode = apply_result_mode[regno]) != VOIDmode)
1870 {
1871 gcc_assert (!valreg); /* have_untyped_call required. */
1872
1873 valreg = gen_rtx_REG (mode, regno);
1874 }
1875
1876 emit_insn (targetm.gen_call_value (valreg,
1877 gen_rtx_MEM (FUNCTION_MODE, function),
1878 const0_rtx, NULL_RTX, const0_rtx));
1879
1880 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1881 }
1882 else
1883 gcc_unreachable ();
1884
1885 /* Find the CALL insn we just emitted, and attach the register usage
1886 information. */
1887 call_insn = last_call_insn ();
1888 add_function_usage_to (call_insn, call_fusage);
1889
1890 /* Restore the stack. */
1891 if (targetm.have_save_stack_nonlocal ())
1892 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1893 else
1894 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1895 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1896
1897 OK_DEFER_POP;
1898
1899 /* Return the address of the result block. */
1900 result = copy_addr_to_reg (XEXP (result, 0));
1901 return convert_memory_address (ptr_mode, result);
1902 }
1903
1904 /* Perform an untyped return. */
1905
1906 static void
1907 expand_builtin_return (rtx result)
1908 {
1909 int size, align, regno;
1910 fixed_size_mode mode;
1911 rtx reg;
1912 rtx_insn *call_fusage = 0;
1913
1914 result = convert_memory_address (Pmode, result);
1915
1916 apply_result_size ();
1917 result = gen_rtx_MEM (BLKmode, result);
1918
1919 if (targetm.have_untyped_return ())
1920 {
1921 rtx vector = result_vector (0, result);
1922 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1923 emit_barrier ();
1924 return;
1925 }
1926
1927 /* Restore the return value and note that each value is used. */
1928 size = 0;
1929 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1930 if ((mode = apply_result_mode[regno]) != VOIDmode)
1931 {
1932 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1933 if (size % align != 0)
1934 size = CEIL (size, align) * align;
1935 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1936 emit_move_insn (reg, adjust_address (result, mode, size));
1937
1938 push_to_sequence (call_fusage);
1939 emit_use (reg);
1940 call_fusage = get_insns ();
1941 end_sequence ();
1942 size += GET_MODE_SIZE (mode);
1943 }
1944
1945 /* Put the USE insns before the return. */
1946 emit_insn (call_fusage);
1947
1948 /* Return whatever values were restored by jumping directly to the end
1949 of the function. */
1950 expand_naked_return ();
1951 }
1952
1953 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1954
1955 static enum type_class
1956 type_to_class (tree type)
1957 {
1958 switch (TREE_CODE (type))
1959 {
1960 case VOID_TYPE: return void_type_class;
1961 case INTEGER_TYPE: return integer_type_class;
1962 case ENUMERAL_TYPE: return enumeral_type_class;
1963 case BOOLEAN_TYPE: return boolean_type_class;
1964 case POINTER_TYPE: return pointer_type_class;
1965 case REFERENCE_TYPE: return reference_type_class;
1966 case OFFSET_TYPE: return offset_type_class;
1967 case REAL_TYPE: return real_type_class;
1968 case COMPLEX_TYPE: return complex_type_class;
1969 case FUNCTION_TYPE: return function_type_class;
1970 case METHOD_TYPE: return method_type_class;
1971 case RECORD_TYPE: return record_type_class;
1972 case UNION_TYPE:
1973 case QUAL_UNION_TYPE: return union_type_class;
1974 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1975 ? string_type_class : array_type_class);
1976 case LANG_TYPE: return lang_type_class;
1977 default: return no_type_class;
1978 }
1979 }
1980
1981 /* Expand a call EXP to __builtin_classify_type. */
1982
1983 static rtx
1984 expand_builtin_classify_type (tree exp)
1985 {
1986 if (call_expr_nargs (exp))
1987 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1988 return GEN_INT (no_type_class);
1989 }
1990
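/* User-level sketch (hypothetical code, not part of GCC): the builtin
   simply reports the class of its argument's type, so with the mapping in
   type_to_class above

     __builtin_classify_type (1.0)       evaluates to real_type_class,
     __builtin_classify_type ((int *) 0) evaluates to pointer_type_class,

   and with no argument the call evaluates to no_type_class.  Code outside
   GCC compares against the numeric values of these enumerators (see
   typeclass.h), since the names themselves are not visible to user code.  */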
1991 /* This helper macro, meant to be used in mathfn_built_in below, determines
1992 which among a set of builtin math functions is appropriate for a given type
1993 mode. The `F' (float) and `L' (long double) are automatically generated
1994 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1995 types, there are additional types that are considered with 'F32', 'F64',
1996 'F128', etc. suffixes. */
1997 #define CASE_MATHFN(MATHFN) \
1998 CASE_CFN_##MATHFN: \
1999 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2000 fcodel = BUILT_IN_##MATHFN##L ; break;
2001 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2002 types. */
2003 #define CASE_MATHFN_FLOATN(MATHFN) \
2004 CASE_CFN_##MATHFN: \
2005 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2006 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2007 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2008 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2009 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2010 break;
2011 /* Similar to above, but appends _R after any F/L suffix. */
2012 #define CASE_MATHFN_REENT(MATHFN) \
2013 case CFN_BUILT_IN_##MATHFN##_R: \
2014 case CFN_BUILT_IN_##MATHFN##F_R: \
2015 case CFN_BUILT_IN_##MATHFN##L_R: \
2016 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2017 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2018
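/* For illustration, CASE_MATHFN (SQRT) expands to roughly

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so each line of the switch in mathfn_built_in_2 below maps one
   combined_fn onto its double/float/long double builtin codes.  */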
2019 /* Return a function equivalent to FN but operating on floating-point
2020 values of type TYPE, or END_BUILTINS if no such function exists.
2021 This is purely an operation on function codes; it does not guarantee
2022 that the target actually has an implementation of the function. */
2023
2024 static built_in_function
2025 mathfn_built_in_2 (tree type, combined_fn fn)
2026 {
2027 tree mtype;
2028 built_in_function fcode, fcodef, fcodel;
2029 built_in_function fcodef16 = END_BUILTINS;
2030 built_in_function fcodef32 = END_BUILTINS;
2031 built_in_function fcodef64 = END_BUILTINS;
2032 built_in_function fcodef128 = END_BUILTINS;
2033 built_in_function fcodef32x = END_BUILTINS;
2034 built_in_function fcodef64x = END_BUILTINS;
2035 built_in_function fcodef128x = END_BUILTINS;
2036
2037 switch (fn)
2038 {
2039 CASE_MATHFN (ACOS)
2040 CASE_MATHFN (ACOSH)
2041 CASE_MATHFN (ASIN)
2042 CASE_MATHFN (ASINH)
2043 CASE_MATHFN (ATAN)
2044 CASE_MATHFN (ATAN2)
2045 CASE_MATHFN (ATANH)
2046 CASE_MATHFN (CBRT)
2047 CASE_MATHFN_FLOATN (CEIL)
2048 CASE_MATHFN (CEXPI)
2049 CASE_MATHFN_FLOATN (COPYSIGN)
2050 CASE_MATHFN (COS)
2051 CASE_MATHFN (COSH)
2052 CASE_MATHFN (DREM)
2053 CASE_MATHFN (ERF)
2054 CASE_MATHFN (ERFC)
2055 CASE_MATHFN (EXP)
2056 CASE_MATHFN (EXP10)
2057 CASE_MATHFN (EXP2)
2058 CASE_MATHFN (EXPM1)
2059 CASE_MATHFN (FABS)
2060 CASE_MATHFN (FDIM)
2061 CASE_MATHFN_FLOATN (FLOOR)
2062 CASE_MATHFN_FLOATN (FMA)
2063 CASE_MATHFN_FLOATN (FMAX)
2064 CASE_MATHFN_FLOATN (FMIN)
2065 CASE_MATHFN (FMOD)
2066 CASE_MATHFN (FREXP)
2067 CASE_MATHFN (GAMMA)
2068 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2069 CASE_MATHFN (HUGE_VAL)
2070 CASE_MATHFN (HYPOT)
2071 CASE_MATHFN (ILOGB)
2072 CASE_MATHFN (ICEIL)
2073 CASE_MATHFN (IFLOOR)
2074 CASE_MATHFN (INF)
2075 CASE_MATHFN (IRINT)
2076 CASE_MATHFN (IROUND)
2077 CASE_MATHFN (ISINF)
2078 CASE_MATHFN (J0)
2079 CASE_MATHFN (J1)
2080 CASE_MATHFN (JN)
2081 CASE_MATHFN (LCEIL)
2082 CASE_MATHFN (LDEXP)
2083 CASE_MATHFN (LFLOOR)
2084 CASE_MATHFN (LGAMMA)
2085 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2086 CASE_MATHFN (LLCEIL)
2087 CASE_MATHFN (LLFLOOR)
2088 CASE_MATHFN (LLRINT)
2089 CASE_MATHFN (LLROUND)
2090 CASE_MATHFN (LOG)
2091 CASE_MATHFN (LOG10)
2092 CASE_MATHFN (LOG1P)
2093 CASE_MATHFN (LOG2)
2094 CASE_MATHFN (LOGB)
2095 CASE_MATHFN (LRINT)
2096 CASE_MATHFN (LROUND)
2097 CASE_MATHFN (MODF)
2098 CASE_MATHFN (NAN)
2099 CASE_MATHFN (NANS)
2100 CASE_MATHFN_FLOATN (NEARBYINT)
2101 CASE_MATHFN (NEXTAFTER)
2102 CASE_MATHFN (NEXTTOWARD)
2103 CASE_MATHFN (POW)
2104 CASE_MATHFN (POWI)
2105 CASE_MATHFN (POW10)
2106 CASE_MATHFN (REMAINDER)
2107 CASE_MATHFN (REMQUO)
2108 CASE_MATHFN_FLOATN (RINT)
2109 CASE_MATHFN_FLOATN (ROUND)
2110 CASE_MATHFN_FLOATN (ROUNDEVEN)
2111 CASE_MATHFN (SCALB)
2112 CASE_MATHFN (SCALBLN)
2113 CASE_MATHFN (SCALBN)
2114 CASE_MATHFN (SIGNBIT)
2115 CASE_MATHFN (SIGNIFICAND)
2116 CASE_MATHFN (SIN)
2117 CASE_MATHFN (SINCOS)
2118 CASE_MATHFN (SINH)
2119 CASE_MATHFN_FLOATN (SQRT)
2120 CASE_MATHFN (TAN)
2121 CASE_MATHFN (TANH)
2122 CASE_MATHFN (TGAMMA)
2123 CASE_MATHFN_FLOATN (TRUNC)
2124 CASE_MATHFN (Y0)
2125 CASE_MATHFN (Y1)
2126 CASE_MATHFN (YN)
2127
2128 default:
2129 return END_BUILTINS;
2130 }
2131
2132 mtype = TYPE_MAIN_VARIANT (type);
2133 if (mtype == double_type_node)
2134 return fcode;
2135 else if (mtype == float_type_node)
2136 return fcodef;
2137 else if (mtype == long_double_type_node)
2138 return fcodel;
2139 else if (mtype == float16_type_node)
2140 return fcodef16;
2141 else if (mtype == float32_type_node)
2142 return fcodef32;
2143 else if (mtype == float64_type_node)
2144 return fcodef64;
2145 else if (mtype == float128_type_node)
2146 return fcodef128;
2147 else if (mtype == float32x_type_node)
2148 return fcodef32x;
2149 else if (mtype == float64x_type_node)
2150 return fcodef64x;
2151 else if (mtype == float128x_type_node)
2152 return fcodef128x;
2153 else
2154 return END_BUILTINS;
2155 }
2156
2157 /* Return the math function equivalent to FN but operating directly on TYPE,
2158 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2159 otherwise use the explicit declaration. If we can't do the conversion,
2160 return null. */
2161
2162 static tree
2163 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2164 {
2165 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2166 if (fcode2 == END_BUILTINS)
2167 return NULL_TREE;
2168
2169 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2170 return NULL_TREE;
2171
2172 return builtin_decl_explicit (fcode2);
2173 }
2174
2175 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2176
2177 tree
2178 mathfn_built_in (tree type, combined_fn fn)
2179 {
2180 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2181 }
2182
2183 /* Like mathfn_built_in_1, but take a built_in_function and
2184 always use the implicit array. */
2185
2186 tree
2187 mathfn_built_in (tree type, enum built_in_function fn)
2188 {
2189 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2190 }
2191
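/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) looks up
   BUILT_IN_SQRTF and returns its declaration (the callee of `sqrtf'), or
   NULL_TREE when the implicit declaration is not available.  */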
2192 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2193 return its code, otherwise return IFN_LAST. Note that this function
2194 only tests whether the function is defined in internals.def, not whether
2195 it is actually available on the target. */
2196
2197 internal_fn
2198 associated_internal_fn (tree fndecl)
2199 {
2200 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2201 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2202 switch (DECL_FUNCTION_CODE (fndecl))
2203 {
2204 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2205 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2206 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2208 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2209 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2210 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2211 #include "internal-fn.def"
2212
2213 CASE_FLT_FN (BUILT_IN_POW10):
2214 return IFN_EXP10;
2215
2216 CASE_FLT_FN (BUILT_IN_DREM):
2217 return IFN_REMAINDER;
2218
2219 CASE_FLT_FN (BUILT_IN_SCALBN):
2220 CASE_FLT_FN (BUILT_IN_SCALBLN):
2221 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2222 return IFN_LDEXP;
2223 return IFN_LAST;
2224
2225 default:
2226 return IFN_LAST;
2227 }
2228 }
2229
2230 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2231 on the current target by a call to an internal function, return the
2232 code of that internal function, otherwise return IFN_LAST. The caller
2233 is responsible for ensuring that any side-effects of the built-in
2234 call are dealt with correctly. E.g. if CALL sets errno, the caller
2235 must decide that the errno result isn't needed or make it available
2236 in some other way. */
2237
2238 internal_fn
2239 replacement_internal_fn (gcall *call)
2240 {
2241 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2242 {
2243 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2244 if (ifn != IFN_LAST)
2245 {
2246 tree_pair types = direct_internal_fn_types (ifn, call);
2247 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2248 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2249 return ifn;
2250 }
2251 }
2252 return IFN_LAST;
2253 }
2254
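/* A minimal caller-side sketch of replacement_internal_fn (illustrative
   only, not code from this file):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         // Safe to substitute an internal-function call here, e.g. one
         // built with gimple_build_call_internal, provided errno and any
         // other side effects of the original call were already handled.
       }
   */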
2255 /* Expand a call to the builtin trinary math functions (fma).
2256 Return NULL_RTX if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's
2260 operands. */
2261
2262 static rtx
2263 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2264 {
2265 optab builtin_optab;
2266 rtx op0, op1, op2, result;
2267 rtx_insn *insns;
2268 tree fndecl = get_callee_fndecl (exp);
2269 tree arg0, arg1, arg2;
2270 machine_mode mode;
2271
2272 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2273 return NULL_RTX;
2274
2275 arg0 = CALL_EXPR_ARG (exp, 0);
2276 arg1 = CALL_EXPR_ARG (exp, 1);
2277 arg2 = CALL_EXPR_ARG (exp, 2);
2278
2279 switch (DECL_FUNCTION_CODE (fndecl))
2280 {
2281 CASE_FLT_FN (BUILT_IN_FMA):
2282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2283 builtin_optab = fma_optab; break;
2284 default:
2285 gcc_unreachable ();
2286 }
2287
2288 /* Make a suitable register to place result in. */
2289 mode = TYPE_MODE (TREE_TYPE (exp));
2290
2291 /* Before working hard, check whether the instruction is available. */
2292 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2293 return NULL_RTX;
2294
2295 result = gen_reg_rtx (mode);
2296
2297 /* Always stabilize the argument list. */
2298 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2299 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2300 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2301
2302 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2303 op1 = expand_normal (arg1);
2304 op2 = expand_normal (arg2);
2305
2306 start_sequence ();
2307
2308 /* Compute into RESULT.
2309 Set RESULT to wherever the result comes back. */
2310 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2311 result, 0);
2312
2313 /* If we were unable to expand via the builtin, stop the sequence
2314 (without outputting the insns) and call to the library function
2315 with the stabilized argument list. */
2316 if (result == 0)
2317 {
2318 end_sequence ();
2319 return expand_call (exp, target, target == const0_rtx);
2320 }
2321
2322 /* Output the entire sequence. */
2323 insns = get_insns ();
2324 end_sequence ();
2325 emit_insn (insns);
2326
2327 return result;
2328 }
2329
2330 /* Expand a call to the builtin sin and cos math functions.
2331 Return NULL_RTX if a normal call should be emitted rather than expanding the
2332 function in-line. EXP is the expression that is a call to the builtin
2333 function; if convenient, the result should be placed in TARGET.
2334 SUBTARGET may be used as the target for computing one of EXP's
2335 operands. */
2336
2337 static rtx
2338 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2339 {
2340 optab builtin_optab;
2341 rtx op0;
2342 rtx_insn *insns;
2343 tree fndecl = get_callee_fndecl (exp);
2344 machine_mode mode;
2345 tree arg;
2346
2347 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2348 return NULL_RTX;
2349
2350 arg = CALL_EXPR_ARG (exp, 0);
2351
2352 switch (DECL_FUNCTION_CODE (fndecl))
2353 {
2354 CASE_FLT_FN (BUILT_IN_SIN):
2355 CASE_FLT_FN (BUILT_IN_COS):
2356 builtin_optab = sincos_optab; break;
2357 default:
2358 gcc_unreachable ();
2359 }
2360
2361 /* Make a suitable register to place result in. */
2362 mode = TYPE_MODE (TREE_TYPE (exp));
2363
2364 /* Check if the sincos insn is available, otherwise fall back
2365 to the sin or cos insn. */
2366 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2367 switch (DECL_FUNCTION_CODE (fndecl))
2368 {
2369 CASE_FLT_FN (BUILT_IN_SIN):
2370 builtin_optab = sin_optab; break;
2371 CASE_FLT_FN (BUILT_IN_COS):
2372 builtin_optab = cos_optab; break;
2373 default:
2374 gcc_unreachable ();
2375 }
2376
2377 /* Before working hard, check whether the instruction is available. */
2378 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2379 {
2380 rtx result = gen_reg_rtx (mode);
2381
2382 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2383 need to expand the argument again. This way, we will not perform
2384 side-effects more than once. */
2385 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2386
2387 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2388
2389 start_sequence ();
2390
2391 /* Compute into RESULT.
2392 Set RESULT to wherever the result comes back. */
2393 if (builtin_optab == sincos_optab)
2394 {
2395 int ok;
2396
2397 switch (DECL_FUNCTION_CODE (fndecl))
2398 {
2399 CASE_FLT_FN (BUILT_IN_SIN):
2400 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2401 break;
2402 CASE_FLT_FN (BUILT_IN_COS):
2403 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2404 break;
2405 default:
2406 gcc_unreachable ();
2407 }
2408 gcc_assert (ok);
2409 }
2410 else
2411 result = expand_unop (mode, builtin_optab, op0, result, 0);
2412
2413 if (result != 0)
2414 {
2415 /* Output the entire sequence. */
2416 insns = get_insns ();
2417 end_sequence ();
2418 emit_insn (insns);
2419 return result;
2420 }
2421
2422 /* If we were unable to expand via the builtin, stop the sequence
2423 (without outputting the insns) and call to the library function
2424 with the stabilized argument list. */
2425 end_sequence ();
2426 }
2427
2428 return expand_call (exp, target, target == const0_rtx);
2429 }
2430
2431 /* Given an interclass math builtin decl FNDECL and its argument ARG
2432 return an RTL instruction code that implements the functionality.
2433 If that isn't possible or available return CODE_FOR_nothing. */
2434
2435 static enum insn_code
2436 interclass_mathfn_icode (tree arg, tree fndecl)
2437 {
2438 bool errno_set = false;
2439 optab builtin_optab = unknown_optab;
2440 machine_mode mode;
2441
2442 switch (DECL_FUNCTION_CODE (fndecl))
2443 {
2444 CASE_FLT_FN (BUILT_IN_ILOGB):
2445 errno_set = true; builtin_optab = ilogb_optab; break;
2446 CASE_FLT_FN (BUILT_IN_ISINF):
2447 builtin_optab = isinf_optab; break;
2448 case BUILT_IN_ISNORMAL:
2449 case BUILT_IN_ISFINITE:
2450 CASE_FLT_FN (BUILT_IN_FINITE):
2451 case BUILT_IN_FINITED32:
2452 case BUILT_IN_FINITED64:
2453 case BUILT_IN_FINITED128:
2454 case BUILT_IN_ISINFD32:
2455 case BUILT_IN_ISINFD64:
2456 case BUILT_IN_ISINFD128:
2457 /* These builtins have no optabs (yet). */
2458 break;
2459 default:
2460 gcc_unreachable ();
2461 }
2462
2463 /* There's no easy way to detect the case we need to set EDOM. */
2464 if (flag_errno_math && errno_set)
2465 return CODE_FOR_nothing;
2466
2467 /* Optab mode depends on the mode of the input argument. */
2468 mode = TYPE_MODE (TREE_TYPE (arg));
2469
2470 if (builtin_optab)
2471 return optab_handler (builtin_optab, mode);
2472 return CODE_FOR_nothing;
2473 }
2474
2475 /* Expand a call to one of the builtin math functions that operate on
2476 floating point argument and output an integer result (ilogb, isinf,
2477 isnan, etc).
2478 Return 0 if a normal call should be emitted rather than expanding the
2479 function in-line. EXP is the expression that is a call to the builtin
2480 function; if convenient, the result should be placed in TARGET. */
2481
2482 static rtx
2483 expand_builtin_interclass_mathfn (tree exp, rtx target)
2484 {
2485 enum insn_code icode = CODE_FOR_nothing;
2486 rtx op0;
2487 tree fndecl = get_callee_fndecl (exp);
2488 machine_mode mode;
2489 tree arg;
2490
2491 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2492 return NULL_RTX;
2493
2494 arg = CALL_EXPR_ARG (exp, 0);
2495 icode = interclass_mathfn_icode (arg, fndecl);
2496 mode = TYPE_MODE (TREE_TYPE (arg));
2497
2498 if (icode != CODE_FOR_nothing)
2499 {
2500 class expand_operand ops[1];
2501 rtx_insn *last = get_last_insn ();
2502 tree orig_arg = arg;
2503
2504 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2505 need to expand the argument again. This way, we will not perform
2506 side-effects more than once. */
2507 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2508
2509 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2510
2511 if (mode != GET_MODE (op0))
2512 op0 = convert_to_mode (mode, op0, 0);
2513
2514 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2515 if (maybe_legitimize_operands (icode, 0, 1, ops)
2516 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2517 return ops[0].value;
2518
2519 delete_insns_since (last);
2520 CALL_EXPR_ARG (exp, 0) = orig_arg;
2521 }
2522
2523 return NULL_RTX;
2524 }
2525
2526 /* Expand a call to the builtin sincos math function.
2527 Return NULL_RTX if a normal call should be emitted rather than expanding the
2528 function in-line. EXP is the expression that is a call to the builtin
2529 function. */
2530
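/* At the source level this handles calls such as

     sincos (x, &s, &c);

   which store sin (x) in *s and cos (x) in *c; when the sincos optab is
   available, both values are produced by a single insn below.  */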
2531 static rtx
2532 expand_builtin_sincos (tree exp)
2533 {
2534 rtx op0, op1, op2, target1, target2;
2535 machine_mode mode;
2536 tree arg, sinp, cosp;
2537 int result;
2538 location_t loc = EXPR_LOCATION (exp);
2539 tree alias_type, alias_off;
2540
2541 if (!validate_arglist (exp, REAL_TYPE,
2542 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2543 return NULL_RTX;
2544
2545 arg = CALL_EXPR_ARG (exp, 0);
2546 sinp = CALL_EXPR_ARG (exp, 1);
2547 cosp = CALL_EXPR_ARG (exp, 2);
2548
2549 /* Make a suitable register to place result in. */
2550 mode = TYPE_MODE (TREE_TYPE (arg));
2551
2552 /* Check if sincos insn is available, otherwise emit the call. */
2553 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2554 return NULL_RTX;
2555
2556 target1 = gen_reg_rtx (mode);
2557 target2 = gen_reg_rtx (mode);
2558
2559 op0 = expand_normal (arg);
2560 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2561 alias_off = build_int_cst (alias_type, 0);
2562 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 sinp, alias_off));
2564 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2565 cosp, alias_off));
2566
2567 /* Compute into target1 and target2.
2568 Set TARGET to wherever the result comes back. */
2569 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2570 gcc_assert (result);
2571
2572 /* Move target1 and target2 to the memory locations indicated
2573 by op1 and op2. */
2574 emit_move_insn (op1, target1);
2575 emit_move_insn (op2, target2);
2576
2577 return const0_rtx;
2578 }
2579
2580 /* Expand a call to the internal cexpi builtin to the sincos math function.
2581 EXP is the expression that is a call to the builtin function; if convenient,
2582 the result should be placed in TARGET. */
2583
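/* __builtin_cexpi (x) computes exp (i*x), i.e. cos (x) + i*sin (x).  An
   illustrative summary of the lowering below, in order of preference:

     sincos optab insn          -> (cos, sin) register pair
     sincos (x, &s, &c) libcall -> load s and c back from memory
     cexp (0.0 + x*i) libcall   -> complex result used directly  */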
2584 static rtx
2585 expand_builtin_cexpi (tree exp, rtx target)
2586 {
2587 tree fndecl = get_callee_fndecl (exp);
2588 tree arg, type;
2589 machine_mode mode;
2590 rtx op0, op1, op2;
2591 location_t loc = EXPR_LOCATION (exp);
2592
2593 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2594 return NULL_RTX;
2595
2596 arg = CALL_EXPR_ARG (exp, 0);
2597 type = TREE_TYPE (arg);
2598 mode = TYPE_MODE (TREE_TYPE (arg));
2599
2600 /* Try expanding via a sincos optab, fall back to emitting a libcall
2601 to sincos or cexp. We know one of them is available because cexpi is
2602 only generated from sincos or cexp, or when the target has either. */
2603 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2604 {
2605 op1 = gen_reg_rtx (mode);
2606 op2 = gen_reg_rtx (mode);
2607
2608 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2609
2610 /* Compute into op1 and op2. */
2611 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2612 }
2613 else if (targetm.libc_has_function (function_sincos))
2614 {
2615 tree call, fn = NULL_TREE;
2616 tree top1, top2;
2617 rtx op1a, op2a;
2618
2619 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2623 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2624 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2625 else
2626 gcc_unreachable ();
2627
2628 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2629 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2630 op1a = copy_addr_to_reg (XEXP (op1, 0));
2631 op2a = copy_addr_to_reg (XEXP (op2, 0));
2632 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2633 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2634
2635 /* Make sure not to fold the sincos call again. */
2636 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2637 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2638 call, 3, arg, top1, top2));
2639 }
2640 else
2641 {
2642 tree call, fn = NULL_TREE, narg;
2643 tree ctype = build_complex_type (type);
2644
2645 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2649 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2650 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2651 else
2652 gcc_unreachable ();
2653
2654 /* If we don't have a decl for cexp create one. This is the
2655 friendliest fallback if the user calls __builtin_cexpi
2656 without full target C99 function support. */
2657 if (fn == NULL_TREE)
2658 {
2659 tree fntype;
2660 const char *name = NULL;
2661
2662 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2663 name = "cexpf";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2665 name = "cexp";
2666 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2667 name = "cexpl";
2668
2669 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2670 fn = build_fn_decl (name, fntype);
2671 }
2672
2673 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2674 build_real (type, dconst0), arg);
2675
2676 /* Make sure not to fold the cexp call again. */
2677 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2678 return expand_expr (build_call_nary (ctype, call, 1, narg),
2679 target, VOIDmode, EXPAND_NORMAL);
2680 }
2681
2682 /* Now build the proper return type. */
2683 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2684 make_tree (TREE_TYPE (arg), op2),
2685 make_tree (TREE_TYPE (arg), op1)),
2686 target, VOIDmode, EXPAND_NORMAL);
2687 }
2688
2689 /* Conveniently construct a function call expression. FNDECL names the
2690 function to be called, N is the number of arguments, and the "..."
2691 parameters are the argument expressions. Unlike build_call_expr
2692 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2693
2694 static tree
2695 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2696 {
2697 va_list ap;
2698 tree fntype = TREE_TYPE (fndecl);
2699 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2700
2701 va_start (ap, n);
2702 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2703 va_end (ap);
2704 SET_EXPR_LOCATION (fn, loc);
2705 return fn;
2706 }
2707
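/* For example, the rounding expanders below use it as

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   to build a plain `floor (arg)'-style CALL_EXPR that will not be folded
   back into the builtin currently being expanded.  */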
2708 /* Expand a call to one of the builtin rounding functions gcc defines
2709 as an extension (lfloor and lceil). As these are gcc extensions we
2710 do not need to worry about setting errno to EDOM.
2711 If expanding via optab fails, lower expression to (int)(floor(x)).
2712 EXP is the expression that is a call to the builtin function;
2713 if convenient, the result should be placed in TARGET. */
2714
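/* For instance, `long l = __builtin_lfloor (x);' is expanded through
   lfloor_optab when the target provides it, and otherwise lowered to the
   equivalent of `(long) floor (x)' as described above.  */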
2715 static rtx
2716 expand_builtin_int_roundingfn (tree exp, rtx target)
2717 {
2718 convert_optab builtin_optab;
2719 rtx op0, tmp;
2720 rtx_insn *insns;
2721 tree fndecl = get_callee_fndecl (exp);
2722 enum built_in_function fallback_fn;
2723 tree fallback_fndecl;
2724 machine_mode mode;
2725 tree arg;
2726
2727 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2728 return NULL_RTX;
2729
2730 arg = CALL_EXPR_ARG (exp, 0);
2731
2732 switch (DECL_FUNCTION_CODE (fndecl))
2733 {
2734 CASE_FLT_FN (BUILT_IN_ICEIL):
2735 CASE_FLT_FN (BUILT_IN_LCEIL):
2736 CASE_FLT_FN (BUILT_IN_LLCEIL):
2737 builtin_optab = lceil_optab;
2738 fallback_fn = BUILT_IN_CEIL;
2739 break;
2740
2741 CASE_FLT_FN (BUILT_IN_IFLOOR):
2742 CASE_FLT_FN (BUILT_IN_LFLOOR):
2743 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2744 builtin_optab = lfloor_optab;
2745 fallback_fn = BUILT_IN_FLOOR;
2746 break;
2747
2748 default:
2749 gcc_unreachable ();
2750 }
2751
2752 /* Make a suitable register to place result in. */
2753 mode = TYPE_MODE (TREE_TYPE (exp));
2754
2755 target = gen_reg_rtx (mode);
2756
2757 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2758 need to expand the argument again. This way, we will not perform
2759 side-effects more than once. */
2760 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2761
2762 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2763
2764 start_sequence ();
2765
2766 /* Compute into TARGET. */
2767 if (expand_sfix_optab (target, op0, builtin_optab))
2768 {
2769 /* Output the entire sequence. */
2770 insns = get_insns ();
2771 end_sequence ();
2772 emit_insn (insns);
2773 return target;
2774 }
2775
2776 /* If we were unable to expand via the builtin, stop the sequence
2777 (without outputting the insns). */
2778 end_sequence ();
2779
2780 /* Fall back to floating point rounding optab. */
2781 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2782
2783 /* For non-C99 targets we may end up without a fallback fndecl here
2784 if the user called __builtin_lfloor directly. In this case emit
2785 a call to the floor/ceil variants nevertheless. This should result
2786 in the best user experience for targets without full C99 support. */
2787 if (fallback_fndecl == NULL_TREE)
2788 {
2789 tree fntype;
2790 const char *name = NULL;
2791
2792 switch (DECL_FUNCTION_CODE (fndecl))
2793 {
2794 case BUILT_IN_ICEIL:
2795 case BUILT_IN_LCEIL:
2796 case BUILT_IN_LLCEIL:
2797 name = "ceil";
2798 break;
2799 case BUILT_IN_ICEILF:
2800 case BUILT_IN_LCEILF:
2801 case BUILT_IN_LLCEILF:
2802 name = "ceilf";
2803 break;
2804 case BUILT_IN_ICEILL:
2805 case BUILT_IN_LCEILL:
2806 case BUILT_IN_LLCEILL:
2807 name = "ceill";
2808 break;
2809 case BUILT_IN_IFLOOR:
2810 case BUILT_IN_LFLOOR:
2811 case BUILT_IN_LLFLOOR:
2812 name = "floor";
2813 break;
2814 case BUILT_IN_IFLOORF:
2815 case BUILT_IN_LFLOORF:
2816 case BUILT_IN_LLFLOORF:
2817 name = "floorf";
2818 break;
2819 case BUILT_IN_IFLOORL:
2820 case BUILT_IN_LFLOORL:
2821 case BUILT_IN_LLFLOORL:
2822 name = "floorl";
2823 break;
2824 default:
2825 gcc_unreachable ();
2826 }
2827
2828 fntype = build_function_type_list (TREE_TYPE (arg),
2829 TREE_TYPE (arg), NULL_TREE);
2830 fallback_fndecl = build_fn_decl (name, fntype);
2831 }
2832
2833 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2834
2835 tmp = expand_normal (exp);
2836 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2837
2838 /* Truncate the result of floating point optab to integer
2839 via expand_fix (). */
2840 target = gen_reg_rtx (mode);
2841 expand_fix (target, tmp, 0);
2842
2843 return target;
2844 }
2845
2846 /* Expand a call to one of the builtin math functions doing integer
2847 conversion (lrint).
2848 Return 0 if a normal call should be emitted rather than expanding the
2849 function in-line. EXP is the expression that is a call to the builtin
2850 function; if convenient, the result should be placed in TARGET. */
2851
2852 static rtx
2853 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2854 {
2855 convert_optab builtin_optab;
2856 rtx op0;
2857 rtx_insn *insns;
2858 tree fndecl = get_callee_fndecl (exp);
2859 tree arg;
2860 machine_mode mode;
2861 enum built_in_function fallback_fn = BUILT_IN_NONE;
2862
2863 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2864 return NULL_RTX;
2865
2866 arg = CALL_EXPR_ARG (exp, 0);
2867
2868 switch (DECL_FUNCTION_CODE (fndecl))
2869 {
2870 CASE_FLT_FN (BUILT_IN_IRINT):
2871 fallback_fn = BUILT_IN_LRINT;
2872 gcc_fallthrough ();
2873 CASE_FLT_FN (BUILT_IN_LRINT):
2874 CASE_FLT_FN (BUILT_IN_LLRINT):
2875 builtin_optab = lrint_optab;
2876 break;
2877
2878 CASE_FLT_FN (BUILT_IN_IROUND):
2879 fallback_fn = BUILT_IN_LROUND;
2880 gcc_fallthrough ();
2881 CASE_FLT_FN (BUILT_IN_LROUND):
2882 CASE_FLT_FN (BUILT_IN_LLROUND):
2883 builtin_optab = lround_optab;
2884 break;
2885
2886 default:
2887 gcc_unreachable ();
2888 }
2889
2890 /* There's no easy way to detect the case we need to set EDOM. */
2891 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2892 return NULL_RTX;
2893
2894 /* Make a suitable register to place result in. */
2895 mode = TYPE_MODE (TREE_TYPE (exp));
2896
2897 /* There's no easy way to detect the case we need to set EDOM. */
2898 if (!flag_errno_math)
2899 {
2900 rtx result = gen_reg_rtx (mode);
2901
2902 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2903 need to expand the argument again. This way, we will not perform
2904 side-effects more than once. */
2905 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2906
2907 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2908
2909 start_sequence ();
2910
2911 if (expand_sfix_optab (result, op0, builtin_optab))
2912 {
2913 /* Output the entire sequence. */
2914 insns = get_insns ();
2915 end_sequence ();
2916 emit_insn (insns);
2917 return result;
2918 }
2919
2920 /* If we were unable to expand via the builtin, stop the sequence
2921 (without outputting the insns) and call to the library function
2922 with the stabilized argument list. */
2923 end_sequence ();
2924 }
2925
2926 if (fallback_fn != BUILT_IN_NONE)
2927 {
2928 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2929 targets, (int) round (x) should never be transformed into
2930 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2931 a call to lround in the hope that the target provides at least some
2932 C99 functions. This should result in the best user experience for
2933 targets without full C99 support. */
2934 tree fallback_fndecl = mathfn_built_in_1
2935 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2936
2937 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2938 fallback_fndecl, 1, arg);
2939
2940 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2941 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2942 return convert_to_mode (mode, target, 0);
2943 }
2944
2945 return expand_call (exp, target, target == const0_rtx);
2946 }
2947
2948 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2949 a normal call should be emitted rather than expanding the function
2950 in-line. EXP is the expression that is a call to the builtin
2951 function; if convenient, the result should be placed in TARGET. */
2952
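/* Illustrative lowering (assuming a typical target): `__builtin_powi (x, n)'
   with double X becomes a libcall to libgcc's __powidf2, with N first
   converted to the mode of an `int'.  */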
2953 static rtx
2954 expand_builtin_powi (tree exp, rtx target)
2955 {
2956 tree arg0, arg1;
2957 rtx op0, op1;
2958 machine_mode mode;
2959 machine_mode mode2;
2960
2961 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2962 return NULL_RTX;
2963
2964 arg0 = CALL_EXPR_ARG (exp, 0);
2965 arg1 = CALL_EXPR_ARG (exp, 1);
2966 mode = TYPE_MODE (TREE_TYPE (exp));
2967
2968 /* Emit a libcall to libgcc. */
2969
2970 /* Mode of the 2nd argument must match that of an int. */
2971 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2972
2973 if (target == NULL_RTX)
2974 target = gen_reg_rtx (mode);
2975
2976 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2977 if (GET_MODE (op0) != mode)
2978 op0 = convert_to_mode (mode, op0, 0);
2979 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2980 if (GET_MODE (op1) != mode2)
2981 op1 = convert_to_mode (mode2, op1, 0);
2982
2983 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2984 target, LCT_CONST, mode,
2985 op0, mode, op1, mode2);
2986
2987 return target;
2988 }
2989
2990 /* Expand expression EXP which is a call to the strlen builtin. Return
2991 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2992 try to get the result in TARGET, if convenient. */
2993
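/* For example, `strlen ("hello")' never reaches the strlen_optab path:
   c_strlen computes the constant 5 at compile time and that constant is
   returned directly.  Only calls whose length is unknown fall through to
   the inline expansion below.  */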
2994 static rtx
2995 expand_builtin_strlen (tree exp, rtx target,
2996 machine_mode target_mode)
2997 {
2998 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2999 return NULL_RTX;
3000
3001 class expand_operand ops[4];
3002 rtx pat;
3003 tree len;
3004 tree src = CALL_EXPR_ARG (exp, 0);
3005 rtx src_reg;
3006 rtx_insn *before_strlen;
3007 machine_mode insn_mode;
3008 enum insn_code icode = CODE_FOR_nothing;
3009 unsigned int align;
3010
3011 /* If the length can be computed at compile-time, return it. */
3012 len = c_strlen (src, 0);
3013 if (len)
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015
3016 /* If the length can be computed at compile-time and is constant
3017 integer, but there are side-effects in src, evaluate
3018 src for side-effects, then return len.
3019 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3020 can be optimized into: i++; x = 3; */
3021 len = c_strlen (src, 1);
3022 if (len && TREE_CODE (len) == INTEGER_CST)
3023 {
3024 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3025 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3026 }
3027
3028 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3029
3030 /* If SRC is not a pointer type, don't do this operation inline. */
3031 if (align == 0)
3032 return NULL_RTX;
3033
3034 /* Bail out if we can't compute strlen in the right mode. */
3035 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3036 {
3037 icode = optab_handler (strlen_optab, insn_mode);
3038 if (icode != CODE_FOR_nothing)
3039 break;
3040 }
3041 if (insn_mode == VOIDmode)
3042 return NULL_RTX;
3043
3044 /* Make a place to hold the source address. We will not expand
3045 the actual source until we are sure that the expansion will
3046 not fail -- there are trees that cannot be expanded twice. */
3047 src_reg = gen_reg_rtx (Pmode);
3048
3049 /* Mark the beginning of the strlen sequence so we can emit the
3050 source operand later. */
3051 before_strlen = get_last_insn ();
3052
3053 create_output_operand (&ops[0], target, insn_mode);
3054 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3055 create_integer_operand (&ops[2], 0);
3056 create_integer_operand (&ops[3], align);
3057 if (!maybe_expand_insn (icode, 4, ops))
3058 return NULL_RTX;
3059
3060 /* Check to see if the argument was declared attribute nonstring
3061 and if so, issue a warning since at this point it's not known
3062 to be nul-terminated. */
3063 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3064
3065 /* Now that we are assured of success, expand the source. */
3066 start_sequence ();
3067 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3068 if (pat != src_reg)
3069 {
3070 #ifdef POINTERS_EXTEND_UNSIGNED
3071 if (GET_MODE (pat) != Pmode)
3072 pat = convert_to_mode (Pmode, pat,
3073 POINTERS_EXTEND_UNSIGNED);
3074 #endif
3075 emit_move_insn (src_reg, pat);
3076 }
3077 pat = get_insns ();
3078 end_sequence ();
3079
3080 if (before_strlen)
3081 emit_insn_after (pat, before_strlen);
3082 else
3083 emit_insn_before (pat, get_insns ());
3084
3085 /* Return the value in the proper mode for this function. */
3086 if (GET_MODE (ops[0].value) == target_mode)
3087 target = ops[0].value;
3088 else if (target != 0)
3089 convert_move (target, ops[0].value, 0);
3090 else
3091 target = convert_to_mode (target_mode, ops[0].value, 0);
3092
3093 return target;
3094 }
3095
3096 /* Expand call EXP to the strnlen built-in, returning the result
3097 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3098
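/* An illustrative diagnosed case (hypothetical user code):

     char a[4] = "1234";      // no room for a terminating nul
     n = strnlen (a, 8);      // bound larger than the array

   is warned about below with -Wstringop-overflow as a bound that exceeds
   the size of the unterminated array; bounds larger than the maximum
   object size are diagnosed separately.  */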
3099 static rtx
3100 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3101 {
3102 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3103 return NULL_RTX;
3104
3105 tree src = CALL_EXPR_ARG (exp, 0);
3106 tree bound = CALL_EXPR_ARG (exp, 1);
3107
3108 if (!bound)
3109 return NULL_RTX;
3110
3111 location_t loc = UNKNOWN_LOCATION;
3112 if (EXPR_HAS_LOCATION (exp))
3113 loc = EXPR_LOCATION (exp);
3114
3115 tree maxobjsize = max_object_size ();
3116 tree func = get_callee_fndecl (exp);
3117
3118 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3119 so these conversions aren't necessary. */
3120 c_strlen_data lendata = { };
3121 tree len = c_strlen (src, 0, &lendata, 1);
3122 if (len)
3123 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3124
3125 if (TREE_CODE (bound) == INTEGER_CST)
3126 {
3127 if (!TREE_NO_WARNING (exp)
3128 && tree_int_cst_lt (maxobjsize, bound)
3129 && warning_at (loc, OPT_Wstringop_overflow_,
3130 "%K%qD specified bound %E "
3131 "exceeds maximum object size %E",
3132 exp, func, bound, maxobjsize))
3133 TREE_NO_WARNING (exp) = true;
3134
3135 bool exact = true;
3136 if (!len || TREE_CODE (len) != INTEGER_CST)
3137 {
3138 /* Clear EXACT if LEN may be less than SRC suggests,
3139 such as in
3140 strnlen (&a[i], sizeof a)
3141 where the value of i is unknown. Unless i's value is
3142 zero, the call is unsafe because the bound is greater. */
3143 lendata.decl = unterminated_array (src, &len, &exact);
3144 if (!lendata.decl)
3145 return NULL_RTX;
3146 }
3147
3148 if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
3149 {
3150 location_t warnloc
3151 = expansion_point_location_if_in_system_header (loc);
3152
3153 if (!TREE_NO_WARNING (exp)
3154 && warning_at (warnloc, OPT_Wstringop_overflow_,
3155 exact
3156 ? G_("%K%qD specified bound %E exceeds the size "
3157 "%E of unterminated array")
3158 : G_("%K%qD specified bound %E may exceed the "
3159 "size of at most %E of unterminated array"),
3160 exp, func, bound, len))
3161 {
3162 inform (DECL_SOURCE_LOCATION (lendata.decl),
3163 "referenced argument declared here");
3164 TREE_NO_WARNING (exp) = true;
3165 }
3166 return NULL_RTX;
3167 }
3168
3169 if (!len)
3170 return NULL_RTX;
3171
3172 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3173 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3174 }
3175
3176 if (TREE_CODE (bound) != SSA_NAME)
3177 return NULL_RTX;
3178
3179 wide_int min, max;
3180 enum value_range_kind rng = get_range_info (bound, &min, &max);
3181 if (rng != VR_RANGE)
3182 return NULL_RTX;
3183
3184 if (!TREE_NO_WARNING (exp)
3185 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3186 && warning_at (loc, OPT_Wstringop_overflow_,
3187 "%K%qD specified bound [%wu, %wu] "
3188 "exceeds maximum object size %E",
3189 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3190 TREE_NO_WARNING (exp) = true;
3191
3192 bool exact = true;
3193 if (!len || TREE_CODE (len) != INTEGER_CST)
3194 {
3195 lendata.decl = unterminated_array (src, &len, &exact);
3196 if (!lendata.decl)
3197 return NULL_RTX;
3198 }
3199
3200 if (lendata.decl
3201 && !TREE_NO_WARNING (exp)
3202 && (wi::ltu_p (wi::to_wide (len), min)
3203 || !exact))
3204 {
3205 location_t warnloc
3206 = expansion_point_location_if_in_system_header (loc);
3207
3208 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3209 exact
3210 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3211 "the size %E of unterminated array")
3212 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3213 "the size of at most %E of unterminated array"),
3214 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3215 {
3216 inform (DECL_SOURCE_LOCATION (lendata.decl),
3217 "referenced argument declared here");
3218 TREE_NO_WARNING (exp) = true;
3219 }
3220 }
3221
3222 if (lendata.decl)
3223 return NULL_RTX;
3224
3225 if (wi::gtu_p (min, wi::to_wide (len)))
3226 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3227
3228 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3229 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3230 }
3231
3232 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3233 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3234 a target constant. */
3235
3236 static rtx
3237 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3238 scalar_int_mode mode)
3239 {
3240 /* The REPresentation pointed to by DATA need not be a nul-terminated
3241 string but the caller guarantees it's large enough for MODE. */
3242 const char *rep = (const char *) data;
3243
3244 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3245 }
3246
3247 /* LEN specifies the length of the block of the memcpy/memset operation.
3248 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3249 In some cases we can make very likely guess on max size, then we
3250 set it into PROBABLE_MAX_SIZE. */
3251
3252 static void
3253 determine_block_size (tree len, rtx len_rtx,
3254 unsigned HOST_WIDE_INT *min_size,
3255 unsigned HOST_WIDE_INT *max_size,
3256 unsigned HOST_WIDE_INT *probable_max_size)
3257 {
3258 if (CONST_INT_P (len_rtx))
3259 {
3260 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3261 return;
3262 }
3263 else
3264 {
3265 wide_int min, max;
3266 enum value_range_kind range_type = VR_UNDEFINED;
3267
3268 /* Determine bounds from the type. */
3269 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3270 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3271 else
3272 *min_size = 0;
3273 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3274 *probable_max_size = *max_size
3275 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3276 else
3277 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3278
3279 if (TREE_CODE (len) == SSA_NAME)
3280 range_type = get_range_info (len, &min, &max);
3281 if (range_type == VR_RANGE)
3282 {
3283 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3284 *min_size = min.to_uhwi ();
3285 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3286 *probable_max_size = *max_size = max.to_uhwi ();
3287 }
3288 else if (range_type == VR_ANTI_RANGE)
3289 {
3290 /* An anti range 0...N lets us determine the minimal size to be N+1. */
3291 if (min == 0)
3292 {
3293 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3294 *min_size = max.to_uhwi () + 1;
3295 }
3296 /* Code like
3297
3298 int n;
3299 if (n < 100)
3300 memcpy (a, b, n)
3301
3302 produces an anti range allowing negative values of N. We can
3303 still use that information to guess that N is not negative.
3304 */
3305 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3306 *probable_max_size = min.to_uhwi () - 1;
3307 }
3308 }
3309 gcc_checking_assert (*max_size <=
3310 (unsigned HOST_WIDE_INT)
3311 GET_MODE_MASK (GET_MODE (len_rtx)));
3312 }
3313
3314 /* For an expression EXP issue an access warning controlled by option OPT
3315 for an access in the RANGE of sizes to a region SLEN bytes in size. */
3316
3317 static bool
3318 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
3319 tree slen, bool access)
3320 {
3321 bool warned = false;
3322
3323 if (access)
3324 {
3325 if (tree_int_cst_equal (range[0], range[1]))
3326 warned = (func
3327 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3328 "%K%qD reading %E byte from a region of size %E",
3329 "%K%qD reading %E bytes from a region of size %E",
3330 exp, func, range[0], slen)
3331 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3332 "%Kreading %E byte from a region of size %E",
3333 "%Kreading %E bytes from a region of size %E",
3334 exp, range[0], slen));
3335 else if (tree_int_cst_sign_bit (range[1]))
3336 {
3337 /* Avoid printing the upper bound if it's invalid. */
3338 warned = (func
3339 ? warning_at (loc, opt,
3340 "%K%qD reading %E or more bytes from a region "
3341 "of size %E",
3342 exp, func, range[0], slen)
3343 : warning_at (loc, opt,
3344 "%Kreading %E or more bytes from a region "
3345 "of size %E",
3346 exp, range[0], slen));
3347 }
3348 else
3349 warned = (func
3350 ? warning_at (loc, opt,
3351 "%K%qD reading between %E and %E bytes from "
3352 "a region of size %E",
3353 exp, func, range[0], range[1], slen)
3354 : warning_at (loc, opt,
3355 "%Kreading between %E and %E bytes from "
3356 "a region of size %E",
3357 exp, range[0], range[1], slen));
3358
3359 return warned;
3360 }
3361
3362 if (tree_int_cst_equal (range[0], range[1]))
3363 warned = (func
3364 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3365 "%K%qD epecting %E byte in a region of size %E",
3366 "%K%qD expecting %E bytes in a region of size %E",
3367 exp, func, range[0], slen)
3368 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3369 "%Kexpecting %E byte in a region of size %E",
3370 "%Kexpecting %E bytes in a region of size %E",
3371 exp, range[0], slen));
3372 else if (tree_int_cst_sign_bit (range[1]))
3373 {
3374 /* Avoid printing the upper bound if it's invalid. */
3375 warned = (func
3376 ? warning_at (loc, opt,
3377 "%K%qD expecting %E or more bytes in a region "
3378 "of size %E",
3379 exp, func, range[0], slen)
3380 : warning_at (loc, opt,
3381 "%Kexpecting %E or more bytes in a region "
3382 "of size %E",
3383 exp, range[0], slen));
3384 }
3385 else
3386 warned = (func
3387 ? warning_at (loc, opt,
3388 "%K%qD expecting between %E and %E bytes in "
3389 "a region of size %E",
3390 exp, func, range[0], range[1], slen)
3391 : warning_at (loc, opt,
3392 "%Kexpectting between %E and %E bytes in "
3393 "a region of size %E",
3394 exp, range[0], range[1], slen));
3395 return warned;
3396 }
3397
3398 /* Issue an inform message describing the target of an access REF.
3399 WRITE is set for a write access and clear for a read access. */
3400
3401 static void
3402 inform_access (const access_ref &ref, bool write)
3403 {
3404 if (!ref.ref)
3405 return;
3406
3407 /* Convert offset range and avoid including a zero range since it isn't
3408 necessarily meaningful. */
3409 long long minoff = 0, maxoff = 0;
3410 if (wi::fits_shwi_p (ref.offrng[0])
3411 && wi::fits_shwi_p (ref.offrng[1]))
3412 {
3413 minoff = ref.offrng[0].to_shwi ();
3414 maxoff = ref.offrng[1].to_shwi ();
3415 }
3416
3417 /* Convert size range and always include it since all sizes are
3418 meaningful. */
3419 unsigned long long minsize = 0, maxsize = 0;
3420 if (wi::fits_shwi_p (ref.sizrng[0])
3421 && wi::fits_shwi_p (ref.sizrng[1]))
3422 {
3423 minsize = ref.sizrng[0].to_shwi ();
3424 maxsize = ref.sizrng[1].to_shwi ();
3425 }
3426
3427 char sizestr[80];
3428 location_t loc;
3429 tree allocfn = NULL_TREE;
3430 if (TREE_CODE (ref.ref) == SSA_NAME)
3431 {
3432 gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
3433 gcc_assert (is_gimple_call (stmt));
3434 loc = gimple_location (stmt);
3435 allocfn = gimple_call_fndecl (stmt);
3436 if (!allocfn)
3437 /* Handle calls through pointers to functions. */
3438 allocfn = gimple_call_fn (stmt);
3439
3440 /* SIZRNG doesn't necessarily have the same range as the allocation
3441 size determined by gimple_call_alloc_size (). */
3442
3443 if (minsize == maxsize)
3444 sprintf (sizestr, "%llu", minsize);
3445 else
3446 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
3447
3448 }
3449 else
3450 loc = DECL_SOURCE_LOCATION (ref.ref);
3451
3452 if (write)
3453 {
3454 if (DECL_P (ref.ref))
3455 {
3456 if (minoff == maxoff)
3457 {
3458 if (minoff == 0)
3459 inform (loc, "destination object %qD", ref.ref);
3460 else
3461 inform (loc, "at offset %lli into destination object %qD",
3462 minoff, ref.ref);
3463 }
3464 else
3465 inform (loc, "at offset [%lli, %lli] into destination object %qD",
3466 minoff, maxoff, ref.ref);
3467 return;
3468 }
3469
3470 if (minoff == maxoff)
3471 {
3472 if (minoff == 0)
3473 inform (loc, "destination object of size %s allocated by %qE",
3474 sizestr, allocfn);
3475 else
3476 inform (loc,
3477 "at offset %lli into destination object of size %s "
3478 "allocated by %qE", minoff, sizestr, allocfn);
3479 }
3480 else
3481 inform (loc,
3482 "at offset [%lli, %lli] into destination object of size %s "
3483 "allocated by %qE",
3484 minoff, maxoff, sizestr, allocfn);
3485
3486 return;
3487 }
3488
3489 if (DECL_P (ref.ref))
3490 {
3491 if (minoff == maxoff)
3492 {
3493 if (minoff == 0)
3494 inform (loc, "source object %qD", ref.ref);
3495 else
3496 inform (loc, "at offset %lli into source object %qD",
3497 minoff, ref.ref);
3498 }
3499 else
3500 inform (loc, "at offset [%lli, %lli] into source object %qD",
3501 minoff, maxoff, ref.ref);
3502 return;
3503 }
3504
3505 if (minoff == maxoff)
3506 {
3507 if (minoff == 0)
3508 inform (loc, "source object of size %s allocated by %qE",
3509 sizestr, allocfn);
3510 else
3511 inform (loc,
3512 "at offset %lli into source object of size %s "
3513 "allocated by %qE", minoff, sizestr, allocfn);
3514 }
3515 else
3516 inform (loc,
3517 "at offset [%lli, %lli] into source object of size %s "
3518 "allocated by %qE",
3519 minoff, maxoff, sizestr, allocfn);
3520 }
3521
3522 /* Try to verify that the sizes and lengths of the arguments to a string
3523 manipulation function given by EXP are within valid bounds and that
3524 the operation does not lead to buffer overflow or read past the end.
3525 Arguments other than EXP may be null. When non-null, the arguments
3526 have the following meaning:
3527 DST is the destination of a copy call or NULL otherwise.
3528 SRC is the source of a copy call or NULL otherwise.
3529 DSTWRITE is the number of bytes written into the destination obtained
3530 from the user-supplied size argument to the function (such as in
3531 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3532 MAXREAD is the user-supplied bound on the length of the source sequence
3533 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3534 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3535 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3536 expression EXP is a string function call (as opposed to a memory call
3537 like memcpy). As an exception, SRCSTR can also be an integer denoting
3538 the precomputed size of the source string or object (for functions like
3539 memcpy).
3540 DSTSIZE is the size of the destination object specified by the last
3541 argument to the _chk builtins, typically resulting from the expansion
3542 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3543 DSTSIZE)).
3544
3545 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3546 SIZE_MAX.
3547
3548 ACCESS is true for accesses, false for simple size checks in calls
3549 to functions that neither read from nor write to the region.
3550
3551 When nonnull, PAD points to a more detailed description of the access.
3552
3553 If the call is successfully verified as safe return true, otherwise
3554 return false. */
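/* A hedged example of the parameter mapping (hypothetical call, not
   from the original sources): for

     char d[4];
     strncpy (d, s, 8);

   DST is D, SRC and SRCSTR are S, DSTWRITE is 8, MAXREAD is null and
   DSTSIZE is the size of D, so the checks below diagnose writing
   8 bytes into a region of size 4.  */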
3555
3556 bool
3557 check_access (tree exp, tree, tree, tree dstwrite,
3558 tree maxread, tree srcstr, tree dstsize,
3559 bool access /* = true */,
3560 const access_data *pad /* = NULL */)
3561 {
3562 int opt = OPT_Wstringop_overflow_;
3563
3564 /* The size of the largest object is half the address space, or
3565 PTRDIFF_MAX. (This is way too permissive.) */
3566 tree maxobjsize = max_object_size ();
3567
3568 /* Either the length of the source string for string functions or
3569 the size of the source object for raw memory functions. */
3570 tree slen = NULL_TREE;
3571
3572 tree range[2] = { NULL_TREE, NULL_TREE };
3573
3574 /* Set to true when the exact number of bytes written by a string
3575 function like strcpy is not known and the only thing that is
3576 known is that it must be at least one (for the terminating nul). */
3577 bool at_least_one = false;
3578 if (srcstr)
3579 {
3580 /* SRCSTR is normally a pointer to a string but as a special case
3581 it can be an integer denoting the length of a string. */
3582 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3583 {
3584 /* Try to determine the range of lengths the source string
3585 refers to. If it can be determined and is less than
3586 the upper bound given by MAXREAD add one to it for
3587 the terminating nul. Otherwise, set it to one for
3588 the same reason, or to MAXREAD as appropriate. */
3589 c_strlen_data lendata = { };
3590 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3591 range[0] = lendata.minlen;
3592 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3593 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3594 {
3595 if (maxread && tree_int_cst_le (maxread, range[0]))
3596 range[0] = range[1] = maxread;
3597 else
3598 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3599 range[0], size_one_node);
3600
3601 if (maxread && tree_int_cst_le (maxread, range[1]))
3602 range[1] = maxread;
3603 else if (!integer_all_onesp (range[1]))
3604 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3605 range[1], size_one_node);
3606
3607 slen = range[0];
3608 }
3609 else
3610 {
3611 at_least_one = true;
3612 slen = size_one_node;
3613 }
3614 }
3615 else
3616 slen = srcstr;
3617 }
3618
3619 if (!dstwrite && !maxread)
3620 {
3621 /* When the only available piece of data is the object size
3622 there is nothing to do. */
3623 if (!slen)
3624 return true;
3625
3626 /* Otherwise, when the length of the source sequence is known
3627 (as with strlen), set DSTWRITE to it. */
3628 if (!range[0])
3629 dstwrite = slen;
3630 }
3631
3632 if (!dstsize)
3633 dstsize = maxobjsize;
3634
3635 if (dstwrite)
3636 get_size_range (dstwrite, range);
3637
3638 tree func = get_callee_fndecl (exp);
3639
3640 /* First check the number of bytes to be written against the maximum
3641 object size. */
3642 if (range[0]
3643 && TREE_CODE (range[0]) == INTEGER_CST
3644 && tree_int_cst_lt (maxobjsize, range[0]))
3645 {
3646 if (TREE_NO_WARNING (exp))
3647 return false;
3648
3649 location_t loc = tree_nonartificial_location (exp);
3650 loc = expansion_point_location_if_in_system_header (loc);
3651
3652 bool warned;
3653 if (range[0] == range[1])
3654 warned = (func
3655 ? warning_at (loc, opt,
3656 "%K%qD specified size %E "
3657 "exceeds maximum object size %E",
3658 exp, func, range[0], maxobjsize)
3659 : warning_at (loc, opt,
3660 "%Kspecified size %E "
3661 "exceeds maximum object size %E",
3662 exp, range[0], maxobjsize));
3663 else
3664 warned = (func
3665 ? warning_at (loc, opt,
3666 "%K%qD specified size between %E and %E "
3667 "exceeds maximum object size %E",
3668 exp, func,
3669 range[0], range[1], maxobjsize)
3670 : warning_at (loc, opt,
3671 "%Kspecified size between %E and %E "
3672 "exceeds maximum object size %E",
3673 exp, range[0], range[1], maxobjsize));
3674 if (warned)
3675 TREE_NO_WARNING (exp) = true;
3676
3677 return false;
3678 }
3679
3680 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3681 constant, and in range of unsigned HOST_WIDE_INT. */
3682 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3683
3684 /* Next check the number of bytes to be written against the destination
3685 object size. */
3686 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3687 {
3688 if (range[0]
3689 && TREE_CODE (range[0]) == INTEGER_CST
3690 && ((tree_fits_uhwi_p (dstsize)
3691 && tree_int_cst_lt (dstsize, range[0]))
3692 || (dstwrite
3693 && tree_fits_uhwi_p (dstwrite)
3694 && tree_int_cst_lt (dstwrite, range[0]))))
3695 {
3696 if (TREE_NO_WARNING (exp))
3697 return false;
3698
3699 location_t loc = tree_nonartificial_location (exp);
3700 loc = expansion_point_location_if_in_system_header (loc);
3701
3702 bool warned = false;
3703 if (dstwrite == slen && at_least_one)
3704 {
3705 /* This is a call to strcpy with a destination of 0 size
3706 and a source of unknown length. The call will write
3707 at least one byte past the end of the destination. */
3708 warned = (func
3709 ? warning_at (loc, opt,
3710 "%K%qD writing %E or more bytes into "
3711 "a region of size %E overflows "
3712 "the destination",
3713 exp, func, range[0], dstsize)
3714 : warning_at (loc, opt,
3715 "%Kwriting %E or more bytes into "
3716 "a region of size %E overflows "
3717 "the destination",
3718 exp, range[0], dstsize));
3719 }
3720 else if (tree_int_cst_equal (range[0], range[1]))
3721 warned = (func
3722 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3723 "%K%qD writing %E byte into a region "
3724 "of size %E overflows the destination",
3725 "%K%qD writing %E bytes into a region "
3726 "of size %E overflows the destination",
3727 exp, func, range[0], dstsize)
3728 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3729 "%Kwriting %E byte into a region "
3730 "of size %E overflows the destination",
3731 "%Kwriting %E bytes into a region "
3732 "of size %E overflows the destination",
3733 exp, range[0], dstsize));
3734 else if (tree_int_cst_sign_bit (range[1]))
3735 {
3736 /* Avoid printing the upper bound if it's invalid. */
3737 warned = (func
3738 ? warning_at (loc, opt,
3739 "%K%qD writing %E or more bytes into "
3740 "a region of size %E overflows "
3741 "the destination",
3742 exp, func, range[0], dstsize)
3743 : warning_at (loc, opt,
3744 "%Kwriting %E or more bytes into "
3745 "a region of size %E overflows "
3746 "the destination",
3747 exp, range[0], dstsize));
3748 }
3749 else
3750 warned = (func
3751 ? warning_at (loc, opt,
3752 "%K%qD writing between %E and %E bytes "
3753 "into a region of size %E overflows "
3754 "the destination",
3755 exp, func, range[0], range[1],
3756 dstsize)
3757 : warning_at (loc, opt,
3758 "%Kwriting between %E and %E bytes "
3759 "into a region of size %E overflows "
3760 "the destination",
3761 exp, range[0], range[1],
3762 dstsize));
3763 if (warned)
3764 {
3765 TREE_NO_WARNING (exp) = true;
3766 if (pad)
3767 inform_access (pad->dst, true);
3768 }
3769
3770 /* Return error when an overflow has been detected. */
3771 return false;
3772 }
3773 }
3774
3775 /* Check the maximum length of the source sequence against the size
3776 of the destination object if known, or against the maximum size
3777 of an object. */
3778 if (maxread)
3779 {
3780 get_size_range (maxread, range);
3781 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3782 {
3783 location_t loc = tree_nonartificial_location (exp);
3784 loc = expansion_point_location_if_in_system_header (loc);
3785
3786 if (tree_int_cst_lt (maxobjsize, range[0]))
3787 {
3788 if (TREE_NO_WARNING (exp))
3789 return false;
3790
3791 bool warned = false;
3792
3793 /* Warn about crazy big sizes first since that's more
3794 likely to be meaningful than saying that the bound
3795 is greater than the object size if both are big. */
3796 if (range[0] == range[1])
3797 warned = (func
3798 ? warning_at (loc, opt,
3799 "%K%qD specified bound %E "
3800 "exceeds maximum object size %E",
3801 exp, func, range[0], maxobjsize)
3802 : warning_at (loc, opt,
3803 "%Kspecified bound %E "
3804 "exceeds maximum object size %E",
3805 exp, range[0], maxobjsize));
3806 else
3807 warned = (func
3808 ? warning_at (loc, opt,
3809 "%K%qD specified bound between "
3810 "%E and %E exceeds maximum object "
3811 "size %E",
3812 exp, func,
3813 range[0], range[1], maxobjsize)
3814 : warning_at (loc, opt,
3815 "%Kspecified bound between "
3816 "%E and %E exceeds maximum object "
3817 "size %E",
3818 exp, range[0], range[1], maxobjsize));
3819 if (warned)
3820 TREE_NO_WARNING (exp) = true;
3821
3822 return false;
3823 }
3824
3825 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3826 {
3827 if (TREE_NO_WARNING (exp))
3828 return false;
3829
3830 bool warned = false;
3831
3832 if (tree_int_cst_equal (range[0], range[1]))
3833 warned = (func
3834 ? warning_at (loc, opt,
3835 "%K%qD specified bound %E "
3836 "exceeds destination size %E",
3837 exp, func,
3838 range[0], dstsize)
3839 : warning_at (loc, opt,
3840 "%Kspecified bound %E "
3841 "exceeds destination size %E",
3842 exp, range[0], dstsize));
3843 else
3844 warned = (func
3845 ? warning_at (loc, opt,
3846 "%K%qD specified bound between %E "
3847 "and %E exceeds destination size %E",
3848 exp, func,
3849 range[0], range[1], dstsize)
3850 : warning_at (loc, opt,
3851 "%Kspecified bound between %E "
3852 "and %E exceeds destination size %E",
3853 exp,
3854 range[0], range[1], dstsize));
3855 if (warned)
3856 TREE_NO_WARNING (exp) = true;
3857
3858 return false;
3859 }
3860 }
3861 }
3862
3863 /* Check for reading past the end of SRC. */
3864 if (slen
3865 && slen == srcstr
3866 && dstwrite && range[0]
3867 && tree_int_cst_lt (slen, range[0]))
3868 {
3869 if (TREE_NO_WARNING (exp))
3870 return false;
3871
3872 location_t loc = tree_nonartificial_location (exp);
3873 loc = expansion_point_location_if_in_system_header (loc);
3874
3875 if (warn_for_access (loc, func, exp, opt, range, slen, access))
3876 {
3877 TREE_NO_WARNING (exp) = true;
3878 if (pad)
3879 inform_access (pad->src, false);
3880 }
3881 return false;
3882 }
3883
3884 return true;
3885 }
3886
3887 /* If STMT is a call to an allocation function, returns the constant
3888 size of the object allocated by the call represented as sizetype.
3889 If nonnull, sets RNG1[] to the range of the size. */
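/* A minimal sketch of the attribute handling below (hypothetical
   declaration, not from the original sources):

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   For a call my_calloc (4, 8) the two alloc_size indices select the
   arguments 4 and 8 and the function returns their product, 32,
   clamped to SIZE_MAX when the product of the upper bounds would
   overflow.  */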
3890
3891 tree
3892 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3893 const vr_values *rvals /* = NULL */)
3894 {
3895 if (!stmt)
3896 return NULL_TREE;
3897
3898 tree allocfntype;
3899 if (tree fndecl = gimple_call_fndecl (stmt))
3900 allocfntype = TREE_TYPE (fndecl);
3901 else
3902 allocfntype = gimple_call_fntype (stmt);
3903
3904 if (!allocfntype)
3905 return NULL_TREE;
3906
3907 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3908 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3909 if (!at)
3910 {
3911 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3912 return NULL_TREE;
3913
3914 argidx1 = 0;
3915 }
3916
3917 unsigned nargs = gimple_call_num_args (stmt);
3918
3919 if (argidx1 == UINT_MAX)
3920 {
3921 tree atval = TREE_VALUE (at);
3922 if (!atval)
3923 return NULL_TREE;
3924
3925 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3926 if (nargs <= argidx1)
3927 return NULL_TREE;
3928
3929 atval = TREE_CHAIN (atval);
3930 if (atval)
3931 {
3932 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3933 if (nargs <= argidx2)
3934 return NULL_TREE;
3935 }
3936 }
3937
3938 tree size = gimple_call_arg (stmt, argidx1);
3939
3940 wide_int rng1_buf[2];
3941 /* If RNG1 is not set, use the buffer. */
3942 if (!rng1)
3943 rng1 = rng1_buf;
3944
3945 if (!get_range (size, rng1, rvals))
3946 return NULL_TREE;
3947
3948 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
3949 return fold_convert (sizetype, size);
3950
3951 /* To handle ranges do the math in wide_int and return the product
3952 of the upper bounds as a constant. Ignore anti-ranges. */
3953 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3954 wide_int rng2[2];
3955 if (!get_range (n, rng2, rvals))
3956 return NULL_TREE;
3957
3958 /* Extend to the maximum precision to avoid overflow. */
3959 const int prec = ADDR_MAX_PRECISION;
3960 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3961 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3962 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3963 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3964
3965 /* Compute products of both bounds for the caller but return the lesser
3966 of SIZE_MAX and the product of the upper bounds as a constant. */
3967 rng1[0] = rng1[0] * rng2[0];
3968 rng1[1] = rng1[1] * rng2[1];
3969 tree size_max = TYPE_MAX_VALUE (sizetype);
3970 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3971 {
3972 rng1[1] = wi::to_wide (size_max);
3973 return size_max;
3974 }
3975
3976 return wide_int_to_tree (sizetype, rng1[1]);
3977 }
3978
3979 /* Wrapper around the wide_int overload of get_range. Returns the same
3980 result but accepts offset_int instead. */
3981
3982 static bool
3983 get_range (tree x, signop sgn, offset_int r[2],
3984 const vr_values *rvals /* = NULL */)
3985 {
3986 wide_int wr[2];
3987 if (!get_range (x, wr, rvals))
3988 return false;
3989
3990 r[0] = offset_int::from (wr[0], sgn);
3991 r[1] = offset_int::from (wr[1], sgn);
3992 return true;
3993 }
3994
3995 /* Helper to compute the size of the object referenced by the PTR
3996 expression which must have pointer type, using Object Size type
3997 OSTYPE (only the least significant 2 bits are used).
3998 On success, sets PREF->REF to the DECL of the referenced object
3999 if it's unique, otherwise to null, PREF->OFFRNG to the range of
4000 offsets into it, and PREF->SIZRNG to the range of sizes of
4001 the object(s).
4002 VISITED is used to avoid visiting the same PHI operand multiple
4003 times, and, when nonnull, RVALS to determine range information.
4004 Returns true on success, false when the size cannot be determined.
4005
4006 The function is intended for diagnostics and should not be used
4007 to influence code generation or optimization. */
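/* For instance (an illustrative sketch only): given a declaration
   char buf[16], compute_objsize (&buf[4], 1, &ref) leaves REF.REF
   pointing at BUF with REF.SIZRNG == [16, 16] and REF.OFFRNG == [4, 4];
   the convenience wrapper further below then reports 16 - 4 == 12
   remaining bytes.  */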
4008
4009 static bool
4010 compute_objsize (tree ptr, int ostype, access_ref *pref,
4011 bitmap *visited, const vr_values *rvals /* = NULL */)
4012 {
4013 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
4014 if (addr)
4015 ptr = TREE_OPERAND (ptr, 0);
4016
4017 if (DECL_P (ptr))
4018 {
4019 /* Bail if the reference is to the pointer itself (as opposed
4020 to what it points to). */
4021 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
4022 return false;
4023
4024 tree size = decl_init_size (ptr, false);
4025 if (!size || TREE_CODE (size) != INTEGER_CST)
4026 return false;
4027
4028 pref->ref = ptr;
4029 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4030 return true;
4031 }
4032
4033 const tree_code code = TREE_CODE (ptr);
4034
4035 if (code == COMPONENT_REF)
4036 {
4037 tree field = TREE_OPERAND (ptr, 1);
4038
4039 if (ostype == 0)
4040 {
4041 /* For raw memory functions like memcpy bail if the size
4042 of the enclosing object cannot be determined. */
4043 tree ref = TREE_OPERAND (ptr, 0);
4044 if (!compute_objsize (ref, ostype, pref, visited, rvals)
4045 || !pref->ref)
4046 return false;
4047
4048 /* Otherwise, use the size of the enclosing object and add
4049 the offset of the member to the offset computed so far. */
4050 tree offset = byte_position (field);
4051 if (TREE_CODE (offset) != INTEGER_CST)
4052 return false;
4053 offset_int off = wi::to_offset (offset);
4054 pref->offrng[0] += off;
4055 pref->offrng[1] += off;
4056 return true;
4057 }
4058
4059 /* Bail if the reference is to the pointer itself (as opposed
4060 to what it points to). */
4061 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
4062 return false;
4063
4064 pref->ref = field;
4065 /* Only return constant sizes for now while callers depend
4066 on it. INT0LEN is true for interior zero-length arrays. */
4067 bool int0len = false;
4068 tree size = component_ref_size (ptr, &int0len);
4069 if (int0len)
4070 {
4071 pref->sizrng[0] = pref->sizrng[1] = 0;
4072 return true;
4073 }
4074
4075 if (!size || TREE_CODE (size) != INTEGER_CST)
4076 return false;
4077
4078 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4079 return true;
4080 }
4081
4082 if (code == ARRAY_REF || code == MEM_REF)
4083 {
4084 tree ref = TREE_OPERAND (ptr, 0);
4085 tree reftype = TREE_TYPE (ref);
4086 if (code == ARRAY_REF
4087 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
4088 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4089 of known bound. */
4090 return false;
4091
4092 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4093 {
4094 /* Give up for MEM_REFs of vector types; those may be synthesized
4095 from multiple assignments to consecutive data members. See PR
4096 93200.
4097 FIXME: Deal with this more generally, e.g., by marking up such
4098 MEM_REFs at the time they're created. */
4099 reftype = TREE_TYPE (reftype);
4100 if (TREE_CODE (reftype) == VECTOR_TYPE)
4101 return false;
4102 }
4103
4104 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4105 return false;
4106
4107 offset_int orng[2];
4108 tree off = TREE_OPERAND (ptr, 1);
4109 if (!get_range (off, SIGNED, orng, rvals))
4110 /* Fail unless the size of the object is zero. */
4111 return pref->sizrng[0] == 0 && pref->sizrng[0] == pref->sizrng[1];
4112
4113 if (TREE_CODE (ptr) == ARRAY_REF)
4114 {
4115 /* Convert the array index range determined above to a byte
4116 offset. */
4117 tree lowbnd = array_ref_low_bound (ptr);
4118 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4119 {
4120 /* Adjust the index by the low bound of the array domain
4121 (normally zero but 1 in Fortran). */
4122 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4123 orng[0] -= lb;
4124 orng[1] -= lb;
4125 }
4126
4127 tree eltype = TREE_TYPE (ptr);
4128 tree tpsize = TYPE_SIZE_UNIT (eltype);
4129 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
4130 return false;
4131
4132 offset_int sz = wi::to_offset (tpsize);
4133 orng[0] *= sz;
4134 orng[1] *= sz;
4135
4136 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
4137 {
4138 /* Except for the permissive raw memory functions which
4139 use the size of the whole object determined above,
4140 use the size of the referenced array. */
4141 pref->sizrng[0] = pref->offrng[0] + orng[0] + sz;
4142 pref->sizrng[1] = pref->offrng[1] + orng[1] + sz;
4143 }
4144 }
4145
4146 pref->offrng[0] += orng[0];
4147 pref->offrng[1] += orng[1];
4148
4149 return true;
4150 }
4151
4152 if (TREE_CODE (ptr) == SSA_NAME)
4153 {
4154 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
4155 if (is_gimple_call (stmt))
4156 {
4157 /* If STMT is a call to an allocation function get the size
4158 from its argument(s). If successful, also set PREF->REF to
4159 PTR for the caller to include in diagnostics. */
4160 wide_int wr[2];
4161 if (gimple_call_alloc_size (stmt, wr, rvals))
4162 {
4163 pref->ref = ptr;
4164 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4165 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4166 return true;
4167 }
4168 return false;
4169 }
4170
4171 /* TODO: Handle PHI. */
4172
4173 if (!is_gimple_assign (stmt))
4174 return false;
4175
4176 ptr = gimple_assign_rhs1 (stmt);
4177
4178 tree_code code = gimple_assign_rhs_code (stmt);
4179 if (TREE_CODE (TREE_TYPE (ptr)) != POINTER_TYPE)
4180 /* Avoid conversions from non-pointers. */
4181 return false;
4182
4183 if (code == POINTER_PLUS_EXPR)
4184 {
4185 /* If the offset in the expression can be determined use
4186 it to adjust the overall offset. Otherwise, set the overall
4187 offset to the maximum. */
4188 offset_int orng[2];
4189 tree off = gimple_assign_rhs2 (stmt);
4190 if (!get_range (off, SIGNED, orng, rvals)
4191 || !wi::les_p (orng[0], orng[1]))
4192 {
4193 orng[0] = wi::to_offset (TYPE_MIN_VALUE (ptrdiff_type_node));
4194 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4195 }
4196
4197 pref->offrng[0] += orng[0];
4198 pref->offrng[1] += orng[1];
4199 }
4200 else if (code != ADDR_EXPR)
4201 return false;
4202
4203 return compute_objsize (ptr, ostype, pref, visited, rvals);
4204 }
4205
4206 tree type = TREE_TYPE (ptr);
4207 type = TYPE_MAIN_VARIANT (type);
4208 if (TREE_CODE (ptr) == ADDR_EXPR)
4209 ptr = TREE_OPERAND (ptr, 0);
4210
4211 if (TREE_CODE (type) == ARRAY_TYPE
4212 && !array_at_struct_end_p (ptr))
4213 {
4214 if (tree size = TYPE_SIZE_UNIT (type))
4215 return get_range (size, UNSIGNED, pref->sizrng, rvals);
4216 }
4217
4218 return false;
4219 }
4220
4221 /* Convenience wrapper around the above. */
4222
4223 static tree
4224 compute_objsize (tree ptr, int ostype, access_ref *pref,
4225 const vr_values *rvals = NULL)
4226 {
4227 bitmap visited = NULL;
4228
4229 bool success
4230 = compute_objsize (ptr, ostype, pref, &visited, rvals);
4231
4232 if (visited)
4233 BITMAP_FREE (visited);
4234
4235 if (!success)
4236 return NULL_TREE;
4237
4238 if (pref->offrng[0] < 0)
4239 {
4240 if (pref->offrng[1] < 0)
4241 return size_zero_node;
4242
4243 pref->offrng[0] = 0;
4244 }
4245
4246 if (pref->sizrng[1] < pref->offrng[0])
4247 return size_zero_node;
4248
4249 return wide_int_to_tree (sizetype, pref->sizrng[1] - pref->offrng[0]);
4250 }
4251
4252 /* Transitional wrapper around the above. The function should be removed
4253 once callers transition to one of the two above. */
4254
4255 tree
4256 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
4257 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
4258 {
4259 /* Set the initial offsets to zero and size to negative to indicate
4260 none has been computed yet. */
4261 access_ref ref;
4262 tree size = compute_objsize (ptr, ostype, &ref, rvals);
4263 if (!size)
4264 return NULL_TREE;
4265
4266 if (pdecl)
4267 *pdecl = ref.ref;
4268
4269 if (poff)
4270 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
4271
4272 return size;
4273 }
4274
4275 /* Helper to determine and check the sizes of the source and the destination
4276 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
4277 call expression, DEST is the destination argument, SRC is the source
4278 argument or null, and LEN is the number of bytes. Use Object Size type-0
4279 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4280 (no overflow or invalid sizes), false otherwise. */
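/* For example (sketch only): for memset (d, 0, n) SRC is null, so
   SRCSIZE below stays NULL_TREE and only the size of the destination D,
   determined with type-0 Object Size, is checked against N.  */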
4281
4282 static bool
4283 check_memop_access (tree exp, tree dest, tree src, tree size)
4284 {
4285 /* For functions like memset and memcpy that operate on raw memory
4286 try to determine the size of the largest source and destination
4287 object using type-0 Object Size regardless of the object size
4288 type specified by the option. */
4289 access_data data;
4290 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
4291 tree dstsize = compute_objsize (dest, 0, &data.dst);
4292
4293 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4294 srcsize, dstsize, true, &data);
4295 }
4296
4297 /* Validate memchr arguments without performing any expansion.
4298 Return NULL_RTX. */
4299
4300 static rtx
4301 expand_builtin_memchr (tree exp, rtx)
4302 {
4303 if (!validate_arglist (exp,
4304 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4305 return NULL_RTX;
4306
4307 tree arg1 = CALL_EXPR_ARG (exp, 0);
4308 tree len = CALL_EXPR_ARG (exp, 2);
4309
4310 /* Diagnose calls where the specified length exceeds the size
4311 of the object. */
4312 if (warn_stringop_overflow)
4313 {
4314 access_data data;
4315 tree size = compute_objsize (arg1, 0, &data.src);
4316 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4317 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE,
4318 true, &data);
4319 }
4320
4321 return NULL_RTX;
4322 }
4323
4324 /* Expand a call EXP to the memcpy builtin.
4325 Return NULL_RTX if we failed; the caller should emit a normal call,
4326 otherwise try to get the result in TARGET, if convenient (and in
4327 mode MODE if that's convenient). */
4328
4329 static rtx
4330 expand_builtin_memcpy (tree exp, rtx target)
4331 {
4332 if (!validate_arglist (exp,
4333 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4334 return NULL_RTX;
4335
4336 tree dest = CALL_EXPR_ARG (exp, 0);
4337 tree src = CALL_EXPR_ARG (exp, 1);
4338 tree len = CALL_EXPR_ARG (exp, 2);
4339
4340 check_memop_access (exp, dest, src, len);
4341
4342 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4343 /*retmode=*/ RETURN_BEGIN, false);
4344 }
4345
4346 /* Check a call EXP to the memmove built-in for validity.
4347 Return NULL_RTX on both success and failure. */
4348
4349 static rtx
4350 expand_builtin_memmove (tree exp, rtx target)
4351 {
4352 if (!validate_arglist (exp,
4353 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4354 return NULL_RTX;
4355
4356 tree dest = CALL_EXPR_ARG (exp, 0);
4357 tree src = CALL_EXPR_ARG (exp, 1);
4358 tree len = CALL_EXPR_ARG (exp, 2);
4359
4360 check_memop_access (exp, dest, src, len);
4361
4362 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4363 /*retmode=*/ RETURN_BEGIN, true);
4364 }
4365
4366 /* Expand a call EXP to the mempcpy builtin.
4367 Return NULL_RTX if we failed; the caller should emit a normal call,
4368 otherwise try to get the result in TARGET, if convenient (and in
4369 mode MODE if that's convenient). */
4370
4371 static rtx
4372 expand_builtin_mempcpy (tree exp, rtx target)
4373 {
4374 if (!validate_arglist (exp,
4375 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4376 return NULL_RTX;
4377
4378 tree dest = CALL_EXPR_ARG (exp, 0);
4379 tree src = CALL_EXPR_ARG (exp, 1);
4380 tree len = CALL_EXPR_ARG (exp, 2);
4381
4382 /* Policy does not generally allow using compute_objsize (which
4383 is used internally by check_memop_access) to change code generation
4384 or drive optimization decisions.
4385
4386 In this instance it is safe because the code we generate has
4387 the same semantics regardless of the return value of
4388 check_memop_access. Exactly the same amount of data is copied
4389 and the return value is exactly the same in both cases.
4390
4391 Furthermore, check_memop_access always uses mode 0 for the call to
4392 compute_objsize, so the imprecise nature of compute_objsize is
4393 avoided. */
4394
4395 /* Avoid expanding mempcpy into memcpy when the call is determined
4396 to overflow the buffer. This also prevents the same overflow
4397 from being diagnosed again when expanding memcpy. */
4398 if (!check_memop_access (exp, dest, src, len))
4399 return NULL_RTX;
4400
4401 return expand_builtin_mempcpy_args (dest, src, len,
4402 target, exp, /*retmode=*/ RETURN_END);
4403 }
4404
4405 /* Helper function to do the actual work for expansion of the memory copy
4406 family of functions (memcpy, mempcpy, stpcpy). The expansion should copy
4407 LEN bytes of memory from SRC to DEST and assign the result to TARGET if
4408 convenient. The return value is based on the RETMODE argument. */
4409
4410 static rtx
4411 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4412 rtx target, tree exp, memop_ret retmode,
4413 bool might_overlap)
4414 {
4415 unsigned int src_align = get_pointer_alignment (src);
4416 unsigned int dest_align = get_pointer_alignment (dest);
4417 rtx dest_mem, src_mem, dest_addr, len_rtx;
4418 HOST_WIDE_INT expected_size = -1;
4419 unsigned int expected_align = 0;
4420 unsigned HOST_WIDE_INT min_size;
4421 unsigned HOST_WIDE_INT max_size;
4422 unsigned HOST_WIDE_INT probable_max_size;
4423
4424 bool is_move_done;
4425
4426 /* If DEST is not a pointer type, call the normal function. */
4427 if (dest_align == 0)
4428 return NULL_RTX;
4429
4430 /* If SRC is not a pointer type, don't do this
4431 operation in-line. */
4432 if (src_align == 0)
4433 return NULL_RTX;
4434
4435 if (currently_expanding_gimple_stmt)
4436 stringop_block_profile (currently_expanding_gimple_stmt,
4437 &expected_align, &expected_size);
4438
4439 if (expected_align < dest_align)
4440 expected_align = dest_align;
4441 dest_mem = get_memory_rtx (dest, len);
4442 set_mem_align (dest_mem, dest_align);
4443 len_rtx = expand_normal (len);
4444 determine_block_size (len, len_rtx, &min_size, &max_size,
4445 &probable_max_size);
4446
4447 /* Try to get the byte representation of the constant SRC points to,
4448 with its byte size in NBYTES. */
4449 unsigned HOST_WIDE_INT nbytes;
4450 const char *rep = c_getstr (src, &nbytes);
4451
4452 /* If the function's constant bound LEN_RTX is less than or equal
4453 to the byte size of the representation of the constant argument,
4454 and if block move would be done by pieces, we can avoid loading
4455 the bytes from memory and only store the computed constant.
4456 This works in the overlap (memmove) case as well because
4457 store_by_pieces just generates a series of stores of constants
4458 from the representation returned by c_getstr(). */
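/* Illustrative sketch (hypothetical call): for memcpy (buf, "abcd", 3)
   the constant representation returned by c_getstr () covers at least
   the three bytes requested, so when storing by pieces is possible the
   bytes 'a', 'b', 'c' are emitted directly as constants instead of
   being loaded from the string literal. */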
4459 if (rep
4460 && CONST_INT_P (len_rtx)
4461 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
4462 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4463 CONST_CAST (char *, rep),
4464 dest_align, false))
4465 {
4466 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4467 builtin_memcpy_read_str,
4468 CONST_CAST (char *, rep),
4469 dest_align, false, retmode);
4470 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4471 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4472 return dest_mem;
4473 }
4474
4475 src_mem = get_memory_rtx (src, len);
4476 set_mem_align (src_mem, src_align);
4477
4478 /* Copy word part most expediently. */
4479 enum block_op_methods method = BLOCK_OP_NORMAL;
4480 if (CALL_EXPR_TAILCALL (exp)
4481 && (retmode == RETURN_BEGIN || target == const0_rtx))
4482 method = BLOCK_OP_TAILCALL;
4483 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4484 && retmode == RETURN_END
4485 && !might_overlap
4486 && target != const0_rtx);
4487 if (use_mempcpy_call)
4488 method = BLOCK_OP_NO_LIBCALL_RET;
4489 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4490 expected_align, expected_size,
4491 min_size, max_size, probable_max_size,
4492 use_mempcpy_call, &is_move_done,
4493 might_overlap);
4494
4495 /* Bail out when a mempcpy call would be expanded as a libcall and
4496 the target provides a fast implementation
4497 of the mempcpy routine. */
4498 if (!is_move_done)
4499 return NULL_RTX;
4500
4501 if (dest_addr == pc_rtx)
4502 return NULL_RTX;
4503
4504 if (dest_addr == 0)
4505 {
4506 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4507 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4508 }
4509
4510 if (retmode != RETURN_BEGIN && target != const0_rtx)
4511 {
4512 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4513 /* For stpcpy, return a pointer to the last byte (the terminating NUL). */
4514 if (retmode == RETURN_END_MINUS_ONE)
4515 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4516 }
4517
4518 return dest_addr;
4519 }
4520
4521 static rtx
4522 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4523 rtx target, tree orig_exp, memop_ret retmode)
4524 {
4525 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4526 retmode, false);
4527 }
4528
4529 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4530 we failed, the caller should emit a normal call, otherwise try to
4531 get the result in TARGET, if convenient.
4532 Return value is based on RETMODE argument. */
4533
4534 static rtx
4535 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4536 {
4537 class expand_operand ops[3];
4538 rtx dest_mem;
4539 rtx src_mem;
4540
4541 if (!targetm.have_movstr ())
4542 return NULL_RTX;
4543
4544 dest_mem = get_memory_rtx (dest, NULL);
4545 src_mem = get_memory_rtx (src, NULL);
4546 if (retmode == RETURN_BEGIN)
4547 {
4548 target = force_reg (Pmode, XEXP (dest_mem, 0));
4549 dest_mem = replace_equiv_address (dest_mem, target);
4550 }
4551
4552 create_output_operand (&ops[0],
4553 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4554 create_fixed_operand (&ops[1], dest_mem);
4555 create_fixed_operand (&ops[2], src_mem);
4556 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4557 return NULL_RTX;
4558
4559 if (retmode != RETURN_BEGIN && target != const0_rtx)
4560 {
4561 target = ops[0].value;
4562 /* movstr is supposed to set end to the address of the NUL
4563 terminator. If the caller requested a mempcpy-like return value,
4564 adjust it. */
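/* (Sketch: a mempcpy-style RETURN_END result must point one past
   the copied terminating NUL, hence the adjustment by one below.) */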
4565 if (retmode == RETURN_END)
4566 {
4567 rtx tem = plus_constant (GET_MODE (target),
4568 gen_lowpart (GET_MODE (target), target), 1);
4569 emit_move_insn (target, force_operand (tem, NULL_RTX));
4570 }
4571 }
4572 return target;
4573 }
4574
4575 /* Do some very basic size validation of a call to the strcat builtin
4576 given by EXP. Return NULL_RTX to have the built-in expand to a call
4577 to the library function. */
4578
4579 static rtx
4580 expand_builtin_strcat (tree exp)
4581 {
4582 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4583 || !warn_stringop_overflow)
4584 return NULL_RTX;
4585
4586 tree dest = CALL_EXPR_ARG (exp, 0);
4587 tree src = CALL_EXPR_ARG (exp, 1);
4588
4589 /* Detect unterminated source (only). */
4590 if (!check_nul_terminated_array (exp, src))
4591 return NULL_RTX;
4592
4593 /* There is no way here to determine the length of the string in
4594 the destination to which the SRC string is being appended so
4595 just diagnose cases when the source string is longer than
4596 the destination object. */
4597
4598 access_data data;
4599 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4600
4601 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4602 destsize, true, &data);
4603
4604 return NULL_RTX;
4605 }
4606
4607 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4608 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4609 try to get the result in TARGET, if convenient (and in mode MODE if that's
4610 convenient). */
4611
4612 static rtx
4613 expand_builtin_strcpy (tree exp, rtx target)
4614 {
4615 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4616 return NULL_RTX;
4617
4618 tree dest = CALL_EXPR_ARG (exp, 0);
4619 tree src = CALL_EXPR_ARG (exp, 1);
4620
4621 if (warn_stringop_overflow)
4622 {
4623 access_data data;
4624 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1,
4625 &data.dst);
4626 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4627 src, destsize, true, &data);
4628 }
4629
4630 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4631 {
4632 /* Check to see if the argument was declared attribute nonstring
4633 and if so, issue a warning since at this point it's not known
4634 to be nul-terminated. */
4635 tree fndecl = get_callee_fndecl (exp);
4636 maybe_warn_nonstring_arg (fndecl, exp);
4637 return ret;
4638 }
4639
4640 return NULL_RTX;
4641 }
4642
4643 /* Helper function to do the actual work for expand_builtin_strcpy. The
4644 arguments to the builtin_strcpy call DEST and SRC are broken out
4645 so that this can also be called without constructing an actual CALL_EXPR.
4646 The other arguments and return value are the same as for
4647 expand_builtin_strcpy. */
4648
4649 static rtx
4650 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4651 {
4652 /* Detect strcpy calls with unterminated arrays. */
4653 if (tree nonstr = unterminated_array (src))
4654 {
4655 /* NONSTR refers to the non-nul terminated constant array. */
4656 if (!TREE_NO_WARNING (exp))
4657 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4658 return NULL_RTX;
4659 }
4660
4661 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4662 }
4663
4664 /* Expand a call EXP to the stpcpy builtin.
4665 Return NULL_RTX if we failed; the caller should emit a normal call,
4666 otherwise try to get the result in TARGET, if convenient (and in
4667 mode MODE if that's convenient). */
4668
4669 static rtx
4670 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4671 {
4672 tree dst, src;
4673 location_t loc = EXPR_LOCATION (exp);
4674
4675 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4676 return NULL_RTX;
4677
4678 dst = CALL_EXPR_ARG (exp, 0);
4679 src = CALL_EXPR_ARG (exp, 1);
4680
4681 if (warn_stringop_overflow)
4682 {
4683 access_data data;
4684 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
4685 &data.dst);
4686 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4687 src, destsize, true, &data);
4688 }
4689
4690 /* If return value is ignored, transform stpcpy into strcpy. */
4691 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4692 {
4693 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4694 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4695 return expand_expr (result, target, mode, EXPAND_NORMAL);
4696 }
4697 else
4698 {
4699 tree len, lenp1;
4700 rtx ret;
4701
4702 /* Ensure we get an actual string whose length can be evaluated at
4703 compile-time, not an expression containing a string. This is
4704 because the latter will potentially produce pessimized code
4705 when used to produce the return value. */
4706 c_strlen_data lendata = { };
4707 if (!c_getstr (src, NULL)
4708 || !(len = c_strlen (src, 0, &lendata, 1)))
4709 return expand_movstr (dst, src, target,
4710 /*retmode=*/ RETURN_END_MINUS_ONE);
4711
4712 if (lendata.decl && !TREE_NO_WARNING (exp))
4713 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4714
4715 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4716 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4717 target, exp,
4718 /*retmode=*/ RETURN_END_MINUS_ONE);
4719
4720 if (ret)
4721 return ret;
4722
4723 if (TREE_CODE (len) == INTEGER_CST)
4724 {
4725 rtx len_rtx = expand_normal (len);
4726
4727 if (CONST_INT_P (len_rtx))
4728 {
4729 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4730
4731 if (ret)
4732 {
4733 if (! target)
4734 {
4735 if (mode != VOIDmode)
4736 target = gen_reg_rtx (mode);
4737 else
4738 target = gen_reg_rtx (GET_MODE (ret));
4739 }
4740 if (GET_MODE (target) != GET_MODE (ret))
4741 ret = gen_lowpart (GET_MODE (target), ret);
4742
4743 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4744 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4745 gcc_assert (ret);
4746
4747 return target;
4748 }
4749 }
4750 }
4751
4752 return expand_movstr (dst, src, target,
4753 /*retmode=*/ RETURN_END_MINUS_ONE);
4754 }
4755 }
4756
4757 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4758 arguments while being careful to avoid duplicate warnings (which could
4759 be issued if the expander were to expand the call, resulting in it
4760 being emitted in expand_call()). */
4761
4762 static rtx
4763 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4764 {
4765 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4766 {
4767 /* The call has been successfully expanded. Check for nonstring
4768 arguments and issue warnings as appropriate. */
4769 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4770 return ret;
4771 }
4772
4773 return NULL_RTX;
4774 }
4775
4776 /* Check a call EXP to the stpncpy built-in for validity.
4777 Return NULL_RTX on both success and failure. */
4778
4779 static rtx
4780 expand_builtin_stpncpy (tree exp, rtx)
4781 {
4782 if (!validate_arglist (exp,
4783 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4784 || !warn_stringop_overflow)
4785 return NULL_RTX;
4786
4787 /* The source and destination of the call. */
4788 tree dest = CALL_EXPR_ARG (exp, 0);
4789 tree src = CALL_EXPR_ARG (exp, 1);
4790
4791 /* The exact number of bytes to write (not the maximum). */
4792 tree len = CALL_EXPR_ARG (exp, 2);
4793 if (!check_nul_terminated_array (exp, src, len))
4794 return NULL_RTX;
4795
4796 access_data data;
4797 /* The size of the destination object. */
4798 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4799
4800 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize,
4801 true, &data);
4802
4803 return NULL_RTX;
4804 }
4805
4806 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4807 bytes from constant string DATA + OFFSET and return it as target
4808 constant. */
4809
4810 rtx
4811 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4812 scalar_int_mode mode)
4813 {
4814 const char *str = (const char *) data;
4815
4816 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4817 return const0_rtx;
4818
4819 return c_readstr (str + offset, mode);
4820 }
4821
4822 /* Helper to check the sizes of sequences and the destination of calls
4823 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4824 success (no overflow or invalid sizes), false otherwise. */
4825
4826 static bool
4827 check_strncat_sizes (tree exp, tree objsize)
4828 {
4829 tree dest = CALL_EXPR_ARG (exp, 0);
4830 tree src = CALL_EXPR_ARG (exp, 1);
4831 tree maxread = CALL_EXPR_ARG (exp, 2);
4832
4833 /* Try to determine the range of lengths that the source expression
4834 refers to. */
4835 c_strlen_data lendata = { };
4836 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4837
4838 /* Try to verify that the destination is big enough for the shortest
4839 string. */
4840
4841 access_data data;
4842 if (!objsize && warn_stringop_overflow)
4843 {
4844 /* If it hasn't been provided by __strncat_chk, try to determine
4845 the size of the destination object into which the source is
4846 being copied. */
4847 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4848 }
4849
4850 /* Add one for the terminating nul. */
4851 tree srclen = (lendata.minlen
4852 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4853 size_one_node)
4854 : NULL_TREE);
4855
4856 /* The strncat function copies at most MAXREAD bytes and always appends
4857 the terminating nul so the specified upper bound should never be equal
4858 to (or greater than) the size of the destination. */
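/* A typical instance (hedged example): given char d[8], the call
   strncat (d, s, sizeof d) is diagnosed here, since a safe bound is
   at most sizeof d - strlen (d) - 1.  */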
4859 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4860 && tree_int_cst_equal (objsize, maxread))
4861 {
4862 location_t loc = tree_nonartificial_location (exp);
4863 loc = expansion_point_location_if_in_system_header (loc);
4864
4865 warning_at (loc, OPT_Wstringop_overflow_,
4866 "%K%qD specified bound %E equals destination size",
4867 exp, get_callee_fndecl (exp), maxread);
4868
4869 return false;
4870 }
4871
4872 if (!srclen
4873 || (maxread && tree_fits_uhwi_p (maxread)
4874 && tree_fits_uhwi_p (srclen)
4875 && tree_int_cst_lt (maxread, srclen)))
4876 srclen = maxread;
4877
4878 /* The number of bytes to write is LEN but check_access will also
4879 check SRCLEN if LEN's value isn't known. */
4880 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4881 objsize, true, &data);
4882 }
4883
4884 /* Similar to expand_builtin_strcat, do some very basic size validation
4885 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4886 the built-in expand to a call to the library function. */
4887
4888 static rtx
4889 expand_builtin_strncat (tree exp, rtx)
4890 {
4891 if (!validate_arglist (exp,
4892 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4893 || !warn_stringop_overflow)
4894 return NULL_RTX;
4895
4896 tree dest = CALL_EXPR_ARG (exp, 0);
4897 tree src = CALL_EXPR_ARG (exp, 1);
4898 /* The upper bound on the number of bytes to write. */
4899 tree maxread = CALL_EXPR_ARG (exp, 2);
4900
4901 /* Detect unterminated source (only). */
4902 if (!check_nul_terminated_array (exp, src, maxread))
4903 return NULL_RTX;
4904
4905 /* The length of the source sequence. */
4906 tree slen = c_strlen (src, 1);
4907
4908 /* Try to determine the range of lengths that the source expression
4909 refers to. Since the lengths are only used for warning and not
4910 for code generation disable strict mode below. */
4911 tree maxlen = slen;
4912 if (!maxlen)
4913 {
4914 c_strlen_data lendata = { };
4915 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4916 maxlen = lendata.maxbound;
4917 }
4918
4919 access_data data;
4920 /* Try to verify that the destination is big enough for the shortest
4921 string. First try to determine the size of the destination object
4922 into which the source is being copied. */
4923 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4924
4925 /* Add one for the terminating nul. */
4926 tree srclen = (maxlen
4927 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4928 size_one_node)
4929 : NULL_TREE);
4930
4931 /* The strncat function copies at most MAXREAD bytes and always appends
4932 the terminating nul so the specified upper bound should never be equal
4933 to (or greater than) the size of the destination. */
4934 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4935 && tree_int_cst_equal (destsize, maxread))
4936 {
4937 location_t loc = tree_nonartificial_location (exp);
4938 loc = expansion_point_location_if_in_system_header (loc);
4939
4940 warning_at (loc, OPT_Wstringop_overflow_,
4941 "%K%qD specified bound %E equals destination size",
4942 exp, get_callee_fndecl (exp), maxread);
4943
4944 return NULL_RTX;
4945 }
4946
4947 if (!srclen
4948 || (maxread && tree_fits_uhwi_p (maxread)
4949 && tree_fits_uhwi_p (srclen)
4950 && tree_int_cst_lt (maxread, srclen)))
4951 srclen = maxread;
4952
4953 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize,
4954 true, &data);
4955
4956 return NULL_RTX;
4957 }
4958
4959 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4960 NULL_RTX if we failed; the caller should emit a normal call. */
4961
4962 static rtx
4963 expand_builtin_strncpy (tree exp, rtx target)
4964 {
4965 location_t loc = EXPR_LOCATION (exp);
4966
4967 if (!validate_arglist (exp,
4968 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4969 return NULL_RTX;
4970 tree dest = CALL_EXPR_ARG (exp, 0);
4971 tree src = CALL_EXPR_ARG (exp, 1);
4972 /* The number of bytes to write (not the maximum). */
4973 tree len = CALL_EXPR_ARG (exp, 2);
4974
4975 if (!check_nul_terminated_array (exp, src, len))
4976 return NULL_RTX;
4977
4978 /* The length of the source sequence. */
4979 tree slen = c_strlen (src, 1);
4980
4981 if (warn_stringop_overflow)
4982 {
4983 access_data data;
4984 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1,
4985 &data.dst);
4986
4987 /* The number of bytes to write is LEN but check_access will also
4988 check SLEN if LEN's value isn't known. */
4989 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4990 destsize, true, &data);
4991 }
4992
4993 /* We must be passed a constant len and src parameter. */
4994 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4995 return NULL_RTX;
4996
4997 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4998
4999 /* We're required to pad with trailing zeros if the requested
5000 len is greater than strlen(s2)+1. In that case try to
5001 use store_by_pieces; if it fails, punt. */
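/* For example (sketch only): strncpy (d, "ab", 8) must write 'a', 'b'
   and six trailing NULs; builtin_strncpy_read_str reads zeros past the
   end of the source string, so store_by_pieces emits the padding as
   well.  */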
5002 if (tree_int_cst_lt (slen, len))
5003 {
5004 unsigned int dest_align = get_pointer_alignment (dest);
5005 const char *p = c_getstr (src);
5006 rtx dest_mem;
5007
5008 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
5009 || !can_store_by_pieces (tree_to_uhwi (len),
5010 builtin_strncpy_read_str,
5011 CONST_CAST (char *, p),
5012 dest_align, false))
5013 return NULL_RTX;
5014
5015 dest_mem = get_memory_rtx (dest, len);
5016 store_by_pieces (dest_mem, tree_to_uhwi (len),
5017 builtin_strncpy_read_str,
5018 CONST_CAST (char *, p), dest_align, false,
5019 RETURN_BEGIN);
5020 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5021 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5022 return dest_mem;
5023 }
5024
5025 return NULL_RTX;
5026 }
5027
5028 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
5029 bytes from constant string DATA + OFFSET and return it as target
5030 constant. */
5031
5032 rtx
5033 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5034 scalar_int_mode mode)
5035 {
5036 const char *c = (const char *) data;
5037 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5038
5039 memset (p, *c, GET_MODE_SIZE (mode));
5040
5041 return c_readstr (p, mode);
5042 }
5043
5044 /* Callback routine for store_by_pieces. Return the RTL of a register
5045 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5046 char value given in the RTL register data. For example, if mode is
5047 4 bytes wide, return the RTL for 0x01010101*data. */
5048
5049 static rtx
5050 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5051 scalar_int_mode mode)
5052 {
5053 rtx target, coeff;
5054 size_t size;
5055 char *p;
5056
5057 size = GET_MODE_SIZE (mode);
5058 if (size == 1)
5059 return (rtx) data;
5060
5061 p = XALLOCAVEC (char, size);
5062 memset (p, 1, size);
5063 coeff = c_readstr (p, mode);
5064
5065 target = convert_to_mode (mode, (rtx) data, 1);
5066 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
5067 return force_reg (mode, target);
5068 }
5069
5070 /* Expand expression EXP, which is a call to the memset builtin. Return
5071 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5072 try to get the result in TARGET, if convenient (and in mode MODE if that's
5073 convenient). */
5074
5075 static rtx
5076 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
5077 {
5078 if (!validate_arglist (exp,
5079 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5080 return NULL_RTX;
5081
5082 tree dest = CALL_EXPR_ARG (exp, 0);
5083 tree val = CALL_EXPR_ARG (exp, 1);
5084 tree len = CALL_EXPR_ARG (exp, 2);
5085
5086 check_memop_access (exp, dest, NULL_TREE, len);
5087
5088 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5089 }
5090
5091 /* Helper function to do the actual work for expand_builtin_memset. The
5092 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5093 so that this can also be called without constructing an actual CALL_EXPR.
5094 The other arguments and return value are the same as for
5095 expand_builtin_memset. */
5096
5097 static rtx
5098 expand_builtin_memset_args (tree dest, tree val, tree len,
5099 rtx target, machine_mode mode, tree orig_exp)
5100 {
5101 tree fndecl, fn;
5102 enum built_in_function fcode;
5103 machine_mode val_mode;
5104 char c;
5105 unsigned int dest_align;
5106 rtx dest_mem, dest_addr, len_rtx;
5107 HOST_WIDE_INT expected_size = -1;
5108 unsigned int expected_align = 0;
5109 unsigned HOST_WIDE_INT min_size;
5110 unsigned HOST_WIDE_INT max_size;
5111 unsigned HOST_WIDE_INT probable_max_size;
5112
5113 dest_align = get_pointer_alignment (dest);
5114
5115 /* If DEST is not a pointer type, don't do this operation in-line. */
5116 if (dest_align == 0)
5117 return NULL_RTX;
5118
5119 if (currently_expanding_gimple_stmt)
5120 stringop_block_profile (currently_expanding_gimple_stmt,
5121 &expected_align, &expected_size);
5122
5123 if (expected_align < dest_align)
5124 expected_align = dest_align;
5125
5126 /* If the LEN parameter is zero, return DEST. */
5127 if (integer_zerop (len))
5128 {
5129 /* Evaluate and ignore VAL in case it has side-effects. */
5130 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5131 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5132 }
5133
5134 /* Stabilize the arguments in case we fail. */
5135 dest = builtin_save_expr (dest);
5136 val = builtin_save_expr (val);
5137 len = builtin_save_expr (len);
5138
5139 len_rtx = expand_normal (len);
5140 determine_block_size (len, len_rtx, &min_size, &max_size,
5141 &probable_max_size);
5142 dest_mem = get_memory_rtx (dest, len);
5143 val_mode = TYPE_MODE (unsigned_char_type_node);
5144
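/* If VAL is not a compile-time constant we cannot compute its byte
value here; try store_by_pieces with a generator callback, then the
setmem pattern, before falling back to a library call. */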
5145 if (TREE_CODE (val) != INTEGER_CST)
5146 {
5147 rtx val_rtx;
5148
5149 val_rtx = expand_normal (val);
5150 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5151
5152 /* Assume that we can memset by pieces if we can store
5153 the coefficients by pieces (in the required modes).
5154 We can't pass builtin_memset_gen_str as that emits RTL. */
5155 c = 1;
5156 if (tree_fits_uhwi_p (len)
5157 && can_store_by_pieces (tree_to_uhwi (len),
5158 builtin_memset_read_str, &c, dest_align,
5159 true))
5160 {
5161 val_rtx = force_reg (val_mode, val_rtx);
5162 store_by_pieces (dest_mem, tree_to_uhwi (len),
5163 builtin_memset_gen_str, val_rtx, dest_align,
5164 true, RETURN_BEGIN);
5165 }
5166 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5167 dest_align, expected_align,
5168 expected_size, min_size, max_size,
5169 probable_max_size))
5170 goto do_libcall;
5171
5172 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5173 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5174 return dest_mem;
5175 }
5176
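/* VAL is a constant; reduce it to a single target character. If that
fails, fall back to a library call. */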
5177 if (target_char_cast (val, &c))
5178 goto do_libcall;
5179
5180 if (c)
5181 {
5182 if (tree_fits_uhwi_p (len)
5183 && can_store_by_pieces (tree_to_uhwi (len),
5184 builtin_memset_read_str, &c, dest_align,
5185 true))
5186 store_by_pieces (dest_mem, tree_to_uhwi (len),
5187 builtin_memset_read_str, &c, dest_align, true,
5188 RETURN_BEGIN);
5189 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5190 gen_int_mode (c, val_mode),
5191 dest_align, expected_align,
5192 expected_size, min_size, max_size,
5193 probable_max_size))
5194 goto do_libcall;
5195
5196 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5197 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5198 return dest_mem;
5199 }
5200
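/* The constant byte value is zero: expand as a block clear. */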
5201 set_mem_align (dest_mem, dest_align);
5202 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5203 CALL_EXPR_TAILCALL (orig_exp)
5204 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5205 expected_align, expected_size,
5206 min_size, max_size,
5207 probable_max_size);
5208
5209 if (dest_addr == 0)
5210 {
5211 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5212 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5213 }
5214
5215 return dest_addr;
5216
5217 do_libcall:
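/* Emit an explicit call to memset or bzero using the stabilized
arguments. */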
5218 fndecl = get_callee_fndecl (orig_exp);
5219 fcode = DECL_FUNCTION_CODE (fndecl);
5220 if (fcode == BUILT_IN_MEMSET)
5221 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5222 dest, val, len);
5223 else if (fcode == BUILT_IN_BZERO)
5224 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5225 dest, len);
5226 else
5227 gcc_unreachable ();
5228 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5229 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5230 return expand_call (fn, target, target == const0_rtx);
5231 }
5232
5233 /* Expand expression EXP, which is a call to the bzero builtin. Return
5234 NULL_RTX if we failed and the caller should emit a normal call. */
5235
5236 static rtx
5237 expand_builtin_bzero (tree exp)
5238 {
5239 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5240 return NULL_RTX;
5241
5242 tree dest = CALL_EXPR_ARG (exp, 0);
5243 tree size = CALL_EXPR_ARG (exp, 1);
5244
5245 check_memop_access (exp, dest, NULL_TREE, size);
5246
5247 /* New argument list transforming bzero(ptr x, int y) to
5248 memset(ptr x, int 0, size_t y). This is done this way
5249 so that if it isn't expanded inline, we fall back to
5250 calling bzero instead of memset. */
5251
5252 location_t loc = EXPR_LOCATION (exp);
5253
5254 return expand_builtin_memset_args (dest, integer_zero_node,
5255 fold_convert_loc (loc,
5256 size_type_node, size),
5257 const0_rtx, VOIDmode, exp);
5258 }
5259
5260 /* Try to expand cmpstr operation ICODE with the given operands.
5261 Return the result rtx on success, otherwise return null. */
5262
5263 static rtx
5264 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5265 HOST_WIDE_INT align)
5266 {
5267 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5268
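/* Only pass TARGET to the expander if it is a pseudo register;
otherwise let it allocate a fresh one for the output operand. */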
5269 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5270 target = NULL_RTX;
5271
5272 class expand_operand ops[4];
5273 create_output_operand (&ops[0], target, insn_mode);
5274 create_fixed_operand (&ops[1], arg1_rtx);
5275 create_fixed_operand (&ops[2], arg2_rtx);
5276 create_integer_operand (&ops[3], align);
5277 if (maybe_expand_insn (icode, 4, ops))
5278 return ops[0].value;
5279 return NULL_RTX;
5280 }
5281
5282 /* Expand expression EXP, which is a call to the memcmp built-in function.
5283 Return NULL_RTX if we failed and the caller should emit a normal call,
5284 otherwise try to get the result in TARGET, if convenient.
5285 RESULT_EQ is true if we can relax the returned value to be either zero
5286 or nonzero, without caring about the sign. */
5287
5288 static rtx
5289 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5290 {
5291 if (!validate_arglist (exp,
5292 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5293 return NULL_RTX;
5294
5295 tree arg1 = CALL_EXPR_ARG (exp, 0);
5296 tree arg2 = CALL_EXPR_ARG (exp, 1);
5297 tree len = CALL_EXPR_ARG (exp, 2);
5298 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5299 bool no_overflow = true;
5300
5301 /* Diagnose calls where the specified length exceeds the size of either
5302 object. */
5303 access_data data;
5304 tree size = compute_objsize (arg1, 0, &data.src);
5305 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5306 len, /*maxread=*/NULL_TREE, size,
5307 /*objsize=*/NULL_TREE, true, &data);
5308 if (no_overflow)
5309 {
5310 access_data data;
5311 size = compute_objsize (arg2, 0, &data.src);
5312 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5313 len, /*maxread=*/NULL_TREE, size,
5314 /*objsize=*/NULL_TREE, true, &data);
5315 }
5316
5317 /* If the specified length exceeds the size of either object,
5318 call the function. */
5319 if (!no_overflow)
5320 return NULL_RTX;
5321
5322 /* Due to the performance benefit, always inline the calls first
5323 when result_eq is false. */
5324 rtx result = NULL_RTX;
5325
5326 if (!result_eq && fcode != BUILT_IN_BCMP)
5327 {
5328 result = inline_expand_builtin_bytecmp (exp, target);
5329 if (result)
5330 return result;
5331 }
5332
5333 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5334 location_t loc = EXPR_LOCATION (exp);
5335
5336 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5337 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5338
5339 /* If we don't have POINTER_TYPE, call the function. */
5340 if (arg1_align == 0 || arg2_align == 0)
5341 return NULL_RTX;
5342
5343 rtx arg1_rtx = get_memory_rtx (arg1, len);
5344 rtx arg2_rtx = get_memory_rtx (arg2, len);
5345 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5346
5347 /* Set MEM_SIZE as appropriate. */
5348 if (CONST_INT_P (len_rtx))
5349 {
5350 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5351 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5352 }
5353
5354 by_pieces_constfn constfn = NULL;
5355
5356 /* Try to get the byte representation of the constant ARG2 (or, only
5357 when the function's result is used for equality to zero, ARG1)
5358 points to, with its byte size in NBYTES. */
5359 unsigned HOST_WIDE_INT nbytes;
5360 const char *rep = c_getstr (arg2, &nbytes);
5361 if (result_eq && rep == NULL)
5362 {
5363 /* For equality to zero the arguments are interchangeable. */
5364 rep = c_getstr (arg1, &nbytes);
5365 if (rep != NULL)
5366 std::swap (arg1_rtx, arg2_rtx);
5367 }
5368
5369 /* If the function's constant bound LEN_RTX is less than or equal
5370 to the byte size of the representation of the constant argument,
5371 and if block move would be done by pieces, we can avoid loading
5372 the bytes from memory and only store the computed constant result. */
5373 if (rep
5374 && CONST_INT_P (len_rtx)
5375 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
5376 constfn = builtin_memcpy_read_str;
5377
5378 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5379 TREE_TYPE (len), target,
5380 result_eq, constfn,
5381 CONST_CAST (char *, rep));
5382
5383 if (result)
5384 {
5385 /* Return the value in the proper mode for this function. */
5386 if (GET_MODE (result) == mode)
5387 return result;
5388
5389 if (target != 0)
5390 {
5391 convert_move (target, result, 0);
5392 return target;
5393 }
5394
5395 return convert_to_mode (mode, result, 0);
5396 }
5397
5398 return NULL_RTX;
5399 }
5400
5401 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5402 if we failed and the caller should emit a normal call, otherwise try to get
5403 the result in TARGET, if convenient. */
5404
5405 static rtx
5406 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5407 {
5408 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5409 return NULL_RTX;
5410
5411 tree arg1 = CALL_EXPR_ARG (exp, 0);
5412 tree arg2 = CALL_EXPR_ARG (exp, 1);
5413
5414 if (!check_nul_terminated_array (exp, arg1)
5415 || !check_nul_terminated_array (exp, arg2))
5416 return NULL_RTX;
5417
5418 /* Due to the performance benefit, always inline the calls first. */
5419 rtx result = NULL_RTX;
5420 result = inline_expand_builtin_bytecmp (exp, target);
5421 if (result)
5422 return result;
5423
5424 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5425 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5426 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5427 return NULL_RTX;
5428
5429 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5430 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5431
5432 /* If we don't have POINTER_TYPE, call the function. */
5433 if (arg1_align == 0 || arg2_align == 0)
5434 return NULL_RTX;
5435
5436 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5437 arg1 = builtin_save_expr (arg1);
5438 arg2 = builtin_save_expr (arg2);
5439
5440 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5441 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5442
5443 /* Try to call cmpstrsi. */
5444 if (cmpstr_icode != CODE_FOR_nothing)
5445 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5446 MIN (arg1_align, arg2_align));
5447
5448 /* Try to determine at least one length and call cmpstrnsi. */
5449 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5450 {
5451 tree len;
5452 rtx arg3_rtx;
5453
5454 tree len1 = c_strlen (arg1, 1);
5455 tree len2 = c_strlen (arg2, 1);
5456
5457 if (len1)
5458 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5459 if (len2)
5460 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5461
5462 /* If we don't have a constant length for the first, use the length
5463 of the second, if we know it. We don't require a constant for
5464 this case; some cost analysis could be done if both are available
5465 but neither is constant. For now, assume they're equally cheap,
5466 unless one has side effects. If both strings have constant lengths,
5467 use the smaller. */
5468
5469 if (!len1)
5470 len = len2;
5471 else if (!len2)
5472 len = len1;
5473 else if (TREE_SIDE_EFFECTS (len1))
5474 len = len2;
5475 else if (TREE_SIDE_EFFECTS (len2))
5476 len = len1;
5477 else if (TREE_CODE (len1) != INTEGER_CST)
5478 len = len2;
5479 else if (TREE_CODE (len2) != INTEGER_CST)
5480 len = len1;
5481 else if (tree_int_cst_lt (len1, len2))
5482 len = len1;
5483 else
5484 len = len2;
5485
5486 /* If both arguments have side effects, we cannot optimize. */
5487 if (len && !TREE_SIDE_EFFECTS (len))
5488 {
5489 arg3_rtx = expand_normal (len);
5490 result = expand_cmpstrn_or_cmpmem
5491 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5492 arg3_rtx, MIN (arg1_align, arg2_align));
5493 }
5494 }
5495
5496 tree fndecl = get_callee_fndecl (exp);
5497 if (result)
5498 {
5499 /* Check to see if the argument was declared attribute nonstring
5500 and if so, issue a warning since at this point it's not known
5501 to be nul-terminated. */
5502 maybe_warn_nonstring_arg (fndecl, exp);
5503
5504 /* Return the value in the proper mode for this function. */
5505 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5506 if (GET_MODE (result) == mode)
5507 return result;
5508 if (target == 0)
5509 return convert_to_mode (mode, result, 0);
5510 convert_move (target, result, 0);
5511 return target;
5512 }
5513
5514 /* Expand the library call ourselves using a stabilized argument
5515 list to avoid re-evaluating the function's arguments twice. */
5516 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5517 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5518 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5519 return expand_call (fn, target, target == const0_rtx);
5520 }
5521
5522 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5523 NULL_RTX if we failed and the caller should emit a normal call, otherwise
5524 try to get the result in TARGET, if convenient. */
5525
5526 static rtx
5527 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5528 ATTRIBUTE_UNUSED machine_mode mode)
5529 {
5530 if (!validate_arglist (exp,
5531 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5532 return NULL_RTX;
5533
5534 tree arg1 = CALL_EXPR_ARG (exp, 0);
5535 tree arg2 = CALL_EXPR_ARG (exp, 1);
5536 tree arg3 = CALL_EXPR_ARG (exp, 2);
5537
5538 if (!check_nul_terminated_array (exp, arg1, arg3)
5539 || !check_nul_terminated_array (exp, arg2, arg3))
5540 return NULL_RTX;
5541
5542 /* Due to the performance benefit, always inline the calls first. */
5543 rtx result = NULL_RTX;
5544 result = inline_expand_builtin_bytecmp (exp, target);
5545 if (result)
5546 return result;
5547
5548 /* If c_strlen can determine an expression for one of the string
5549 lengths, and it doesn't have side effects, then emit cmpstrnsi
5550 using length MIN(strlen(string)+1, arg3). */
5551 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5552 if (cmpstrn_icode == CODE_FOR_nothing)
5553 return NULL_RTX;
5554
5555 tree len;
5556
5557 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5558 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5559
5560 tree len1 = c_strlen (arg1, 1);
5561 tree len2 = c_strlen (arg2, 1);
5562
5563 location_t loc = EXPR_LOCATION (exp);
5564
5565 if (len1)
5566 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5567 if (len2)
5568 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5569
5570 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5571
5572 /* If we don't have a constant length for the first, use the length
5573 of the second, if we know it. If neither string is constant length,
5574 use the given length argument. We don't require a constant for
5575 this case; some cost analysis could be done if both are available
5576 but neither is constant. For now, assume they're equally cheap,
5577 unless one has side effects. If both strings have constant lengths,
5578 use the smaller. */
5579
5580 if (!len1 && !len2)
5581 len = len3;
5582 else if (!len1)
5583 len = len2;
5584 else if (!len2)
5585 len = len1;
5586 else if (TREE_SIDE_EFFECTS (len1))
5587 len = len2;
5588 else if (TREE_SIDE_EFFECTS (len2))
5589 len = len1;
5590 else if (TREE_CODE (len1) != INTEGER_CST)
5591 len = len2;
5592 else if (TREE_CODE (len2) != INTEGER_CST)
5593 len = len1;
5594 else if (tree_int_cst_lt (len1, len2))
5595 len = len1;
5596 else
5597 len = len2;
5598
5599 /* If we are not using the given length, we must incorporate it here.
5600 The actual new length parameter will be MIN(len,arg3) in this case. */
5601 if (len != len3)
5602 {
5603 len = fold_convert_loc (loc, sizetype, len);
5604 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5605 }
5606 rtx arg1_rtx = get_memory_rtx (arg1, len);
5607 rtx arg2_rtx = get_memory_rtx (arg2, len);
5608 rtx arg3_rtx = expand_normal (len);
5609 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5610 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5611 MIN (arg1_align, arg2_align));
5612
5613 tree fndecl = get_callee_fndecl (exp);
5614 if (result)
5615 {
5616 /* Check to see if the argument was declared attribute nonstring
5617 and if so, issue a warning since at this point it's not known
5618 to be nul-terminated. */
5619 maybe_warn_nonstring_arg (fndecl, exp);
5620
5621 /* Return the value in the proper mode for this function. */
5622 mode = TYPE_MODE (TREE_TYPE (exp));
5623 if (GET_MODE (result) == mode)
5624 return result;
5625 if (target == 0)
5626 return convert_to_mode (mode, result, 0);
5627 convert_move (target, result, 0);
5628 return target;
5629 }
5630
5631 /* Expand the library call ourselves using a stabilized argument
5632 list to avoid re-evaluating the function's arguments twice. */
5633 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5634 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5635 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5636 return expand_call (fn, target, target == const0_rtx);
5637 }
5638
5639 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5640 if that's convenient. */
5641
5642 rtx
5643 expand_builtin_saveregs (void)
5644 {
5645 rtx val;
5646 rtx_insn *seq;
5647
5648 /* Don't do __builtin_saveregs more than once in a function.
5649 Save the result of the first call and reuse it. */
5650 if (saveregs_value != 0)
5651 return saveregs_value;
5652
5653 /* When this function is called, it means that registers must be
5654 saved on entry to this function. So we migrate the call to the
5655 first insn of this function. */
5656
5657 start_sequence ();
5658
5659 /* Do whatever the machine needs done in this case. */
5660 val = targetm.calls.expand_builtin_saveregs ();
5661
5662 seq = get_insns ();
5663 end_sequence ();
5664
5665 saveregs_value = val;
5666
5667 /* Put the insns after the NOTE that starts the function. If this
5668 is inside a start_sequence, make the outer-level insn chain current, so
5669 the code is placed at the start of the function. */
5670 push_topmost_sequence ();
5671 emit_insn_after (seq, entry_of_function ());
5672 pop_topmost_sequence ();
5673
5674 return val;
5675 }
5676
5677 /* Expand a call to __builtin_next_arg. */
5678
5679 static rtx
5680 expand_builtin_next_arg (void)
5681 {
5682 /* Checking arguments is already done in fold_builtin_next_arg,
5683 which must be called before this function. */
5684 return expand_binop (ptr_mode, add_optab,
5685 crtl->args.internal_arg_pointer,
5686 crtl->args.arg_offset_rtx,
5687 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5688 }
5689
5690 /* Make it easier for the backends by protecting the valist argument
5691 from multiple evaluations. */
5692
5693 static tree
5694 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5695 {
5696 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5697
5698 /* The current way of determining the type of valist is completely
5699 bogus. We should have the information on the va builtin instead. */
5700 if (!vatype)
5701 vatype = targetm.fn_abi_va_list (cfun->decl);
5702
5703 if (TREE_CODE (vatype) == ARRAY_TYPE)
5704 {
5705 if (TREE_SIDE_EFFECTS (valist))
5706 valist = save_expr (valist);
5707
5708 /* For this case, the backends will be expecting a pointer to
5709 vatype, but it's possible we've actually been given an array
5710 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5711 So fix it. */
5712 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5713 {
5714 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5715 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5716 }
5717 }
5718 else
5719 {
5720 tree pt = build_pointer_type (vatype);
5721
5722 if (! needs_lvalue)
5723 {
5724 if (! TREE_SIDE_EFFECTS (valist))
5725 return valist;
5726
5727 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5728 TREE_SIDE_EFFECTS (valist) = 1;
5729 }
5730
5731 if (TREE_SIDE_EFFECTS (valist))
5732 valist = save_expr (valist);
5733 valist = fold_build2_loc (loc, MEM_REF,
5734 vatype, valist, build_int_cst (pt, 0));
5735 }
5736
5737 return valist;
5738 }
5739
5740 /* The "standard" definition of va_list is void*. */
5741
5742 tree
5743 std_build_builtin_va_list (void)
5744 {
5745 return ptr_type_node;
5746 }
5747
5748 /* The "standard" abi va_list is va_list_type_node. */
5749
5750 tree
5751 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5752 {
5753 return va_list_type_node;
5754 }
5755
5756 /* The "standard" type of va_list is va_list_type_node. */
5757
5758 tree
5759 std_canonical_va_list_type (tree type)
5760 {
5761 tree wtype, htype;
5762
5763 wtype = va_list_type_node;
5764 htype = type;
5765
5766 if (TREE_CODE (wtype) == ARRAY_TYPE)
5767 {
5768 /* If va_list is an array type, the argument may have decayed
5769 to a pointer type, e.g. by being passed to another function.
5770 In that case, unwrap both types so that we can compare the
5771 underlying records. */
5772 if (TREE_CODE (htype) == ARRAY_TYPE
5773 || POINTER_TYPE_P (htype))
5774 {
5775 wtype = TREE_TYPE (wtype);
5776 htype = TREE_TYPE (htype);
5777 }
5778 }
5779 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5780 return va_list_type_node;
5781
5782 return NULL_TREE;
5783 }
5784
5785 /* The "standard" implementation of va_start: just assign `nextarg' to
5786 the variable. */
5787
5788 void
5789 std_expand_builtin_va_start (tree valist, rtx nextarg)
5790 {
5791 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5792 convert_move (va_r, nextarg, 0);
5793 }
5794
5795 /* Expand EXP, a call to __builtin_va_start. */
5796
5797 static rtx
5798 expand_builtin_va_start (tree exp)
5799 {
5800 rtx nextarg;
5801 tree valist;
5802 location_t loc = EXPR_LOCATION (exp);
5803
5804 if (call_expr_nargs (exp) < 2)
5805 {
5806 error_at (loc, "too few arguments to function %<va_start%>");
5807 return const0_rtx;
5808 }
5809
5810 if (fold_builtin_next_arg (exp, true))
5811 return const0_rtx;
5812
5813 nextarg = expand_builtin_next_arg ();
5814 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5815
5816 if (targetm.expand_builtin_va_start)
5817 targetm.expand_builtin_va_start (valist, nextarg);
5818 else
5819 std_expand_builtin_va_start (valist, nextarg);
5820
5821 return const0_rtx;
5822 }
5823
5824 /* Expand EXP, a call to __builtin_va_end. */
5825
5826 static rtx
5827 expand_builtin_va_end (tree exp)
5828 {
5829 tree valist = CALL_EXPR_ARG (exp, 0);
5830
5831 /* Evaluate for side effects, if needed. I hate macros that don't
5832 do that. */
5833 if (TREE_SIDE_EFFECTS (valist))
5834 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5835
5836 return const0_rtx;
5837 }
5838
5839 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5840 builtin rather than just as an assignment in stdarg.h because of the
5841 nastiness of array-type va_list types. */
5842
5843 static rtx
5844 expand_builtin_va_copy (tree exp)
5845 {
5846 tree dst, src, t;
5847 location_t loc = EXPR_LOCATION (exp);
5848
5849 dst = CALL_EXPR_ARG (exp, 0);
5850 src = CALL_EXPR_ARG (exp, 1);
5851
5852 dst = stabilize_va_list_loc (loc, dst, 1);
5853 src = stabilize_va_list_loc (loc, src, 0);
5854
5855 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5856
5857 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5858 {
5859 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5860 TREE_SIDE_EFFECTS (t) = 1;
5861 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5862 }
5863 else
5864 {
5865 rtx dstb, srcb, size;
5866
5867 /* Evaluate to pointers. */
5868 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5869 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5870 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5871 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5872
5873 dstb = convert_memory_address (Pmode, dstb);
5874 srcb = convert_memory_address (Pmode, srcb);
5875
5876 /* "Dereference" to BLKmode memories. */
5877 dstb = gen_rtx_MEM (BLKmode, dstb);
5878 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5879 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5880 srcb = gen_rtx_MEM (BLKmode, srcb);
5881 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5882 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5883
5884 /* Copy. */
5885 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5886 }
5887
5888 return const0_rtx;
5889 }
5890
5891 /* Expand a call to one of the builtin functions __builtin_frame_address or
5892 __builtin_return_address. */
5893
5894 static rtx
5895 expand_builtin_frame_address (tree fndecl, tree exp)
5896 {
5897 /* The argument must be a nonnegative integer constant.
5898 It counts the number of frames to scan up the stack.
5899 The value is either the frame pointer value or the return
5900 address saved in that frame. */
5901 if (call_expr_nargs (exp) == 0)
5902 /* Warning about missing arg was already issued. */
5903 return const0_rtx;
5904 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5905 {
5906 error ("invalid argument to %qD", fndecl);
5907 return const0_rtx;
5908 }
5909 else
5910 {
5911 /* Number of frames to scan up the stack. */
5912 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5913
5914 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5915
5916 /* Some ports cannot access arbitrary stack frames. */
5917 if (tem == NULL)
5918 {
5919 warning (0, "unsupported argument to %qD", fndecl);
5920 return const0_rtx;
5921 }
5922
5923 if (count)
5924 {
5925 /* Warn since no effort is made to ensure that any frame
5926 beyond the current one exists or can be safely reached. */
5927 warning (OPT_Wframe_address, "calling %qD with "
5928 "a nonzero argument is unsafe", fndecl);
5929 }
5930
5931 /* For __builtin_frame_address, return what we've got. */
5932 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5933 return tem;
5934
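/* For __builtin_return_address, copy a non-constant, non-register
value into a register before returning it. */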
5935 if (!REG_P (tem)
5936 && ! CONSTANT_P (tem))
5937 tem = copy_addr_to_reg (tem);
5938 return tem;
5939 }
5940 }
5941
5942 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5943 failed and the caller should emit a normal call. */
5944
5945 static rtx
5946 expand_builtin_alloca (tree exp)
5947 {
5948 rtx op0;
5949 rtx result;
5950 unsigned int align;
5951 tree fndecl = get_callee_fndecl (exp);
5952 HOST_WIDE_INT max_size;
5953 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5954 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5955 bool valid_arglist
5956 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5957 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5958 VOID_TYPE)
5959 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5960 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5961 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5962
5963 if (!valid_arglist)
5964 return NULL_RTX;
5965
5966 if ((alloca_for_var
5967 && warn_vla_limit >= HOST_WIDE_INT_MAX
5968 && warn_alloc_size_limit < warn_vla_limit)
5969 || (!alloca_for_var
5970 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5971 && warn_alloc_size_limit < warn_alloca_limit
5972 ))
5973 {
5974 /* -Walloca-larger-than and -Wvla-larger-than settings of
5975 less than HOST_WIDE_INT_MAX override the more general
5976 -Walloc-size-larger-than so unless either of the former
5977 options is smaller than the last one (which would imply
5978 that the call was already checked), check the alloca
5979 arguments for overflow. */
5980 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5981 int idx[] = { 0, -1 };
5982 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5983 }
5984
5985 /* Compute the argument. */
5986 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5987
5988 /* Compute the alignment. */
5989 align = (fcode == BUILT_IN_ALLOCA
5990 ? BIGGEST_ALIGNMENT
5991 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5992
5993 /* Compute the maximum size. */
5994 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5995 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5996 : -1);
5997
5998 /* Allocate the desired space. If the allocation stems from the declaration
5999 of a variable-sized object, it cannot accumulate. */
6000 result
6001 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
6002 result = convert_memory_address (ptr_mode, result);
6003
6004 /* Dynamic allocations for variables are recorded during gimplification. */
6005 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
6006 record_dynamic_alloc (exp);
6007
6008 return result;
6009 }
6010
6011 /* Emit a call to __asan_allocas_unpoison for EXP. Add
6012 virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
6013 STACK_DYNAMIC_OFFSET value, to the second argument of the call. See
6014 the motivation in the comment on handle_builtin_stack_restore. */
6015
6016 static rtx
6017 expand_asan_emit_allocas_unpoison (tree exp)
6018 {
6019 tree arg0 = CALL_EXPR_ARG (exp, 0);
6020 tree arg1 = CALL_EXPR_ARG (exp, 1);
6021 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6022 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
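/* Adjust BOT by virtual_stack_dynamic_rtx - stack_pointer_rtx, i.e.
the STACK_DYNAMIC_OFFSET value, before emitting the libcall. */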
6023 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
6024 stack_pointer_rtx, NULL_RTX, 0,
6025 OPTAB_LIB_WIDEN);
6026 off = convert_modes (ptr_mode, Pmode, off, 0);
6027 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
6028 OPTAB_LIB_WIDEN);
6029 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
6030 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
6031 top, ptr_mode, bot, ptr_mode);
6032 return ret;
6033 }
6034
6035 /* Expand a call to a bswap builtin in EXP.
6036 Return NULL_RTX if a normal call should be emitted rather than expanding the
6037 function in-line. If convenient, the result should be placed in TARGET.
6038 SUBTARGET may be used as the target for computing one of EXP's operands. */
6039
6040 static rtx
6041 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
6042 rtx subtarget)
6043 {
6044 tree arg;
6045 rtx op0;
6046
6047 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6048 return NULL_RTX;
6049
6050 arg = CALL_EXPR_ARG (exp, 0);
6051 op0 = expand_expr (arg,
6052 subtarget && GET_MODE (subtarget) == target_mode
6053 ? subtarget : NULL_RTX,
6054 target_mode, EXPAND_NORMAL);
6055 if (GET_MODE (op0) != target_mode)
6056 op0 = convert_to_mode (target_mode, op0, 1);
6057
6058 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
6059
6060 gcc_assert (target);
6061
6062 return convert_to_mode (target_mode, target, 1);
6063 }
6064
6065 /* Expand a call to a unary builtin in EXP.
6066 Return NULL_RTX if a normal call should be emitted rather than expanding the
6067 function in-line. If convenient, the result should be placed in TARGET.
6068 SUBTARGET may be used as the target for computing one of EXP's operands. */
6069
6070 static rtx
6071 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
6072 rtx subtarget, optab op_optab)
6073 {
6074 rtx op0;
6075
6076 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6077 return NULL_RTX;
6078
6079 /* Compute the argument. */
6080 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
6081 (subtarget
6082 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
6083 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
6084 VOIDmode, EXPAND_NORMAL);
6085 /* Compute op, into TARGET if possible.
6086 Set TARGET to wherever the result comes back. */
6087 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6088 op_optab, op0, target, op_optab != clrsb_optab);
6089 gcc_assert (target);
6090
6091 return convert_to_mode (target_mode, target, 0);
6092 }
6093
6094 /* Expand a call to __builtin_expect. We just return our argument
6095 as the builtin_expect semantics should already have been applied by
6096 the tree branch prediction pass. */
6097
6098 static rtx
6099 expand_builtin_expect (tree exp, rtx target)
6100 {
6101 tree arg;
6102
6103 if (call_expr_nargs (exp) < 2)
6104 return const0_rtx;
6105 arg = CALL_EXPR_ARG (exp, 0);
6106
6107 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6108 /* When guessing was done, the hints should be already stripped away. */
6109 gcc_assert (!flag_guess_branch_prob
6110 || optimize == 0 || seen_error ());
6111 return target;
6112 }
6113
6114 /* Expand a call to __builtin_expect_with_probability. We just return our
6115 argument as the builtin_expect semantics should already have been applied
6116 by the tree branch prediction pass. */
6117
6118 static rtx
6119 expand_builtin_expect_with_probability (tree exp, rtx target)
6120 {
6121 tree arg;
6122
6123 if (call_expr_nargs (exp) < 3)
6124 return const0_rtx;
6125 arg = CALL_EXPR_ARG (exp, 0);
6126
6127 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6128 /* When guessing was done, the hints should be already stripped away. */
6129 gcc_assert (!flag_guess_branch_prob
6130 || optimize == 0 || seen_error ());
6131 return target;
6132 }
6133
6134
6135 /* Expand a call to __builtin_assume_aligned. We just return our first
6136 argument as the builtin_assume_aligned semantics should already have
6137 been applied by CCP. */
6138
6139 static rtx
6140 expand_builtin_assume_aligned (tree exp, rtx target)
6141 {
6142 if (call_expr_nargs (exp) < 2)
6143 return const0_rtx;
6144 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6145 EXPAND_NORMAL);
6146 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6147 && (call_expr_nargs (exp) < 3
6148 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6149 return target;
6150 }
6151
6152 void
6153 expand_builtin_trap (void)
6154 {
6155 if (targetm.have_trap ())
6156 {
6157 rtx_insn *insn = emit_insn (targetm.gen_trap ());
6158 /* For trap insns, when not accumulating outgoing args, force a
6159 REG_ARGS_SIZE note to prevent crossjumping of calls with
6160 different arg sizes. */
6161 if (!ACCUMULATE_OUTGOING_ARGS)
6162 add_args_size_note (insn, stack_pointer_delta);
6163 }
6164 else
6165 {
6166 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6167 tree call_expr = build_call_expr (fn, 0);
6168 expand_call (call_expr, NULL_RTX, false);
6169 }
6170
6171 emit_barrier ();
6172 }
6173
6174 /* Expand a call to __builtin_unreachable. We do nothing except emit
6175 a barrier saying that control flow will not pass here.
6176
6177 It is the responsibility of the program being compiled to ensure
6178 that control flow never reaches __builtin_unreachable. */
6179 static void
6180 expand_builtin_unreachable (void)
6181 {
6182 emit_barrier ();
6183 }
6184
6185 /* Expand EXP, a call to fabs, fabsf or fabsl.
6186 Return NULL_RTX if a normal call should be emitted rather than expanding
6187 the function inline. If convenient, the result should be placed
6188 in TARGET. SUBTARGET may be used as the target for computing
6189 the operand. */
6190
6191 static rtx
6192 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6193 {
6194 machine_mode mode;
6195 tree arg;
6196 rtx op0;
6197
6198 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6199 return NULL_RTX;
6200
6201 arg = CALL_EXPR_ARG (exp, 0);
6202 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6203 mode = TYPE_MODE (TREE_TYPE (arg));
6204 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6205 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6206 }
6207
6208 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6209 Return NULL if a normal call should be emitted rather than expanding the
6210 function inline. If convenient, the result should be placed in TARGET.
6211 SUBTARGET may be used as the target for computing the operand. */
6212
6213 static rtx
6214 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6215 {
6216 rtx op0, op1;
6217 tree arg;
6218
6219 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6220 return NULL_RTX;
6221
6222 arg = CALL_EXPR_ARG (exp, 0);
6223 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6224
6225 arg = CALL_EXPR_ARG (exp, 1);
6226 op1 = expand_normal (arg);
6227
6228 return expand_copysign (op0, op1, target);
6229 }
6230
6231 /* Expand a call to __builtin___clear_cache. */
6232
6233 static rtx
6234 expand_builtin___clear_cache (tree exp)
6235 {
6236 if (!targetm.code_for_clear_cache)
6237 {
6238 #ifdef CLEAR_INSN_CACHE
6239 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6240 does something. Just do the default expansion to a call to
6241 __clear_cache(). */
6242 return NULL_RTX;
6243 #else
6244 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6245 does nothing. There is no need to call it. Do nothing. */
6246 return const0_rtx;
6247 #endif /* CLEAR_INSN_CACHE */
6248 }
6249
6250 /* We have a "clear_cache" insn, and it will handle everything. */
6251 tree begin, end;
6252 rtx begin_rtx, end_rtx;
6253
6254 /* We must not expand to a library call. If we did, any
6255 fallback library function in libgcc that might contain a call to
6256 __builtin___clear_cache() would recurse infinitely. */
6257 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6258 {
6259 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6260 return const0_rtx;
6261 }
6262
6263 if (targetm.have_clear_cache ())
6264 {
6265 class expand_operand ops[2];
6266
6267 begin = CALL_EXPR_ARG (exp, 0);
6268 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6269
6270 end = CALL_EXPR_ARG (exp, 1);
6271 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6272
6273 create_address_operand (&ops[0], begin_rtx);
6274 create_address_operand (&ops[1], end_rtx);
6275 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6276 return const0_rtx;
6277 }
6278 return const0_rtx;
6279 }
6280
6281 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6282
6283 static rtx
6284 round_trampoline_addr (rtx tramp)
6285 {
6286 rtx temp, addend, mask;
6287
6288 /* If we don't need too much alignment, we'll have been guaranteed
6289 proper alignment by get_trampoline_type. */
6290 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6291 return tramp;
6292
6293 /* Round address up to desired boundary. */
6294 temp = gen_reg_rtx (Pmode);
6295 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6296 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6297
6298 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6299 temp, 0, OPTAB_LIB_WIDEN);
6300 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6301 temp, 0, OPTAB_LIB_WIDEN);
6302
6303 return tramp;
6304 }
6305
6306 static rtx
6307 expand_builtin_init_trampoline (tree exp, bool onstack)
6308 {
6309 tree t_tramp, t_func, t_chain;
6310 rtx m_tramp, r_tramp, r_chain, tmp;
6311
6312 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6313 POINTER_TYPE, VOID_TYPE))
6314 return NULL_RTX;
6315
6316 t_tramp = CALL_EXPR_ARG (exp, 0);
6317 t_func = CALL_EXPR_ARG (exp, 1);
6318 t_chain = CALL_EXPR_ARG (exp, 2);
6319
6320 r_tramp = expand_normal (t_tramp);
6321 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6322 MEM_NOTRAP_P (m_tramp) = 1;
6323
6324 /* If ONSTACK, the TRAMP argument should be the address of a field
6325 within the local function's FRAME decl. Either way, let's see if
6326 we can fill in the MEM_ATTRs for this memory. */
6327 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6328 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6329
6330 /* Creator of a heap trampoline is responsible for making sure the
6331 address is aligned to at least STACK_BOUNDARY. Normally malloc
6332 will ensure this anyhow. */
6333 tmp = round_trampoline_addr (r_tramp);
6334 if (tmp != r_tramp)
6335 {
6336 m_tramp = change_address (m_tramp, BLKmode, tmp);
6337 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6338 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6339 }
6340
6341 /* The FUNC argument should be the address of the nested function.
6342 Extract the actual function decl to pass to the hook. */
6343 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6344 t_func = TREE_OPERAND (t_func, 0);
6345 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6346
6347 r_chain = expand_normal (t_chain);
6348
6349 /* Generate insns to initialize the trampoline. */
6350 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6351
6352 if (onstack)
6353 {
6354 trampolines_created = 1;
6355
6356 if (targetm.calls.custom_function_descriptors != 0)
6357 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6358 "trampoline generated for nested function %qD", t_func);
6359 }
6360
6361 return const0_rtx;
6362 }
6363
6364 static rtx
6365 expand_builtin_adjust_trampoline (tree exp)
6366 {
6367 rtx tramp;
6368
6369 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6370 return NULL_RTX;
6371
6372 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6373 tramp = round_trampoline_addr (tramp);
6374 if (targetm.calls.trampoline_adjust_address)
6375 tramp = targetm.calls.trampoline_adjust_address (tramp);
6376
6377 return tramp;
6378 }
6379
6380 /* Expand a call to the builtin descriptor initialization routine.
6381 A descriptor is made up of a couple of pointers to the static
6382 chain and the code entry in this order. */
6383
6384 static rtx
6385 expand_builtin_init_descriptor (tree exp)
6386 {
6387 tree t_descr, t_func, t_chain;
6388 rtx m_descr, r_descr, r_func, r_chain;
6389
6390 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6391 VOID_TYPE))
6392 return NULL_RTX;
6393
6394 t_descr = CALL_EXPR_ARG (exp, 0);
6395 t_func = CALL_EXPR_ARG (exp, 1);
6396 t_chain = CALL_EXPR_ARG (exp, 2);
6397
6398 r_descr = expand_normal (t_descr);
6399 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6400 MEM_NOTRAP_P (m_descr) = 1;
6401 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6402
6403 r_func = expand_normal (t_func);
6404 r_chain = expand_normal (t_chain);
6405
6406 /* Generate insns to initialize the descriptor. */
6407 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6408 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6409 POINTER_SIZE / BITS_PER_UNIT), r_func);
6410
6411 return const0_rtx;
6412 }
6413
6414 /* Expand a call to the builtin descriptor adjustment routine. */
6415
6416 static rtx
6417 expand_builtin_adjust_descriptor (tree exp)
6418 {
6419 rtx tramp;
6420
6421 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6422 return NULL_RTX;
6423
6424 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6425
6426 /* Unalign the descriptor to allow runtime identification. */
6427 tramp = plus_constant (ptr_mode, tramp,
6428 targetm.calls.custom_function_descriptors);
6429
6430 return force_operand (tramp, NULL_RTX);
6431 }
6432
6433 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6434 function. The function first checks whether the back end provides
6435 an insn to implement signbit for the respective mode. If not, it
6436 checks whether the floating point format of the value is such that
6437 the sign bit can be extracted. If that is not the case, error out.
6438 EXP is the expression that is a call to the builtin function; if
6439 convenient, the result should be placed in TARGET. */
6440 static rtx
6441 expand_builtin_signbit (tree exp, rtx target)
6442 {
6443 const struct real_format *fmt;
6444 scalar_float_mode fmode;
6445 scalar_int_mode rmode, imode;
6446 tree arg;
6447 int word, bitpos;
6448 enum insn_code icode;
6449 rtx temp;
6450 location_t loc = EXPR_LOCATION (exp);
6451
6452 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6453 return NULL_RTX;
6454
6455 arg = CALL_EXPR_ARG (exp, 0);
6456 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6457 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6458 fmt = REAL_MODE_FORMAT (fmode);
6459
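/* Save ARG so it is not evaluated more than once if the fallback
comparison against zero below is used. */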
6460 arg = builtin_save_expr (arg);
6461
6462 /* Expand the argument yielding a RTX expression. */
6463 temp = expand_normal (arg);
6464
6465 /* Check if the back end provides an insn that handles signbit for the
6466 argument's mode. */
6467 icode = optab_handler (signbit_optab, fmode);
6468 if (icode != CODE_FOR_nothing)
6469 {
6470 rtx_insn *last = get_last_insn ();
6471 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6472 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6473 return target;
6474 delete_insns_since (last);
6475 }
6476
6477 /* For floating point formats without a sign bit, implement signbit
6478 as "ARG < 0.0". */
6479 bitpos = fmt->signbit_ro;
6480 if (bitpos < 0)
6481 {
6482 /* But we can't do this if the format supports signed zero. */
6483 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6484
6485 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6486 build_real (TREE_TYPE (arg), dconst0));
6487 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6488 }
6489
6490 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6491 {
6492 imode = int_mode_for_mode (fmode).require ();
6493 temp = gen_lowpart (imode, temp);
6494 }
6495 else
6496 {
6497 imode = word_mode;
6498 /* Handle targets with different FP word orders. */
6499 if (FLOAT_WORDS_BIG_ENDIAN)
6500 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6501 else
6502 word = bitpos / BITS_PER_WORD;
6503 temp = operand_subword_force (temp, word, fmode);
6504 bitpos = bitpos % BITS_PER_WORD;
6505 }
6506
6507 /* Force the intermediate word_mode (or narrower) result into a
6508 register. This avoids attempting to create paradoxical SUBREGs
6509 of floating point modes below. */
6510 temp = force_reg (imode, temp);
6511
6512 /* If the bitpos is within the "result mode" lowpart, the operation
6513 can be implemented with a single bitwise AND. Otherwise, we need
6514 a right shift and an AND. */
6515
6516 if (bitpos < GET_MODE_BITSIZE (rmode))
6517 {
6518 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6519
6520 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6521 temp = gen_lowpart (rmode, temp);
6522 temp = expand_binop (rmode, and_optab, temp,
6523 immed_wide_int_const (mask, rmode),
6524 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6525 }
6526 else
6527 {
6528 /* Perform a logical right shift to place the signbit in the least
6529 significant bit, then truncate the result to the desired mode
6530 and mask just this bit. */
6531 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6532 temp = gen_lowpart (rmode, temp);
6533 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6534 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6535 }
6536
6537 return temp;
6538 }
6539
6540 /* Expand fork or exec calls. TARGET is the desired target of the
6541 call. EXP is the call. FN is the
6542 identifier of the actual function. IGNORE is nonzero if the
6543 value is to be ignored. */
6544
6545 static rtx
6546 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6547 {
6548 tree id, decl;
6549 tree call;
6550
6551 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6552 {
6553 /* Detect unterminated path. */
6554 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6555 return NULL_RTX;
6556
6557 /* Also detect unterminated first argument. */
6558 switch (DECL_FUNCTION_CODE (fn))
6559 {
6560 case BUILT_IN_EXECL:
6561 case BUILT_IN_EXECLE:
6562 case BUILT_IN_EXECLP:
6563 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1)))
6564 return NULL_RTX;
6565 default:
6566 break;
6567 }
6568 }
6569
6570
6571 /* If we are not profiling, just call the function. */
6572 if (!profile_arc_flag)
6573 return NULL_RTX;
6574
6575 /* Otherwise call the wrapper. This should be equivalent for the rest of
6576 the compiler, so the code does not diverge, and the wrapper may run the
6577 code necessary for keeping the profiling sane. */
6578
6579 switch (DECL_FUNCTION_CODE (fn))
6580 {
6581 case BUILT_IN_FORK:
6582 id = get_identifier ("__gcov_fork");
6583 break;
6584
6585 case BUILT_IN_EXECL:
6586 id = get_identifier ("__gcov_execl");
6587 break;
6588
6589 case BUILT_IN_EXECV:
6590 id = get_identifier ("__gcov_execv");
6591 break;
6592
6593 case BUILT_IN_EXECLP:
6594 id = get_identifier ("__gcov_execlp");
6595 break;
6596
6597 case BUILT_IN_EXECLE:
6598 id = get_identifier ("__gcov_execle");
6599 break;
6600
6601 case BUILT_IN_EXECVP:
6602 id = get_identifier ("__gcov_execvp");
6603 break;
6604
6605 case BUILT_IN_EXECVE:
6606 id = get_identifier ("__gcov_execve");
6607 break;
6608
6609 default:
6610 gcc_unreachable ();
6611 }
6612
6613 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6614 FUNCTION_DECL, id, TREE_TYPE (fn));
6615 DECL_EXTERNAL (decl) = 1;
6616 TREE_PUBLIC (decl) = 1;
6617 DECL_ARTIFICIAL (decl) = 1;
6618 TREE_NOTHROW (decl) = 1;
6619 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6620 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6621 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6622 return expand_call (call, target, ignore);
6623 }
6624
6625
6626 \f
6627 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6628 the pointer in these functions is void*, the tree optimizers may remove
6629 casts. The mode computed in expand_builtin isn't reliable either, due
6630 to __sync_bool_compare_and_swap.
6631
6632 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6633 group of builtins. This gives us log2 of the mode size. */
6634
6635 static inline machine_mode
6636 get_builtin_sync_mode (int fcode_diff)
6637 {
6638 /* The size is not negotiable, so ask not to get BLKmode in return
6639 if the target indicates that a smaller size would be better. */
6640 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6641 }
6642
6643 /* Expand the memory expression LOC and return the appropriate memory operand
6644 for the builtin_sync operations. */
6645
6646 static rtx
6647 get_builtin_sync_mem (tree loc, machine_mode mode)
6648 {
6649 rtx addr, mem;
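/* Pick up the address space from the pointed-to type, or from LOC
itself if it is not a pointer. */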
6650 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6651 ? TREE_TYPE (TREE_TYPE (loc))
6652 : TREE_TYPE (loc));
6653 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6654
6655 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6656 addr = convert_memory_address (addr_mode, addr);
6657
6658 /* Note that we explicitly do not want any alias information for this
6659 memory, so that we kill all other live memories. Otherwise we don't
6660 satisfy the full barrier semantics of the intrinsic. */
6661 mem = gen_rtx_MEM (mode, addr);
6662
6663 set_mem_addr_space (mem, addr_space);
6664
6665 mem = validize_mem (mem);
6666
6667 /* The alignment needs to be at least that of the mode. */
6668 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6669 get_pointer_alignment (loc)));
6670 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6671 MEM_VOLATILE_P (mem) = 1;
6672
6673 return mem;
6674 }
6675
6676 /* Make sure an argument is in the right mode.
6677 EXP is the tree argument.
6678 MODE is the mode it should be in. */
6679
6680 static rtx
6681 expand_expr_force_mode (tree exp, machine_mode mode)
6682 {
6683 rtx val;
6684 machine_mode old_mode;
6685
6686 if (TREE_CODE (exp) == SSA_NAME
6687 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6688 {
6689 /* Undo argument promotion if possible, as combine might not
6690 be able to do it later due to MEM_VOLATILE_P uses in the
6691 patterns. */
6692 gimple *g = get_gimple_for_ssa_name (exp);
6693 if (g && gimple_assign_cast_p (g))
6694 {
6695 tree rhs = gimple_assign_rhs1 (g);
6696 tree_code code = gimple_assign_rhs_code (g);
6697 if (CONVERT_EXPR_CODE_P (code)
6698 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6699 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6700 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6701 && (TYPE_PRECISION (TREE_TYPE (exp))
6702 > TYPE_PRECISION (TREE_TYPE (rhs))))
6703 exp = rhs;
6704 }
6705 }
6706
6707 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6708 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6709 of CONST_INTs, where we know the old_mode only from the call argument. */
6710
6711 old_mode = GET_MODE (val);
6712 if (old_mode == VOIDmode)
6713 old_mode = TYPE_MODE (TREE_TYPE (exp));
6714 val = convert_modes (mode, old_mode, val, 1);
6715 return val;
6716 }
6717
6718
6719 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6720 EXP is the CALL_EXPR. CODE is the rtx code
6721 that corresponds to the arithmetic or logical operation from the name;
6722 an exception here is that NOT actually means NAND. TARGET is an optional
6723 place for us to store the results; AFTER is true if this is the
6724 fetch_and_xxx form. */
6725
6726 static rtx
6727 expand_builtin_sync_operation (machine_mode mode, tree exp,
6728 enum rtx_code code, bool after,
6729 rtx target)
6730 {
6731 rtx val, mem;
6732 location_t loc = EXPR_LOCATION (exp);
6733
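/* Warn once per form about the GCC 4.4 semantic change of the
__sync NAND builtins. */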
6734 if (code == NOT && warn_sync_nand)
6735 {
6736 tree fndecl = get_callee_fndecl (exp);
6737 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6738
6739 static bool warned_f_a_n, warned_n_a_f;
6740
6741 switch (fcode)
6742 {
6743 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6744 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6745 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6746 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6747 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6748 if (warned_f_a_n)
6749 break;
6750
6751 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6752 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6753 warned_f_a_n = true;
6754 break;
6755
6756 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6757 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6758 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6759 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6760 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6761 if (warned_n_a_f)
6762 break;
6763
6764 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6765 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6766 warned_n_a_f = true;
6767 break;
6768
6769 default:
6770 gcc_unreachable ();
6771 }
6772 }
6773
6774 /* Expand the operands. */
6775 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6776 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6777
6778 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6779 after);
6780 }
6781
6782 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6783 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6784 true if this is the boolean form. TARGET is a place for us to store the
6785 results; this is NOT optional if IS_BOOL is true. */
6786
6787 static rtx
6788 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6789 bool is_bool, rtx target)
6790 {
6791 rtx old_val, new_val, mem;
6792 rtx *pbool, *poval;
6793
6794 /* Expand the operands. */
6795 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6796 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6797 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6798
6799 pbool = poval = NULL;
6800 if (target != const0_rtx)
6801 {
6802 if (is_bool)
6803 pbool = &target;
6804 else
6805 poval = &target;
6806 }
6807 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6808 false, MEMMODEL_SYNC_SEQ_CST,
6809 MEMMODEL_SYNC_SEQ_CST))
6810 return NULL_RTX;
6811
6812 return target;
6813 }
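/* Illustrative sketch (not part of GCC, hypothetical caller): both __sync
   compare-and-swap flavors are routed through this expander; IS_BOOL selects
   which result is produced:

     int old = __sync_val_compare_and_swap (&v, expected, desired);
     if (__sync_bool_compare_and_swap (&v, expected, desired))
       ;   // the swap happened
*/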
6814
6815 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6816 general form is actually an atomic exchange, and some targets only
6817 support a reduced form with the second argument being a constant 1.
6818 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6819 the results. */
6820
6821 static rtx
6822 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6823 rtx target)
6824 {
6825 rtx val, mem;
6826
6827 /* Expand the operands. */
6828 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6829 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6830
6831 return expand_sync_lock_test_and_set (target, mem, val);
6832 }
6833
6834 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6835
6836 static void
6837 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6838 {
6839 rtx mem;
6840
6841 /* Expand the operands. */
6842 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6843
6844 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6845 }
6846
6847 /* Given an integer representing an ``enum memmodel'', verify its
6848 correctness and return the memory model enum. */
6849
6850 static enum memmodel
6851 get_memmodel (tree exp)
6852 {
6853 rtx op;
6854 unsigned HOST_WIDE_INT val;
6855 location_t loc
6856 = expansion_point_location_if_in_system_header (input_location);
6857
6858 /* If the parameter is not a constant, it's a run time value so we'll just
6859 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6860 if (TREE_CODE (exp) != INTEGER_CST)
6861 return MEMMODEL_SEQ_CST;
6862
6863 op = expand_normal (exp);
6864
6865 val = INTVAL (op);
6866 if (targetm.memmodel_check)
6867 val = targetm.memmodel_check (val);
6868 else if (val & ~MEMMODEL_MASK)
6869 {
6870 warning_at (loc, OPT_Winvalid_memory_model,
6871 "unknown architecture specifier in memory model to builtin");
6872 return MEMMODEL_SEQ_CST;
6873 }
6874
6875 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
6876 if (memmodel_base (val) >= MEMMODEL_LAST)
6877 {
6878 warning_at (loc, OPT_Winvalid_memory_model,
6879 "invalid memory model argument to builtin");
6880 return MEMMODEL_SEQ_CST;
6881 }
6882
6883 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6884 be conservative and promote consume to acquire. */
6885 if (val == MEMMODEL_CONSUME)
6886 val = MEMMODEL_ACQUIRE;
6887
6888 return (enum memmodel) val;
6889 }
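/* Illustrative sketch (not part of GCC, hypothetical caller): how constant
   and non-constant memory model arguments are treated by get_memmodel:

     x = __atomic_load_n (&p, __ATOMIC_CONSUME);   // consume promoted to acquire
     x = __atomic_load_n (&p, order);              // run time value: seq_cst

   Out-of-range constants are diagnosed with -Winvalid-memory-model and also
   fall back to MEMMODEL_SEQ_CST.  */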
6890
6891 /* Expand the __atomic_exchange intrinsic:
6892 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6893 EXP is the CALL_EXPR.
6894 TARGET is an optional place for us to store the results. */
6895
6896 static rtx
6897 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6898 {
6899 rtx val, mem;
6900 enum memmodel model;
6901
6902 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6903
6904 if (!flag_inline_atomics)
6905 return NULL_RTX;
6906
6907 /* Expand the operands. */
6908 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6909 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6910
6911 return expand_atomic_exchange (target, mem, val, model);
6912 }
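/* Illustrative sketch (not part of GCC, hypothetical caller): a minimal use of
   the exchange form expanded here, e.g. a simple test-and-set style lock:

     while (__atomic_exchange_n (&locked, 1, __ATOMIC_ACQUIRE))
       ;   // spin
     __atomic_store_n (&locked, 0, __ATOMIC_RELEASE);
*/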
6913
6914 /* Expand the __atomic_compare_exchange intrinsic:
6915 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6916 TYPE desired, BOOL weak,
6917 enum memmodel success,
6918 enum memmodel failure)
6919 EXP is the CALL_EXPR.
6920 TARGET is an optional place for us to store the results. */
6921
6922 static rtx
6923 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6924 rtx target)
6925 {
6926 rtx expect, desired, mem, oldval;
6927 rtx_code_label *label;
6928 enum memmodel success, failure;
6929 tree weak;
6930 bool is_weak;
6931 location_t loc
6932 = expansion_point_location_if_in_system_header (input_location);
6933
6934 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6935 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6936
6937 if (failure > success)
6938 {
6939 warning_at (loc, OPT_Winvalid_memory_model,
6940 "failure memory model cannot be stronger than success "
6941 "memory model for %<__atomic_compare_exchange%>");
6942 success = MEMMODEL_SEQ_CST;
6943 }
6944
6945 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6946 {
6947 warning_at (loc, OPT_Winvalid_memory_model,
6948 "invalid failure memory model for "
6949 "%<__atomic_compare_exchange%>");
6950 failure = MEMMODEL_SEQ_CST;
6951 success = MEMMODEL_SEQ_CST;
6952 }
6953
6954
6955 if (!flag_inline_atomics)
6956 return NULL_RTX;
6957
6958 /* Expand the operands. */
6959 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6960
6961 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6962 expect = convert_memory_address (Pmode, expect);
6963 expect = gen_rtx_MEM (mode, expect);
6964 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6965
6966 weak = CALL_EXPR_ARG (exp, 3);
6967 is_weak = false;
6968 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6969 is_weak = true;
6970
6971 if (target == const0_rtx)
6972 target = NULL;
6973
6974 /* Lest the rtl backend create a race condition with an improper store
6975 to memory, always create a new pseudo for OLDVAL. */
6976 oldval = NULL;
6977
6978 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6979 is_weak, success, failure))
6980 return NULL_RTX;
6981
6982 /* Conditionally store back to EXPECT, lest we create a race condition
6983 with an improper store to memory. */
6984 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6985 the normal case where EXPECT is totally private, i.e. a register. At
6986 which point the store can be unconditional. */
6987 label = gen_label_rtx ();
6988 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6989 GET_MODE (target), 1, label);
6990 emit_move_insn (expect, oldval);
6991 emit_label (label);
6992
6993 return target;
6994 }
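/* Illustrative sketch (not part of GCC, hypothetical caller): the usual CAS
   loop this expander supports.  On failure the value read from the object is
   stored back into the expected slot, which is why the expansion above
   conditionally copies OLDVAL back through the EXPECT memory:

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;   // expected now holds the value last observed in v
*/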
6995
6996 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6997 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6998 call. The weak parameter must be dropped to match the expected parameter
6999 list and the expected argument changed from value to pointer to memory
7000 slot. */
7001
7002 static void
7003 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
7004 {
7005 unsigned int z;
7006 vec<tree, va_gc> *vec;
7007
7008 vec_alloc (vec, 5);
7009 vec->quick_push (gimple_call_arg (call, 0));
7010 tree expected = gimple_call_arg (call, 1);
7011 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
7012 TREE_TYPE (expected));
7013 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
7014 if (expd != x)
7015 emit_move_insn (x, expd);
7016 tree v = make_tree (TREE_TYPE (expected), x);
7017 vec->quick_push (build1 (ADDR_EXPR,
7018 build_pointer_type (TREE_TYPE (expected)), v));
7019 vec->quick_push (gimple_call_arg (call, 2));
7020 /* Skip the boolean weak parameter. */
7021 for (z = 4; z < 6; z++)
7022 vec->quick_push (gimple_call_arg (call, z));
7023 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
7024 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
7025 gcc_assert (bytes_log2 < 5);
7026 built_in_function fncode
7027 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
7028 + bytes_log2);
7029 tree fndecl = builtin_decl_explicit (fncode);
7030 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
7031 fndecl);
7032 tree exp = build_call_vec (boolean_type_node, fn, vec);
7033 tree lhs = gimple_call_lhs (call);
7034 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
7035 if (lhs)
7036 {
7037 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7038 if (GET_MODE (boolret) != mode)
7039 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7040 x = force_reg (mode, x);
7041 write_complex_part (target, boolret, true);
7042 write_complex_part (target, x, false);
7043 }
7044 }
7045
7046 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7047
7048 void
7049 expand_ifn_atomic_compare_exchange (gcall *call)
7050 {
7051 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
7052 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
7053 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
7054 rtx expect, desired, mem, oldval, boolret;
7055 enum memmodel success, failure;
7056 tree lhs;
7057 bool is_weak;
7058 location_t loc
7059 = expansion_point_location_if_in_system_header (gimple_location (call));
7060
7061 success = get_memmodel (gimple_call_arg (call, 4));
7062 failure = get_memmodel (gimple_call_arg (call, 5));
7063
7064 if (failure > success)
7065 {
7066 warning_at (loc, OPT_Winvalid_memory_model,
7067 "failure memory model cannot be stronger than success "
7068 "memory model for %<__atomic_compare_exchange%>");
7069 success = MEMMODEL_SEQ_CST;
7070 }
7071
7072 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7073 {
7074 warning_at (loc, OPT_Winvalid_memory_model,
7075 "invalid failure memory model for "
7076 "%<__atomic_compare_exchange%>");
7077 failure = MEMMODEL_SEQ_CST;
7078 success = MEMMODEL_SEQ_CST;
7079 }
7080
7081 if (!flag_inline_atomics)
7082 {
7083 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7084 return;
7085 }
7086
7087 /* Expand the operands. */
7088 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
7089
7090 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
7091 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
7092
7093 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
7094
7095 boolret = NULL;
7096 oldval = NULL;
7097
7098 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
7099 is_weak, success, failure))
7100 {
7101 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7102 return;
7103 }
7104
7105 lhs = gimple_call_lhs (call);
7106 if (lhs)
7107 {
7108 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7109 if (GET_MODE (boolret) != mode)
7110 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7111 write_complex_part (target, boolret, true);
7112 write_complex_part (target, oldval, false);
7113 }
7114 }
7115
7116 /* Expand the __atomic_load intrinsic:
7117 TYPE __atomic_load (TYPE *object, enum memmodel)
7118 EXP is the CALL_EXPR.
7119 TARGET is an optional place for us to store the results. */
7120
7121 static rtx
7122 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
7123 {
7124 rtx mem;
7125 enum memmodel model;
7126
7127 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7128 if (is_mm_release (model) || is_mm_acq_rel (model))
7129 {
7130 location_t loc
7131 = expansion_point_location_if_in_system_header (input_location);
7132 warning_at (loc, OPT_Winvalid_memory_model,
7133 "invalid memory model for %<__atomic_load%>");
7134 model = MEMMODEL_SEQ_CST;
7135 }
7136
7137 if (!flag_inline_atomics)
7138 return NULL_RTX;
7139
7140 /* Expand the operand. */
7141 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7142
7143 return expand_atomic_load (target, mem, model);
7144 }
7145
7146
7147 /* Expand the __atomic_store intrinsic:
7148 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7149 EXP is the CALL_EXPR.
7150 There is no TARGET since a store produces no useful result. */
7151
7152 static rtx
7153 expand_builtin_atomic_store (machine_mode mode, tree exp)
7154 {
7155 rtx mem, val;
7156 enum memmodel model;
7157
7158 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7159 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7160 || is_mm_release (model)))
7161 {
7162 location_t loc
7163 = expansion_point_location_if_in_system_header (input_location);
7164 warning_at (loc, OPT_Winvalid_memory_model,
7165 "invalid memory model for %<__atomic_store%>");
7166 model = MEMMODEL_SEQ_CST;
7167 }
7168
7169 if (!flag_inline_atomics)
7170 return NULL_RTX;
7171
7172 /* Expand the operands. */
7173 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7174 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7175
7176 return expand_atomic_store (mem, val, model, false);
7177 }
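/* Illustrative sketch (not part of GCC, hypothetical caller): the model checks
   in the load and store expanders above mirror the C11 rules, e.g.:

     v = __atomic_load_n (&x, __ATOMIC_RELEASE);    // invalid for a load
     __atomic_store_n (&x, v, __ATOMIC_ACQUIRE);    // invalid for a store

   Both are diagnosed with -Winvalid-memory-model and replaced by seq_cst.  */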
7178
7179 /* Expand the __atomic_fetch_XXX intrinsic:
7180 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7181 EXP is the CALL_EXPR.
7182 TARGET is an optional place for us to store the results.
7183 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
7184 FETCH_AFTER is true if returning the result of the operation.
7185 FETCH_AFTER is false if returning the value before the operation.
7186 IGNORE is true if the result is not used.
7187 EXT_CALL is the correct builtin for an external call if this cannot be
7188 resolved to an instruction sequence. */
7189
7190 static rtx
7191 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7192 enum rtx_code code, bool fetch_after,
7193 bool ignore, enum built_in_function ext_call)
7194 {
7195 rtx val, mem, ret;
7196 enum memmodel model;
7197 tree fndecl;
7198 tree addr;
7199
7200 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7201
7202 /* Expand the operands. */
7203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7204 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7205
7206 /* Only try generating instructions if inlining is turned on. */
7207 if (flag_inline_atomics)
7208 {
7209 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7210 if (ret)
7211 return ret;
7212 }
7213
7214 /* Return if a different routine isn't needed for the library call. */
7215 if (ext_call == BUILT_IN_NONE)
7216 return NULL_RTX;
7217
7218 /* Change the call to the specified function. */
7219 fndecl = get_callee_fndecl (exp);
7220 addr = CALL_EXPR_FN (exp);
7221 STRIP_NOPS (addr);
7222
7223 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7224 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7225
7226 /* If we will emit code after the call, the call cannot be a tail call.
7227 If it is emitted as a tail call, a barrier is emitted after it, and
7228 then all trailing code is removed. */
7229 if (!ignore)
7230 CALL_EXPR_TAILCALL (exp) = 0;
7231
7232 /* Expand the call here so we can emit trailing code. */
7233 ret = expand_call (exp, target, ignore);
7234
7235 /* Replace the original function just in case it matters. */
7236 TREE_OPERAND (addr, 0) = fndecl;
7237
7238 /* Then issue the arithmetic correction to return the right result. */
7239 if (!ignore)
7240 {
7241 if (code == NOT)
7242 {
7243 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7244 OPTAB_LIB_WIDEN);
7245 ret = expand_simple_unop (mode, NOT, ret, target, true);
7246 }
7247 else
7248 ret = expand_simple_binop (mode, code, ret, val, target, true,
7249 OPTAB_LIB_WIDEN);
7250 }
7251 return ret;
7252 }
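/* Illustrative sketch (not part of GCC, hypothetical caller): the arithmetic
   correction above recovers the "operation result" form from a fetch-style
   library call.  For example, when

     int result = __atomic_nand_fetch (&v, mask, __ATOMIC_SEQ_CST);

   has to fall back to the external __atomic_fetch_nand_N routine, the old
   value it returns is fixed up as  result = ~(old & mask);  the other
   operations are fixed up as  result = old OP val.  */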
7253
7254 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7255
7256 void
7257 expand_ifn_atomic_bit_test_and (gcall *call)
7258 {
7259 tree ptr = gimple_call_arg (call, 0);
7260 tree bit = gimple_call_arg (call, 1);
7261 tree flag = gimple_call_arg (call, 2);
7262 tree lhs = gimple_call_lhs (call);
7263 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7264 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7265 enum rtx_code code;
7266 optab optab;
7267 class expand_operand ops[5];
7268
7269 gcc_assert (flag_inline_atomics);
7270
7271 if (gimple_call_num_args (call) == 4)
7272 model = get_memmodel (gimple_call_arg (call, 3));
7273
7274 rtx mem = get_builtin_sync_mem (ptr, mode);
7275 rtx val = expand_expr_force_mode (bit, mode);
7276
7277 switch (gimple_call_internal_fn (call))
7278 {
7279 case IFN_ATOMIC_BIT_TEST_AND_SET:
7280 code = IOR;
7281 optab = atomic_bit_test_and_set_optab;
7282 break;
7283 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7284 code = XOR;
7285 optab = atomic_bit_test_and_complement_optab;
7286 break;
7287 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7288 code = AND;
7289 optab = atomic_bit_test_and_reset_optab;
7290 break;
7291 default:
7292 gcc_unreachable ();
7293 }
7294
7295 if (lhs == NULL_TREE)
7296 {
7297 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7298 val, NULL_RTX, true, OPTAB_DIRECT);
7299 if (code == AND)
7300 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7301 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7302 return;
7303 }
7304
7305 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7306 enum insn_code icode = direct_optab_handler (optab, mode);
7307 gcc_assert (icode != CODE_FOR_nothing);
7308 create_output_operand (&ops[0], target, mode);
7309 create_fixed_operand (&ops[1], mem);
7310 create_convert_operand_to (&ops[2], val, mode, true);
7311 create_integer_operand (&ops[3], model);
7312 create_integer_operand (&ops[4], integer_onep (flag));
7313 if (maybe_expand_insn (icode, 5, ops))
7314 return;
7315
7316 rtx bitval = val;
7317 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7318 val, NULL_RTX, true, OPTAB_DIRECT);
7319 rtx maskval = val;
7320 if (code == AND)
7321 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7322 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7323 code, model, false);
7324 if (integer_onep (flag))
7325 {
7326 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7327 NULL_RTX, true, OPTAB_DIRECT);
7328 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7329 true, OPTAB_DIRECT);
7330 }
7331 else
7332 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7333 OPTAB_DIRECT);
7334 if (result != target)
7335 emit_move_insn (target, result);
7336 }
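/* Illustrative sketch (not part of GCC, hypothetical caller): the GIMPLE
   folders turn forms like

     _Bool was_set = (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
                      >> bit) & 1;

   into IFN_ATOMIC_BIT_TEST_AND_SET, which is expanded above either through
   the atomic_bit_test_and_* optabs or, failing that, by redoing the shift
   and mask on the result of a plain atomic fetch-op.  */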
7337
7338 /* Expand an atomic clear operation.
7339 void __atomic_clear (BOOL *obj, enum memmodel)
7340 EXP is the call expression. */
7341
7342 static rtx
7343 expand_builtin_atomic_clear (tree exp)
7344 {
7345 machine_mode mode;
7346 rtx mem, ret;
7347 enum memmodel model;
7348
7349 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7350 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7351 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7352
7353 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7354 {
7355 location_t loc
7356 = expansion_point_location_if_in_system_header (input_location);
7357 warning_at (loc, OPT_Winvalid_memory_model,
7358 "invalid memory model for %<__atomic_store%>");
7359 model = MEMMODEL_SEQ_CST;
7360 }
7361
7362 /* Try issuing an atomic store, allowing a fallback to the
7363 __sync_lock_release pattern. The only way this can fail is if the bool
7364 type is larger than a word size. Unlikely, but handle it anyway for
7365 completeness: emit a plain store and assume a single-threaded model,
7366 since there is no atomic support in this case and no barriers are required. */
7367 ret = expand_atomic_store (mem, const0_rtx, model, true);
7368 if (!ret)
7369 emit_move_insn (mem, const0_rtx);
7370 return const0_rtx;
7371 }
7372
7373 /* Expand an atomic test_and_set operation.
7374 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7375 EXP is the call expression. */
7376
7377 static rtx
7378 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7379 {
7380 rtx mem;
7381 enum memmodel model;
7382 machine_mode mode;
7383
7384 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7385 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7386 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7387
7388 return expand_atomic_test_and_set (target, mem, model);
7389 }
7390
7391
7392 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7393 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7394
7395 static tree
7396 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7397 {
7398 int size;
7399 machine_mode mode;
7400 unsigned int mode_align, type_align;
7401
7402 if (TREE_CODE (arg0) != INTEGER_CST)
7403 return NULL_TREE;
7404
7405 /* We need a corresponding integer mode for the access to be lock-free. */
7406 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7407 if (!int_mode_for_size (size, 0).exists (&mode))
7408 return boolean_false_node;
7409
7410 mode_align = GET_MODE_ALIGNMENT (mode);
7411
7412 if (TREE_CODE (arg1) == INTEGER_CST)
7413 {
7414 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7415
7416 /* Either this argument is null, or it's a fake pointer encoding
7417 the alignment of the object. */
7418 val = least_bit_hwi (val);
7419 val *= BITS_PER_UNIT;
7420
7421 if (val == 0 || mode_align < val)
7422 type_align = mode_align;
7423 else
7424 type_align = val;
7425 }
7426 else
7427 {
7428 tree ttype = TREE_TYPE (arg1);
7429
7430 /* This function is usually invoked and folded immediately by the front
7431 end before anything else has a chance to look at it. The pointer
7432 parameter at this point is usually cast to a void *, so check for that
7433 and look past the cast. */
7434 if (CONVERT_EXPR_P (arg1)
7435 && POINTER_TYPE_P (ttype)
7436 && VOID_TYPE_P (TREE_TYPE (ttype))
7437 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7438 arg1 = TREE_OPERAND (arg1, 0);
7439
7440 ttype = TREE_TYPE (arg1);
7441 gcc_assert (POINTER_TYPE_P (ttype));
7442
7443 /* Get the underlying type of the object. */
7444 ttype = TREE_TYPE (ttype);
7445 type_align = TYPE_ALIGN (ttype);
7446 }
7447
7448 /* If the object has smaller alignment, the lock free routines cannot
7449 be used. */
7450 if (type_align < mode_align)
7451 return boolean_false_node;
7452
7453 /* Check if a compare_and_swap pattern exists for the mode which represents
7454 the required size. The pattern is not allowed to fail, so the existence
7455 of the pattern indicates support is present. Also require that an
7456 atomic load exists for the required size. */
7457 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7458 return boolean_true_node;
7459 else
7460 return boolean_false_node;
7461 }
7462
7463 /* Return true if the parameters to call EXP represent an object which will
7464 always generate lock free instructions. The first argument represents the
7465 size of the object, and the second parameter is a pointer to the object
7466 itself. If NULL is passed for the object, then the result is based on
7467 typical alignment for an object of the specified size. Otherwise return
7468 false. */
7469
7470 static rtx
7471 expand_builtin_atomic_always_lock_free (tree exp)
7472 {
7473 tree size;
7474 tree arg0 = CALL_EXPR_ARG (exp, 0);
7475 tree arg1 = CALL_EXPR_ARG (exp, 1);
7476
7477 if (TREE_CODE (arg0) != INTEGER_CST)
7478 {
7479 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7480 return const0_rtx;
7481 }
7482
7483 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7484 if (size == boolean_true_node)
7485 return const1_rtx;
7486 return const0_rtx;
7487 }
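/* Illustrative sketch (not part of GCC, hypothetical caller): typical uses of
   the builtin folded/expanded above:

     _Bool a = __atomic_always_lock_free (sizeof (long), 0);    // typical alignment
     _Bool b = __atomic_always_lock_free (sizeof (s.f), &s.f);  // alignment of s.f

   The size must be an integer constant; a non-constant size is rejected with
   the error above.  */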
7488
7489 /* Return boolean_true_node if it can be determined that the object ARG1 of
7490 size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
7491
7492 static tree
7493 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7494 {
7495 if (!flag_inline_atomics)
7496 return NULL_TREE;
7497
7498 /* If it isn't always lock free, don't generate a result. */
7499 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7500 return boolean_true_node;
7501
7502 return NULL_TREE;
7503 }
7504
7505 /* Expand __atomic_is_lock_free: the first argument of call EXP is the size
7506 of the object, and the second is a pointer to the object itself. If NULL
7507 is passed for the object, then the result is based on typical alignment
7508 for an object of the specified size. Return const1_rtx if the object is
7509 known at compile time to be lock free; otherwise return NULL_RTX so the
7510 call is expanded as a library call. */
7511
7512 static rtx
7513 expand_builtin_atomic_is_lock_free (tree exp)
7514 {
7515 tree size;
7516 tree arg0 = CALL_EXPR_ARG (exp, 0);
7517 tree arg1 = CALL_EXPR_ARG (exp, 1);
7518
7519 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7520 {
7521 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7522 return NULL_RTX;
7523 }
7524
7525 if (!flag_inline_atomics)
7526 return NULL_RTX;
7527
7528 /* If the value is known at compile time, return the RTX for it. */
7529 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7530 if (size == boolean_true_node)
7531 return const1_rtx;
7532
7533 return NULL_RTX;
7534 }
7535
7536 /* Expand the __atomic_thread_fence intrinsic:
7537 void __atomic_thread_fence (enum memmodel)
7538 EXP is the CALL_EXPR. */
7539
7540 static void
7541 expand_builtin_atomic_thread_fence (tree exp)
7542 {
7543 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7544 expand_mem_thread_fence (model);
7545 }
7546
7547 /* Expand the __atomic_signal_fence intrinsic:
7548 void __atomic_signal_fence (enum memmodel)
7549 EXP is the CALL_EXPR. */
7550
7551 static void
7552 expand_builtin_atomic_signal_fence (tree exp)
7553 {
7554 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7555 expand_mem_signal_fence (model);
7556 }
7557
7558 /* Expand the __sync_synchronize intrinsic. */
7559
7560 static void
7561 expand_builtin_sync_synchronize (void)
7562 {
7563 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7564 }
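/* Illustrative sketch (not part of GCC, hypothetical caller): the three fence
   forms expanded just above:

     __atomic_thread_fence (__ATOMIC_RELEASE);   // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler-only barrier
     __sync_synchronize ();                      // full SEQ_CST barrier
*/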
7565
7566 static rtx
7567 expand_builtin_thread_pointer (tree exp, rtx target)
7568 {
7569 enum insn_code icode;
7570 if (!validate_arglist (exp, VOID_TYPE))
7571 return const0_rtx;
7572 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7573 if (icode != CODE_FOR_nothing)
7574 {
7575 class expand_operand op;
7576 /* If the target is not suitable then create a new target. */
7577 if (target == NULL_RTX
7578 || !REG_P (target)
7579 || GET_MODE (target) != Pmode)
7580 target = gen_reg_rtx (Pmode);
7581 create_output_operand (&op, target, Pmode);
7582 expand_insn (icode, 1, &op);
7583 return target;
7584 }
7585 error ("%<__builtin_thread_pointer%> is not supported on this target");
7586 return const0_rtx;
7587 }
7588
7589 static void
7590 expand_builtin_set_thread_pointer (tree exp)
7591 {
7592 enum insn_code icode;
7593 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7594 return;
7595 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7596 if (icode != CODE_FOR_nothing)
7597 {
7598 class expand_operand op;
7599 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7600 Pmode, EXPAND_NORMAL);
7601 create_input_operand (&op, val, Pmode);
7602 expand_insn (icode, 1, &op);
7603 return;
7604 }
7605 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7606 }
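/* Illustrative sketch (not part of GCC, hypothetical caller): these builtins
   are only usable when the target provides the corresponding optab:

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   On targets without the optab, the errors above are emitted instead.  */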
7607
7608 \f
7609 /* Emit code to restore the current value of stack. */
7610
7611 static void
7612 expand_stack_restore (tree var)
7613 {
7614 rtx_insn *prev;
7615 rtx sa = expand_normal (var);
7616
7617 sa = convert_memory_address (Pmode, sa);
7618
7619 prev = get_last_insn ();
7620 emit_stack_restore (SAVE_BLOCK, sa);
7621
7622 record_new_stack_level ();
7623
7624 fixup_args_size_notes (prev, get_last_insn (), 0);
7625 }
7626
7627 /* Emit code to save the current value of stack. */
7628
7629 static rtx
7630 expand_stack_save (void)
7631 {
7632 rtx ret = NULL_RTX;
7633
7634 emit_stack_save (SAVE_BLOCK, &ret);
7635 return ret;
7636 }
7637
7638 /* Emit code to get the OpenACC gang, worker or vector id or size. */
7639
7640 static rtx
7641 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7642 {
7643 const char *name;
7644 rtx fallback_retval;
7645 rtx_insn *(*gen_fn) (rtx, rtx);
7646 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7647 {
7648 case BUILT_IN_GOACC_PARLEVEL_ID:
7649 name = "__builtin_goacc_parlevel_id";
7650 fallback_retval = const0_rtx;
7651 gen_fn = targetm.gen_oacc_dim_pos;
7652 break;
7653 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7654 name = "__builtin_goacc_parlevel_size";
7655 fallback_retval = const1_rtx;
7656 gen_fn = targetm.gen_oacc_dim_size;
7657 break;
7658 default:
7659 gcc_unreachable ();
7660 }
7661
7662 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7663 {
7664 error ("%qs only supported in OpenACC code", name);
7665 return const0_rtx;
7666 }
7667
7668 tree arg = CALL_EXPR_ARG (exp, 0);
7669 if (TREE_CODE (arg) != INTEGER_CST)
7670 {
7671 error ("non-constant argument 0 to %qs", name);
7672 return const0_rtx;
7673 }
7674
7675 int dim = TREE_INT_CST_LOW (arg);
7676 switch (dim)
7677 {
7678 case GOMP_DIM_GANG:
7679 case GOMP_DIM_WORKER:
7680 case GOMP_DIM_VECTOR:
7681 break;
7682 default:
7683 error ("illegal argument 0 to %qs", name);
7684 return const0_rtx;
7685 }
7686
7687 if (ignore)
7688 return target;
7689
7690 if (target == NULL_RTX)
7691 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7692
7693 if (!targetm.have_oacc_dim_size ())
7694 {
7695 emit_move_insn (target, fallback_retval);
7696 return target;
7697 }
7698
7699 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7700 emit_insn (gen_fn (reg, GEN_INT (dim)));
7701 if (reg != target)
7702 emit_move_insn (target, reg);
7703
7704 return target;
7705 }
7706
7707 /* Expand a string compare operation using a sequence of char comparisons
7708 to get rid of the calling overhead, with result going to TARGET if
7709 that's convenient.
7710
7711 VAR_STR is the variable string source;
7712 CONST_STR is the constant string source;
7713 LENGTH is the number of chars to compare;
7714 CONST_STR_N indicates which source string is the constant string;
7715 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7716
7717 The call is expanded to (assuming const_str_n is 2, i.e., arg2 is the constant string):
7718
7719 target = (int) (unsigned char) var_str[0]
7720 - (int) (unsigned char) const_str[0];
7721 if (target != 0)
7722 goto ne_label;
7723 ...
7724 target = (int) (unsigned char) var_str[length - 2]
7725 - (int) (unsigned char) const_str[length - 2];
7726 if (target != 0)
7727 goto ne_label;
7728 target = (int) (unsigned char) var_str[length - 1]
7729 - (int) (unsigned char) const_str[length - 1];
7730 ne_label:
7731 */
7732
7733 static rtx
7734 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7735 unsigned HOST_WIDE_INT length,
7736 int const_str_n, machine_mode mode)
7737 {
7738 HOST_WIDE_INT offset = 0;
7739 rtx var_rtx_array
7740 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7741 rtx var_rtx = NULL_RTX;
7742 rtx const_rtx = NULL_RTX;
7743 rtx result = target ? target : gen_reg_rtx (mode);
7744 rtx_code_label *ne_label = gen_label_rtx ();
7745 tree unit_type_node = unsigned_char_type_node;
7746 scalar_int_mode unit_mode
7747 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7748
7749 start_sequence ();
7750
7751 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7752 {
7753 var_rtx
7754 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7755 const_rtx = c_readstr (const_str + offset, unit_mode);
7756 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7757 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7758
7759 op0 = convert_modes (mode, unit_mode, op0, 1);
7760 op1 = convert_modes (mode, unit_mode, op1, 1);
7761 result = expand_simple_binop (mode, MINUS, op0, op1,
7762 result, 1, OPTAB_WIDEN);
7763 if (i < length - 1)
7764 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7765 mode, true, ne_label);
7766 offset += GET_MODE_SIZE (unit_mode);
7767 }
7768
7769 emit_label (ne_label);
7770 rtx_insn *insns = get_insns ();
7771 end_sequence ();
7772 emit_insn (insns);
7773
7774 return result;
7775 }
7776
7777 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7778 to TARGET if that's convenient.
7779 If the call cannot be inlined, return NULL_RTX. */
7780
7781 static rtx
7782 inline_expand_builtin_bytecmp (tree exp, rtx target)
7783 {
7784 tree fndecl = get_callee_fndecl (exp);
7785 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7786 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7787
7788 /* Do NOT apply this inlining expansion when optimizing for size or
7789 optimization level below 2. */
7790 if (optimize < 2 || optimize_insn_for_size_p ())
7791 return NULL_RTX;
7792
7793 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7794 || fcode == BUILT_IN_STRNCMP
7795 || fcode == BUILT_IN_MEMCMP);
7796
7797 /* On a target where the type of the call (int) has the same or narrower
7798 precision than unsigned char, give up on the inline expansion. */
7799 if (TYPE_PRECISION (unsigned_char_type_node)
7800 >= TYPE_PRECISION (TREE_TYPE (exp)))
7801 return NULL_RTX;
7802
7803 tree arg1 = CALL_EXPR_ARG (exp, 0);
7804 tree arg2 = CALL_EXPR_ARG (exp, 1);
7805 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7806
7807 unsigned HOST_WIDE_INT len1 = 0;
7808 unsigned HOST_WIDE_INT len2 = 0;
7809 unsigned HOST_WIDE_INT len3 = 0;
7810
7811 /* Get the object representation of the initializers of ARG1 and ARG2
7812 as strings, provided they refer to constant objects, with their byte
7813 sizes in LEN1 and LEN2, respectively. */
7814 const char *bytes1 = c_getstr (arg1, &len1);
7815 const char *bytes2 = c_getstr (arg2, &len2);
7816
7817 /* Fail if neither argument refers to an initialized constant. */
7818 if (!bytes1 && !bytes2)
7819 return NULL_RTX;
7820
7821 if (is_ncmp)
7822 {
7823 /* Fail if the memcmp/strncmp bound is not a constant. */
7824 if (!tree_fits_uhwi_p (len3_tree))
7825 return NULL_RTX;
7826
7827 len3 = tree_to_uhwi (len3_tree);
7828
7829 if (fcode == BUILT_IN_MEMCMP)
7830 {
7831 /* Fail if the memcmp bound is greater than the size of either
7832 of the two constant objects. */
7833 if ((bytes1 && len1 < len3)
7834 || (bytes2 && len2 < len3))
7835 return NULL_RTX;
7836 }
7837 }
7838
7839 if (fcode != BUILT_IN_MEMCMP)
7840 {
7841 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7842 and LEN2 to the length of the nul-terminated string stored
7843 in each. */
7844 if (bytes1 != NULL)
7845 len1 = strnlen (bytes1, len1) + 1;
7846 if (bytes2 != NULL)
7847 len2 = strnlen (bytes2, len2) + 1;
7848 }
7849
7850 /* See inline_string_cmp. */
7851 int const_str_n;
7852 if (!len1)
7853 const_str_n = 2;
7854 else if (!len2)
7855 const_str_n = 1;
7856 else if (len2 > len1)
7857 const_str_n = 1;
7858 else
7859 const_str_n = 2;
7860
7861 /* For strncmp only, compute the new bound as the smallest of
7862 the lengths of the two strings (plus 1) and the bound provided
7863 to the function. */
7864 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7865 if (is_ncmp && len3 < bound)
7866 bound = len3;
7867
7868 /* If the bound of the comparison is larger than the threshold,
7869 do nothing. */
7870 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7871 return NULL_RTX;
7872
7873 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7874
7875 /* Now, start the inline expansion of the call. */
7876 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7877 (const_str_n == 1) ? bytes1 : bytes2, bound,
7878 const_str_n, mode);
7879 }
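/* Illustrative sketch (not part of GCC, hypothetical caller): with -O2 and a
   short constant string, a call such as

     int r = strcmp (buf, "abc");

   can be expanded by inline_string_cmp above into a sequence of byte
   subtractions and branches, provided the bound does not exceed
   --param builtin-string-cmp-inline-length.  */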
7880
7881 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7882 represents the size of the first argument to that call, or VOIDmode
7883 if the argument is a pointer. IGNORE will be true if the result
7884 isn't used. */
7885 static rtx
7886 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7887 bool ignore)
7888 {
7889 rtx val, failsafe;
7890 unsigned nargs = call_expr_nargs (exp);
7891
7892 tree arg0 = CALL_EXPR_ARG (exp, 0);
7893
7894 if (mode == VOIDmode)
7895 {
7896 mode = TYPE_MODE (TREE_TYPE (arg0));
7897 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7898 }
7899
7900 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7901
7902 /* An optional second argument can be used as a failsafe value on
7903 some machines. If it isn't present, then the failsafe value is
7904 assumed to be 0. */
7905 if (nargs > 1)
7906 {
7907 tree arg1 = CALL_EXPR_ARG (exp, 1);
7908 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7909 }
7910 else
7911 failsafe = const0_rtx;
7912
7913 /* If the result isn't used, the behavior is undefined. It would be
7914 nice to emit a warning here, but path splitting means this might
7915 happen with legitimate code. So simply drop the builtin
7916 expansion in that case; we've handled any side-effects above. */
7917 if (ignore)
7918 return const0_rtx;
7919
7920 /* If we don't have a suitable target, create one to hold the result. */
7921 if (target == NULL || GET_MODE (target) != mode)
7922 target = gen_reg_rtx (mode);
7923
7924 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7925 val = convert_modes (mode, VOIDmode, val, false);
7926
7927 return targetm.speculation_safe_value (mode, target, val, failsafe);
7928 }
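/* Illustrative sketch (not part of GCC, hypothetical caller): a typical
   Spectre-v1 style use of this builtin on an untrusted index:

     if (idx < bound)
       val = array[__builtin_speculation_safe_value (idx)];

   The value (and the optional failsafe second argument) is passed through the
   target's speculation barrier before being used.  */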
7929
7930 /* Expand an expression EXP that calls a built-in function,
7931 with result going to TARGET if that's convenient
7932 (and in mode MODE if that's convenient).
7933 SUBTARGET may be used as the target for computing one of EXP's operands.
7934 IGNORE is nonzero if the value is to be ignored. */
7935
7936 rtx
7937 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7938 int ignore)
7939 {
7940 tree fndecl = get_callee_fndecl (exp);
7941 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7942 int flags;
7943
7944 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7945 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7946
7947 /* When ASan is enabled, we don't want to expand some memory/string
7948 builtins; instead we rely on libsanitizer's hooks. This allows us to avoid
7949 redundant checks and be sure that possible overflows will be detected
7950 by ASan. */
7951
7952 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7953 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7954 return expand_call (exp, target, ignore);
7955
7956 /* When not optimizing, generate calls to library functions for a certain
7957 set of builtins. */
7958 if (!optimize
7959 && !called_as_built_in (fndecl)
7960 && fcode != BUILT_IN_FORK
7961 && fcode != BUILT_IN_EXECL
7962 && fcode != BUILT_IN_EXECV
7963 && fcode != BUILT_IN_EXECLP
7964 && fcode != BUILT_IN_EXECLE
7965 && fcode != BUILT_IN_EXECVP
7966 && fcode != BUILT_IN_EXECVE
7967 && !ALLOCA_FUNCTION_CODE_P (fcode)
7968 && fcode != BUILT_IN_FREE)
7969 return expand_call (exp, target, ignore);
7970
7971 /* The built-in function expanders test for target == const0_rtx
7972 to determine whether the function's result will be ignored. */
7973 if (ignore)
7974 target = const0_rtx;
7975
7976 /* If the result of a pure or const built-in function is ignored, and
7977 none of its arguments are volatile, we can avoid expanding the
7978 built-in call and just evaluate the arguments for side-effects. */
7979 if (target == const0_rtx
7980 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7981 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7982 {
7983 bool volatilep = false;
7984 tree arg;
7985 call_expr_arg_iterator iter;
7986
7987 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7988 if (TREE_THIS_VOLATILE (arg))
7989 {
7990 volatilep = true;
7991 break;
7992 }
7993
7994 if (! volatilep)
7995 {
7996 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7997 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7998 return const0_rtx;
7999 }
8000 }
8001
8002 switch (fcode)
8003 {
8004 CASE_FLT_FN (BUILT_IN_FABS):
8005 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8006 case BUILT_IN_FABSD32:
8007 case BUILT_IN_FABSD64:
8008 case BUILT_IN_FABSD128:
8009 target = expand_builtin_fabs (exp, target, subtarget);
8010 if (target)
8011 return target;
8012 break;
8013
8014 CASE_FLT_FN (BUILT_IN_COPYSIGN):
8015 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
8016 target = expand_builtin_copysign (exp, target, subtarget);
8017 if (target)
8018 return target;
8019 break;
8020
8021 /* Just do a normal library call if we were unable to fold
8022 the values. */
8023 CASE_FLT_FN (BUILT_IN_CABS):
8024 break;
8025
8026 CASE_FLT_FN (BUILT_IN_FMA):
8027 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
8028 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
8029 if (target)
8030 return target;
8031 break;
8032
8033 CASE_FLT_FN (BUILT_IN_ILOGB):
8034 if (! flag_unsafe_math_optimizations)
8035 break;
8036 gcc_fallthrough ();
8037 CASE_FLT_FN (BUILT_IN_ISINF):
8038 CASE_FLT_FN (BUILT_IN_FINITE):
8039 case BUILT_IN_ISFINITE:
8040 case BUILT_IN_ISNORMAL:
8041 target = expand_builtin_interclass_mathfn (exp, target);
8042 if (target)
8043 return target;
8044 break;
8045
8046 CASE_FLT_FN (BUILT_IN_ICEIL):
8047 CASE_FLT_FN (BUILT_IN_LCEIL):
8048 CASE_FLT_FN (BUILT_IN_LLCEIL):
8049 CASE_FLT_FN (BUILT_IN_LFLOOR):
8050 CASE_FLT_FN (BUILT_IN_IFLOOR):
8051 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8052 target = expand_builtin_int_roundingfn (exp, target);
8053 if (target)
8054 return target;
8055 break;
8056
8057 CASE_FLT_FN (BUILT_IN_IRINT):
8058 CASE_FLT_FN (BUILT_IN_LRINT):
8059 CASE_FLT_FN (BUILT_IN_LLRINT):
8060 CASE_FLT_FN (BUILT_IN_IROUND):
8061 CASE_FLT_FN (BUILT_IN_LROUND):
8062 CASE_FLT_FN (BUILT_IN_LLROUND):
8063 target = expand_builtin_int_roundingfn_2 (exp, target);
8064 if (target)
8065 return target;
8066 break;
8067
8068 CASE_FLT_FN (BUILT_IN_POWI):
8069 target = expand_builtin_powi (exp, target);
8070 if (target)
8071 return target;
8072 break;
8073
8074 CASE_FLT_FN (BUILT_IN_CEXPI):
8075 target = expand_builtin_cexpi (exp, target);
8076 gcc_assert (target);
8077 return target;
8078
8079 CASE_FLT_FN (BUILT_IN_SIN):
8080 CASE_FLT_FN (BUILT_IN_COS):
8081 if (! flag_unsafe_math_optimizations)
8082 break;
8083 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8084 if (target)
8085 return target;
8086 break;
8087
8088 CASE_FLT_FN (BUILT_IN_SINCOS):
8089 if (! flag_unsafe_math_optimizations)
8090 break;
8091 target = expand_builtin_sincos (exp);
8092 if (target)
8093 return target;
8094 break;
8095
8096 case BUILT_IN_APPLY_ARGS:
8097 return expand_builtin_apply_args ();
8098
8099 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8100 FUNCTION with a copy of the parameters described by
8101 ARGUMENTS, and ARGSIZE. It returns a block of memory
8102 allocated on the stack into which is stored all the registers
8103 that might possibly be used for returning the result of a
8104 function. ARGUMENTS is the value returned by
8105 __builtin_apply_args. ARGSIZE is the number of bytes of
8106 arguments that must be copied. ??? How should this value be
8107 computed? We'll also need a safe worst case value for varargs
8108 functions. */
8109 case BUILT_IN_APPLY:
8110 if (!validate_arglist (exp, POINTER_TYPE,
8111 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8112 && !validate_arglist (exp, REFERENCE_TYPE,
8113 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8114 return const0_rtx;
8115 else
8116 {
8117 rtx ops[3];
8118
8119 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8120 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8121 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8122
8123 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8124 }
8125
8126 /* __builtin_return (RESULT) causes the function to return the
8127 value described by RESULT. RESULT is the address of the block of
8128 memory returned by __builtin_apply. */
8129 case BUILT_IN_RETURN:
8130 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8131 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8132 return const0_rtx;
8133
8134 case BUILT_IN_SAVEREGS:
8135 return expand_builtin_saveregs ();
8136
8137 case BUILT_IN_VA_ARG_PACK:
8138 /* All valid uses of __builtin_va_arg_pack () are removed during
8139 inlining. */
8140 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8141 return const0_rtx;
8142
8143 case BUILT_IN_VA_ARG_PACK_LEN:
8144 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8145 inlining. */
8146 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
8147 return const0_rtx;
8148
8149 /* Return the address of the first anonymous stack arg. */
8150 case BUILT_IN_NEXT_ARG:
8151 if (fold_builtin_next_arg (exp, false))
8152 return const0_rtx;
8153 return expand_builtin_next_arg ();
8154
8155 case BUILT_IN_CLEAR_CACHE:
8156 target = expand_builtin___clear_cache (exp);
8157 if (target)
8158 return target;
8159 break;
8160
8161 case BUILT_IN_CLASSIFY_TYPE:
8162 return expand_builtin_classify_type (exp);
8163
8164 case BUILT_IN_CONSTANT_P:
8165 return const0_rtx;
8166
8167 case BUILT_IN_FRAME_ADDRESS:
8168 case BUILT_IN_RETURN_ADDRESS:
8169 return expand_builtin_frame_address (fndecl, exp);
8170
8171 /* Returns the address of the area where the structure is returned.
8172 0 otherwise. */
8173 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8174 if (call_expr_nargs (exp) != 0
8175 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8176 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8177 return const0_rtx;
8178 else
8179 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8180
8181 CASE_BUILT_IN_ALLOCA:
8182 target = expand_builtin_alloca (exp);
8183 if (target)
8184 return target;
8185 break;
8186
8187 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8188 return expand_asan_emit_allocas_unpoison (exp);
8189
8190 case BUILT_IN_STACK_SAVE:
8191 return expand_stack_save ();
8192
8193 case BUILT_IN_STACK_RESTORE:
8194 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8195 return const0_rtx;
8196
8197 case BUILT_IN_BSWAP16:
8198 case BUILT_IN_BSWAP32:
8199 case BUILT_IN_BSWAP64:
8200 case BUILT_IN_BSWAP128:
8201 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8202 if (target)
8203 return target;
8204 break;
8205
8206 CASE_INT_FN (BUILT_IN_FFS):
8207 target = expand_builtin_unop (target_mode, exp, target,
8208 subtarget, ffs_optab);
8209 if (target)
8210 return target;
8211 break;
8212
8213 CASE_INT_FN (BUILT_IN_CLZ):
8214 target = expand_builtin_unop (target_mode, exp, target,
8215 subtarget, clz_optab);
8216 if (target)
8217 return target;
8218 break;
8219
8220 CASE_INT_FN (BUILT_IN_CTZ):
8221 target = expand_builtin_unop (target_mode, exp, target,
8222 subtarget, ctz_optab);
8223 if (target)
8224 return target;
8225 break;
8226
8227 CASE_INT_FN (BUILT_IN_CLRSB):
8228 target = expand_builtin_unop (target_mode, exp, target,
8229 subtarget, clrsb_optab);
8230 if (target)
8231 return target;
8232 break;
8233
8234 CASE_INT_FN (BUILT_IN_POPCOUNT):
8235 target = expand_builtin_unop (target_mode, exp, target,
8236 subtarget, popcount_optab);
8237 if (target)
8238 return target;
8239 break;
8240
8241 CASE_INT_FN (BUILT_IN_PARITY):
8242 target = expand_builtin_unop (target_mode, exp, target,
8243 subtarget, parity_optab);
8244 if (target)
8245 return target;
8246 break;
8247
8248 case BUILT_IN_STRLEN:
8249 target = expand_builtin_strlen (exp, target, target_mode);
8250 if (target)
8251 return target;
8252 break;
8253
8254 case BUILT_IN_STRNLEN:
8255 target = expand_builtin_strnlen (exp, target, target_mode);
8256 if (target)
8257 return target;
8258 break;
8259
8260 case BUILT_IN_STRCAT:
8261 target = expand_builtin_strcat (exp);
8262 if (target)
8263 return target;
8264 break;
8265
8266 case BUILT_IN_GETTEXT:
8267 case BUILT_IN_PUTS:
8268 case BUILT_IN_PUTS_UNLOCKED:
8269 case BUILT_IN_STRDUP:
8270 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8271 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8272 break;
8273
8274 case BUILT_IN_INDEX:
8275 case BUILT_IN_RINDEX:
8276 case BUILT_IN_STRCHR:
8277 case BUILT_IN_STRRCHR:
8278 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8279 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8280 break;
8281
8282 case BUILT_IN_FPUTS:
8283 case BUILT_IN_FPUTS_UNLOCKED:
8284 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8285 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8286 break;
8287
8288 case BUILT_IN_STRNDUP:
8289 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8290 check_nul_terminated_array (exp,
8291 CALL_EXPR_ARG (exp, 0),
8292 CALL_EXPR_ARG (exp, 1));
8293 break;
8294
8295 case BUILT_IN_STRCASECMP:
8296 case BUILT_IN_STRSTR:
8297 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8298 {
8299 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8300 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8301 }
8302 break;
8303
8304 case BUILT_IN_STRCPY:
8305 target = expand_builtin_strcpy (exp, target);
8306 if (target)
8307 return target;
8308 break;
8309
8310 case BUILT_IN_STRNCAT:
8311 target = expand_builtin_strncat (exp, target);
8312 if (target)
8313 return target;
8314 break;
8315
8316 case BUILT_IN_STRNCPY:
8317 target = expand_builtin_strncpy (exp, target);
8318 if (target)
8319 return target;
8320 break;
8321
8322 case BUILT_IN_STPCPY:
8323 target = expand_builtin_stpcpy (exp, target, mode);
8324 if (target)
8325 return target;
8326 break;
8327
8328 case BUILT_IN_STPNCPY:
8329 target = expand_builtin_stpncpy (exp, target);
8330 if (target)
8331 return target;
8332 break;
8333
8334 case BUILT_IN_MEMCHR:
8335 target = expand_builtin_memchr (exp, target);
8336 if (target)
8337 return target;
8338 break;
8339
8340 case BUILT_IN_MEMCPY:
8341 target = expand_builtin_memcpy (exp, target);
8342 if (target)
8343 return target;
8344 break;
8345
8346 case BUILT_IN_MEMMOVE:
8347 target = expand_builtin_memmove (exp, target);
8348 if (target)
8349 return target;
8350 break;
8351
8352 case BUILT_IN_MEMPCPY:
8353 target = expand_builtin_mempcpy (exp, target);
8354 if (target)
8355 return target;
8356 break;
8357
8358 case BUILT_IN_MEMSET:
8359 target = expand_builtin_memset (exp, target, mode);
8360 if (target)
8361 return target;
8362 break;
8363
8364 case BUILT_IN_BZERO:
8365 target = expand_builtin_bzero (exp);
8366 if (target)
8367 return target;
8368 break;
8369
8370 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8371 back to a BUILT_IN_STRCMP. Remember to delete the third parameter
8372 when changing it to a strcmp call. */
8373 case BUILT_IN_STRCMP_EQ:
8374 target = expand_builtin_memcmp (exp, target, true);
8375 if (target)
8376 return target;
8377
8378 /* Change this call back to a BUILT_IN_STRCMP. */
8379 TREE_OPERAND (exp, 1)
8380 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8381
8382 /* Delete the last parameter. */
8383 unsigned int i;
8384 vec<tree, va_gc> *arg_vec;
8385 vec_alloc (arg_vec, 2);
8386 for (i = 0; i < 2; i++)
8387 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8388 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8389 /* FALLTHROUGH */
8390
8391 case BUILT_IN_STRCMP:
8392 target = expand_builtin_strcmp (exp, target);
8393 if (target)
8394 return target;
8395 break;
8396
8397 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8398 back to a BUILT_IN_STRNCMP. */
8399 case BUILT_IN_STRNCMP_EQ:
8400 target = expand_builtin_memcmp (exp, target, true);
8401 if (target)
8402 return target;
8403
8404 /* Change it back to a BUILT_IN_STRNCMP. */
8405 TREE_OPERAND (exp, 1)
8406 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8407 /* FALLTHROUGH */
8408
8409 case BUILT_IN_STRNCMP:
8410 target = expand_builtin_strncmp (exp, target, mode);
8411 if (target)
8412 return target;
8413 break;
8414
8415 case BUILT_IN_BCMP:
8416 case BUILT_IN_MEMCMP:
8417 case BUILT_IN_MEMCMP_EQ:
8418 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8419 if (target)
8420 return target;
8421 if (fcode == BUILT_IN_MEMCMP_EQ)
8422 {
8423 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8424 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8425 }
8426 break;
8427
8428 case BUILT_IN_SETJMP:
8429 /* This should have been lowered to the builtins below. */
8430 gcc_unreachable ();
8431
8432 case BUILT_IN_SETJMP_SETUP:
8433 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8434 and the receiver label. */
8435 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8436 {
8437 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8438 VOIDmode, EXPAND_NORMAL);
8439 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8440 rtx_insn *label_r = label_rtx (label);
8441
8442 /* This is copied from the handling of non-local gotos. */
8443 expand_builtin_setjmp_setup (buf_addr, label_r);
8444 nonlocal_goto_handler_labels
8445 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8446 nonlocal_goto_handler_labels);
8447 /* ??? Do not let expand_label treat us as such since we would
8448 not want to be both on the list of non-local labels and on
8449 the list of forced labels. */
8450 FORCED_LABEL (label) = 0;
8451 return const0_rtx;
8452 }
8453 break;
8454
8455 case BUILT_IN_SETJMP_RECEIVER:
8456 /* __builtin_setjmp_receiver is passed the receiver label. */
8457 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8458 {
8459 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8460 rtx_insn *label_r = label_rtx (label);
8461
8462 expand_builtin_setjmp_receiver (label_r);
8463 return const0_rtx;
8464 }
8465 break;
8466
8467 /* __builtin_longjmp is passed a pointer to an array of five words.
8468 It's similar to the C library longjmp function but works with
8469 __builtin_setjmp above. */
8470 case BUILT_IN_LONGJMP:
8471 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8472 {
8473 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8474 VOIDmode, EXPAND_NORMAL);
8475 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8476
8477 if (value != const1_rtx)
8478 {
8479 error ("%<__builtin_longjmp%> second argument must be 1");
8480 return const0_rtx;
8481 }
8482
8483 expand_builtin_longjmp (buf_addr, value);
8484 return const0_rtx;
8485 }
8486 break;
8487
8488 case BUILT_IN_NONLOCAL_GOTO:
8489 target = expand_builtin_nonlocal_goto (exp);
8490 if (target)
8491 return target;
8492 break;
8493
8494 /* This updates the setjmp buffer that is its argument with the value
8495 of the current stack pointer. */
8496 case BUILT_IN_UPDATE_SETJMP_BUF:
8497 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8498 {
8499 rtx buf_addr
8500 = expand_normal (CALL_EXPR_ARG (exp, 0));
8501
8502 expand_builtin_update_setjmp_buf (buf_addr);
8503 return const0_rtx;
8504 }
8505 break;
8506
8507 case BUILT_IN_TRAP:
8508 expand_builtin_trap ();
8509 return const0_rtx;
8510
8511 case BUILT_IN_UNREACHABLE:
8512 expand_builtin_unreachable ();
8513 return const0_rtx;
8514
8515 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8516 case BUILT_IN_SIGNBITD32:
8517 case BUILT_IN_SIGNBITD64:
8518 case BUILT_IN_SIGNBITD128:
8519 target = expand_builtin_signbit (exp, target);
8520 if (target)
8521 return target;
8522 break;
8523
8524 /* Various hooks for the DWARF 2 __throw routine. */
8525 case BUILT_IN_UNWIND_INIT:
8526 expand_builtin_unwind_init ();
8527 return const0_rtx;
8528 case BUILT_IN_DWARF_CFA:
8529 return virtual_cfa_rtx;
8530 #ifdef DWARF2_UNWIND_INFO
8531 case BUILT_IN_DWARF_SP_COLUMN:
8532 return expand_builtin_dwarf_sp_column ();
8533 case BUILT_IN_INIT_DWARF_REG_SIZES:
8534 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8535 return const0_rtx;
8536 #endif
8537 case BUILT_IN_FROB_RETURN_ADDR:
8538 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8539 case BUILT_IN_EXTRACT_RETURN_ADDR:
8540 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8541 case BUILT_IN_EH_RETURN:
8542 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8543 CALL_EXPR_ARG (exp, 1));
8544 return const0_rtx;
8545 case BUILT_IN_EH_RETURN_DATA_REGNO:
8546 return expand_builtin_eh_return_data_regno (exp);
8547 case BUILT_IN_EXTEND_POINTER:
8548 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8549 case BUILT_IN_EH_POINTER:
8550 return expand_builtin_eh_pointer (exp);
8551 case BUILT_IN_EH_FILTER:
8552 return expand_builtin_eh_filter (exp);
8553 case BUILT_IN_EH_COPY_VALUES:
8554 return expand_builtin_eh_copy_values (exp);
8555
8556 case BUILT_IN_VA_START:
8557 return expand_builtin_va_start (exp);
8558 case BUILT_IN_VA_END:
8559 return expand_builtin_va_end (exp);
8560 case BUILT_IN_VA_COPY:
8561 return expand_builtin_va_copy (exp);
8562 case BUILT_IN_EXPECT:
8563 return expand_builtin_expect (exp, target);
8564 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8565 return expand_builtin_expect_with_probability (exp, target);
8566 case BUILT_IN_ASSUME_ALIGNED:
8567 return expand_builtin_assume_aligned (exp, target);
8568 case BUILT_IN_PREFETCH:
8569 expand_builtin_prefetch (exp);
8570 return const0_rtx;
8571
8572 case BUILT_IN_INIT_TRAMPOLINE:
8573 return expand_builtin_init_trampoline (exp, true);
8574 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8575 return expand_builtin_init_trampoline (exp, false);
8576 case BUILT_IN_ADJUST_TRAMPOLINE:
8577 return expand_builtin_adjust_trampoline (exp);
8578
8579 case BUILT_IN_INIT_DESCRIPTOR:
8580 return expand_builtin_init_descriptor (exp);
8581 case BUILT_IN_ADJUST_DESCRIPTOR:
8582 return expand_builtin_adjust_descriptor (exp);
8583
8584 case BUILT_IN_FORK:
8585 case BUILT_IN_EXECL:
8586 case BUILT_IN_EXECV:
8587 case BUILT_IN_EXECLP:
8588 case BUILT_IN_EXECLE:
8589 case BUILT_IN_EXECVP:
8590 case BUILT_IN_EXECVE:
8591 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8592 if (target)
8593 return target;
8594 break;
8595
8596 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8597 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8598 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8599 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8600 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8601 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8602 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8603 if (target)
8604 return target;
8605 break;
8606
8607 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8608 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8609 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8610 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8611 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8612 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8613 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8614 if (target)
8615 return target;
8616 break;
8617
8618 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8619 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8620 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8621 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8622 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8623 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8624 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8625 if (target)
8626 return target;
8627 break;
8628
8629 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8630 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8631 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8632 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8633 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8634 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8635 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8636 if (target)
8637 return target;
8638 break;
8639
8640 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8641 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8642 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8643 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8644 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8645 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8646 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8647 if (target)
8648 return target;
8649 break;
8650
8651 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8652 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8653 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8654 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8655 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8656 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8657 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8658 if (target)
8659 return target;
8660 break;
8661
8662 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8663 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8664 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8665 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8666 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8667 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8668 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8669 if (target)
8670 return target;
8671 break;
8672
8673 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8674 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8675 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8676 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8677 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8678 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8679 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8680 if (target)
8681 return target;
8682 break;
8683
8684 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8685 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8686 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8687 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8688 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8689 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8690 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8691 if (target)
8692 return target;
8693 break;
8694
8695 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8696 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8697 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8698 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8699 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8700 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8701 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8702 if (target)
8703 return target;
8704 break;
8705
8706 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8707 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8708 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8709 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8710 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8711 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8712 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8713 if (target)
8714 return target;
8715 break;
8716
8717 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8718 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8719 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8720 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8721 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8722 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8723 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8724 if (target)
8725 return target;
8726 break;
8727
8728 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8729 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8730 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8731 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8732 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8733 if (mode == VOIDmode)
8734 mode = TYPE_MODE (boolean_type_node);
8735 if (!target || !register_operand (target, mode))
8736 target = gen_reg_rtx (mode);
8737
8738 mode = get_builtin_sync_mode
8739 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8740 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8741 if (target)
8742 return target;
8743 break;
8744
8745 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8746 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8747 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8748 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8749 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8750 mode = get_builtin_sync_mode
8751 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8752 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8753 if (target)
8754 return target;
8755 break;
8756
8757 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8758 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8759 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8760 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8761 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8762 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8763 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8764 if (target)
8765 return target;
8766 break;
8767
8768 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8769 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8770 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8771 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8772 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8773 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8774 expand_builtin_sync_lock_release (mode, exp);
8775 return const0_rtx;
8776
8777 case BUILT_IN_SYNC_SYNCHRONIZE:
8778 expand_builtin_sync_synchronize ();
8779 return const0_rtx;
8780
8781 case BUILT_IN_ATOMIC_EXCHANGE_1:
8782 case BUILT_IN_ATOMIC_EXCHANGE_2:
8783 case BUILT_IN_ATOMIC_EXCHANGE_4:
8784 case BUILT_IN_ATOMIC_EXCHANGE_8:
8785 case BUILT_IN_ATOMIC_EXCHANGE_16:
8786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8787 target = expand_builtin_atomic_exchange (mode, exp, target);
8788 if (target)
8789 return target;
8790 break;
8791
8792 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8793 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8794 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8795 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8796 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8797 {
8798 unsigned int nargs, z;
8799 vec<tree, va_gc> *vec;
8800
8801 mode =
8802 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8803 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8804 if (target)
8805 return target;
8806
8807 /* If this is turned into an external library call, the weak parameter
8808 must be dropped to match the expected parameter list. */
8809 nargs = call_expr_nargs (exp);
8810 vec_alloc (vec, nargs - 1);
8811 for (z = 0; z < 3; z++)
8812 vec->quick_push (CALL_EXPR_ARG (exp, z));
8813 /* Skip the boolean weak parameter. */
8814 for (z = 4; z < 6; z++)
8815 vec->quick_push (CALL_EXPR_ARG (exp, z));
8816 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8817 break;
8818 }
8819
8820 case BUILT_IN_ATOMIC_LOAD_1:
8821 case BUILT_IN_ATOMIC_LOAD_2:
8822 case BUILT_IN_ATOMIC_LOAD_4:
8823 case BUILT_IN_ATOMIC_LOAD_8:
8824 case BUILT_IN_ATOMIC_LOAD_16:
8825 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8826 target = expand_builtin_atomic_load (mode, exp, target);
8827 if (target)
8828 return target;
8829 break;
8830
8831 case BUILT_IN_ATOMIC_STORE_1:
8832 case BUILT_IN_ATOMIC_STORE_2:
8833 case BUILT_IN_ATOMIC_STORE_4:
8834 case BUILT_IN_ATOMIC_STORE_8:
8835 case BUILT_IN_ATOMIC_STORE_16:
8836 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8837 target = expand_builtin_atomic_store (mode, exp);
8838 if (target)
8839 return const0_rtx;
8840 break;
8841
8842 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8843 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8844 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8845 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8846 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8847 {
8848 enum built_in_function lib;
8849 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8850 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8851 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8852 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8853 ignore, lib);
8854 if (target)
8855 return target;
8856 break;
8857 }
8858 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8859 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8860 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8861 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8862 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8863 {
8864 enum built_in_function lib;
8865 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8866 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8867 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8868 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8869 ignore, lib);
8870 if (target)
8871 return target;
8872 break;
8873 }
8874 case BUILT_IN_ATOMIC_AND_FETCH_1:
8875 case BUILT_IN_ATOMIC_AND_FETCH_2:
8876 case BUILT_IN_ATOMIC_AND_FETCH_4:
8877 case BUILT_IN_ATOMIC_AND_FETCH_8:
8878 case BUILT_IN_ATOMIC_AND_FETCH_16:
8879 {
8880 enum built_in_function lib;
8881 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8882 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8883 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8884 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8885 ignore, lib);
8886 if (target)
8887 return target;
8888 break;
8889 }
8890 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8891 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8892 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8893 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8894 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8895 {
8896 enum built_in_function lib;
8897 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8898 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8899 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8900 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8901 ignore, lib);
8902 if (target)
8903 return target;
8904 break;
8905 }
8906 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8907 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8908 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8909 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8910 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8911 {
8912 enum built_in_function lib;
8913 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8914 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8915 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8916 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8917 ignore, lib);
8918 if (target)
8919 return target;
8920 break;
8921 }
8922 case BUILT_IN_ATOMIC_OR_FETCH_1:
8923 case BUILT_IN_ATOMIC_OR_FETCH_2:
8924 case BUILT_IN_ATOMIC_OR_FETCH_4:
8925 case BUILT_IN_ATOMIC_OR_FETCH_8:
8926 case BUILT_IN_ATOMIC_OR_FETCH_16:
8927 {
8928 enum built_in_function lib;
8929 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8930 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8931 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8932 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8933 ignore, lib);
8934 if (target)
8935 return target;
8936 break;
8937 }
8938 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8939 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8940 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8941 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8942 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8943 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8944 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8945 ignore, BUILT_IN_NONE);
8946 if (target)
8947 return target;
8948 break;
8949
8950 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8951 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8952 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8953 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8954 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8955 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8956 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8957 ignore, BUILT_IN_NONE);
8958 if (target)
8959 return target;
8960 break;
8961
8962 case BUILT_IN_ATOMIC_FETCH_AND_1:
8963 case BUILT_IN_ATOMIC_FETCH_AND_2:
8964 case BUILT_IN_ATOMIC_FETCH_AND_4:
8965 case BUILT_IN_ATOMIC_FETCH_AND_8:
8966 case BUILT_IN_ATOMIC_FETCH_AND_16:
8967 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8968 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8969 ignore, BUILT_IN_NONE);
8970 if (target)
8971 return target;
8972 break;
8973
8974 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8975 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8976 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8977 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8978 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8979 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8980 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8981 ignore, BUILT_IN_NONE);
8982 if (target)
8983 return target;
8984 break;
8985
8986 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8987 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8988 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8989 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8990 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8991 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8992 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8993 ignore, BUILT_IN_NONE);
8994 if (target)
8995 return target;
8996 break;
8997
8998 case BUILT_IN_ATOMIC_FETCH_OR_1:
8999 case BUILT_IN_ATOMIC_FETCH_OR_2:
9000 case BUILT_IN_ATOMIC_FETCH_OR_4:
9001 case BUILT_IN_ATOMIC_FETCH_OR_8:
9002 case BUILT_IN_ATOMIC_FETCH_OR_16:
9003 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
9004 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
9005 ignore, BUILT_IN_NONE);
9006 if (target)
9007 return target;
9008 break;
9009
9010 case BUILT_IN_ATOMIC_TEST_AND_SET:
9011 return expand_builtin_atomic_test_and_set (exp, target);
9012
9013 case BUILT_IN_ATOMIC_CLEAR:
9014 return expand_builtin_atomic_clear (exp);
9015
9016 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9017 return expand_builtin_atomic_always_lock_free (exp);
9018
9019 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9020 target = expand_builtin_atomic_is_lock_free (exp);
9021 if (target)
9022 return target;
9023 break;
9024
9025 case BUILT_IN_ATOMIC_THREAD_FENCE:
9026 expand_builtin_atomic_thread_fence (exp);
9027 return const0_rtx;
9028
9029 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
9030 expand_builtin_atomic_signal_fence (exp);
9031 return const0_rtx;
9032
9033 case BUILT_IN_OBJECT_SIZE:
9034 return expand_builtin_object_size (exp);
9035
9036 case BUILT_IN_MEMCPY_CHK:
9037 case BUILT_IN_MEMPCPY_CHK:
9038 case BUILT_IN_MEMMOVE_CHK:
9039 case BUILT_IN_MEMSET_CHK:
9040 target = expand_builtin_memory_chk (exp, target, mode, fcode);
9041 if (target)
9042 return target;
9043 break;
9044
9045 case BUILT_IN_STRCPY_CHK:
9046 case BUILT_IN_STPCPY_CHK:
9047 case BUILT_IN_STRNCPY_CHK:
9048 case BUILT_IN_STPNCPY_CHK:
9049 case BUILT_IN_STRCAT_CHK:
9050 case BUILT_IN_STRNCAT_CHK:
9051 case BUILT_IN_SNPRINTF_CHK:
9052 case BUILT_IN_VSNPRINTF_CHK:
9053 maybe_emit_chk_warning (exp, fcode);
9054 break;
9055
9056 case BUILT_IN_SPRINTF_CHK:
9057 case BUILT_IN_VSPRINTF_CHK:
9058 maybe_emit_sprintf_chk_warning (exp, fcode);
9059 break;
9060
9061 case BUILT_IN_FREE:
9062 if (warn_free_nonheap_object)
9063 maybe_emit_free_warning (exp);
9064 break;
9065
9066 case BUILT_IN_THREAD_POINTER:
9067 return expand_builtin_thread_pointer (exp, target);
9068
9069 case BUILT_IN_SET_THREAD_POINTER:
9070 expand_builtin_set_thread_pointer (exp);
9071 return const0_rtx;
9072
9073 case BUILT_IN_ACC_ON_DEVICE:
9074 /* Do library call, if we failed to expand the builtin when
9075 folding. */
9076 break;
9077
9078 case BUILT_IN_GOACC_PARLEVEL_ID:
9079 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9080 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9081
9082 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9083 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9084
9085 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9086 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9087 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9088 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9089 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9090 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9091 return expand_speculation_safe_value (mode, exp, target, ignore);
9092
9093 default: /* just do library call, if unknown builtin */
9094 break;
9095 }
9096
9097 /* The switch statement above can drop through to cause the function
9098 to be called normally. */
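  /* For instance, the BUILT_IN_ACC_ON_DEVICE case above simply breaks out
     of the switch, so such a call is emitted here as an ordinary library
     call to acc_on_device; sync/atomic cases whose expansion produced no
     target typically end up as out-of-line calls the same way.  */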
9099 return expand_call (exp, target, ignore);
9100 }
9101
9102 /* Determine whether a tree node represents a call to a built-in
9103 function. If the tree T is a call to a built-in function with
9104 the right number of arguments of the appropriate types, return
9105 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9106 Otherwise the return value is END_BUILTINS. */
9107
9108 enum built_in_function
9109 builtin_mathfn_code (const_tree t)
9110 {
9111 const_tree fndecl, arg, parmlist;
9112 const_tree argtype, parmtype;
9113 const_call_expr_arg_iterator iter;
9114
9115 if (TREE_CODE (t) != CALL_EXPR)
9116 return END_BUILTINS;
9117
9118 fndecl = get_callee_fndecl (t);
9119 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9120 return END_BUILTINS;
9121
9122 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9123 init_const_call_expr_arg_iterator (t, &iter);
9124 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9125 {
9126 /* If a function doesn't take a variable number of arguments,
9127 the last element in the list will have type `void'. */
9128 parmtype = TREE_VALUE (parmlist);
9129 if (VOID_TYPE_P (parmtype))
9130 {
9131 if (more_const_call_expr_args_p (&iter))
9132 return END_BUILTINS;
9133 return DECL_FUNCTION_CODE (fndecl);
9134 }
9135
9136 if (! more_const_call_expr_args_p (&iter))
9137 return END_BUILTINS;
9138
9139 arg = next_const_call_expr_arg (&iter);
9140 argtype = TREE_TYPE (arg);
9141
9142 if (SCALAR_FLOAT_TYPE_P (parmtype))
9143 {
9144 if (! SCALAR_FLOAT_TYPE_P (argtype))
9145 return END_BUILTINS;
9146 }
9147 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9148 {
9149 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9150 return END_BUILTINS;
9151 }
9152 else if (POINTER_TYPE_P (parmtype))
9153 {
9154 if (! POINTER_TYPE_P (argtype))
9155 return END_BUILTINS;
9156 }
9157 else if (INTEGRAL_TYPE_P (parmtype))
9158 {
9159 if (! INTEGRAL_TYPE_P (argtype))
9160 return END_BUILTINS;
9161 }
9162 else
9163 return END_BUILTINS;
9164 }
9165
9166 /* Variable-length argument list. */
9167 return DECL_FUNCTION_CODE (fndecl);
9168 }
9169
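/* Illustrative sketch of the check above: for a call such as sqrt (x)
   where x has a floating-point type, builtin_mathfn_code returns
   BUILT_IN_SQRT; if an argument's type class does not match the
   parameter (say, a pointer passed where a float is expected) or the
   argument count is wrong, it returns END_BUILTINS instead.  */
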
9170 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9171 evaluate to a constant. */
9172
9173 static tree
9174 fold_builtin_constant_p (tree arg)
9175 {
9176 /* We return 1 for a numeric type that's known to be a constant
9177 value at compile-time or for an aggregate type that's a
9178 literal constant. */
9179 STRIP_NOPS (arg);
9180
9181 /* If we know this is a constant, return the constant one. */
9182 if (CONSTANT_CLASS_P (arg)
9183 || (TREE_CODE (arg) == CONSTRUCTOR
9184 && TREE_CONSTANT (arg)))
9185 return integer_one_node;
9186 if (TREE_CODE (arg) == ADDR_EXPR)
9187 {
9188 tree op = TREE_OPERAND (arg, 0);
9189 if (TREE_CODE (op) == STRING_CST
9190 || (TREE_CODE (op) == ARRAY_REF
9191 && integer_zerop (TREE_OPERAND (op, 1))
9192 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9193 return integer_one_node;
9194 }
9195
9196 /* If this expression has side effects, show we don't know it to be a
9197 constant.  Likewise if it's a pointer or aggregate type, since in
9198 those cases we only want literals; those are only optimized
9199 when generating RTL, not later.
9200 And finally, if we are compiling an initializer, not code, we
9201 need to return a definite result now; there's not going to be any
9202 more optimization done. */
9203 if (TREE_SIDE_EFFECTS (arg)
9204 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9205 || POINTER_TYPE_P (TREE_TYPE (arg))
9206 || cfun == 0
9207 || folding_initializer
9208 || force_folding_builtin_constant_p)
9209 return integer_zero_node;
9210
9211 return NULL_TREE;
9212 }
9213
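/* Some illustrative outcomes of the folding above:
     __builtin_constant_p (3)      -> 1 (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  -> 1 (address of a string literal)
     __builtin_constant_p (ptr)    -> 0 (pointer-typed, non-literal)
   while for a plain integer variable the answer stays NULL_TREE so a
   later pass may still prove it constant.  */
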
9214 /* Create builtin_expect or builtin_expect_with_probability
9215 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9216 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
9217 argument; builtin_expect_with_probability instead uses the third argument
9218 as the PROBABILITY value. */
9219
9220 static tree
9221 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9222 tree predictor, tree probability)
9223 {
9224 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9225
9226 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9227 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9228 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9229 ret_type = TREE_TYPE (TREE_TYPE (fn));
9230 pred_type = TREE_VALUE (arg_types);
9231 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9232
9233 pred = fold_convert_loc (loc, pred_type, pred);
9234 expected = fold_convert_loc (loc, expected_type, expected);
9235
9236 if (probability)
9237 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9238 else
9239 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9240 predictor);
9241
9242 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9243 build_int_cst (ret_type, 0));
9244 }
9245
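/* Roughly, for PRED `a > 0' and EXPECTED 1 this builds
     __builtin_expect ((long) (a > 0), 1L) != 0
   (or the _with_probability variant when PROBABILITY is given), so the
   result can be used directly as a truthvalue.  */
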
9246 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9247 NULL_TREE if no simplification is possible. */
9248
9249 tree
9250 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9251 tree arg3)
9252 {
9253 tree inner, fndecl, inner_arg0;
9254 enum tree_code code;
9255
9256 /* Distribute the expected value over short-circuiting operators.
9257 See through the cast from truthvalue_type_node to long. */
9258 inner_arg0 = arg0;
9259 while (CONVERT_EXPR_P (inner_arg0)
9260 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9261 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9262 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9263
9264 /* If this is a builtin_expect within a builtin_expect, keep the
9265 inner one.  See through a comparison against a constant; it
9266 might have been added to create a truthvalue. */
9267 inner = inner_arg0;
9268
9269 if (COMPARISON_CLASS_P (inner)
9270 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9271 inner = TREE_OPERAND (inner, 0);
9272
9273 if (TREE_CODE (inner) == CALL_EXPR
9274 && (fndecl = get_callee_fndecl (inner))
9275 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9276 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9277 return arg0;
9278
9279 inner = inner_arg0;
9280 code = TREE_CODE (inner);
9281 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9282 {
9283 tree op0 = TREE_OPERAND (inner, 0);
9284 tree op1 = TREE_OPERAND (inner, 1);
9285 arg1 = save_expr (arg1);
9286
9287 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9288 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9289 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9290
9291 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9292 }
9293
9294 /* If the argument isn't invariant then there's nothing else we can do. */
9295 if (!TREE_CONSTANT (inner_arg0))
9296 return NULL_TREE;
9297
9298 /* If we expect that a comparison against the argument will fold to
9299 a constant return the constant. In practice, this means a true
9300 constant or the address of a non-weak symbol. */
9301 inner = inner_arg0;
9302 STRIP_NOPS (inner);
9303 if (TREE_CODE (inner) == ADDR_EXPR)
9304 {
9305 do
9306 {
9307 inner = TREE_OPERAND (inner, 0);
9308 }
9309 while (TREE_CODE (inner) == COMPONENT_REF
9310 || TREE_CODE (inner) == ARRAY_REF);
9311 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9312 return NULL_TREE;
9313 }
9314
9315 /* Otherwise, ARG0 already has the proper type for the return value. */
9316 return arg0;
9317 }
9318
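/* As an illustration of the distribution above, a call such as
     __builtin_expect (a && b, 1)
   is folded to roughly
     (__builtin_expect ((long) a, 1) != 0)
       && (__builtin_expect ((long) b, 1) != 0)
   converted back to the original type, so each short-circuited operand
   carries its own prediction.  */
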
9319 /* Fold a call to __builtin_classify_type with argument ARG. */
9320
9321 static tree
9322 fold_builtin_classify_type (tree arg)
9323 {
9324 if (arg == 0)
9325 return build_int_cst (integer_type_node, no_type_class);
9326
9327 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9328 }
9329
9330 /* Fold a call to __builtin_strlen with argument ARG. */
9331
9332 static tree
9333 fold_builtin_strlen (location_t loc, tree type, tree arg)
9334 {
9335 if (!validate_arg (arg, POINTER_TYPE))
9336 return NULL_TREE;
9337 else
9338 {
9339 c_strlen_data lendata = { };
9340 tree len = c_strlen (arg, 0, &lendata);
9341
9342 if (len)
9343 return fold_convert_loc (loc, type, len);
9344
9345 if (!lendata.decl)
9346 c_strlen (arg, 1, &lendata);
9347
9348 if (lendata.decl)
9349 {
9350 if (EXPR_HAS_LOCATION (arg))
9351 loc = EXPR_LOCATION (arg);
9352 else if (loc == UNKNOWN_LOCATION)
9353 loc = input_location;
9354 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
9355 }
9356
9357 return NULL_TREE;
9358 }
9359 }
9360
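/* For example, strlen ("hello") folds to the constant 5 converted to
   TYPE.  When the argument is a constant character array with no
   terminating NUL, no length is returned, but warn_string_no_nul points
   the diagnostic at the offending array declaration.  */
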
9361 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9362
9363 static tree
9364 fold_builtin_inf (location_t loc, tree type, int warn)
9365 {
9366 REAL_VALUE_TYPE real;
9367
9368 /* __builtin_inff is intended to be usable to define INFINITY on all
9369 targets. If an infinity is not available, INFINITY expands "to a
9370 positive constant of type float that overflows at translation
9371 time", footnote "In this case, using INFINITY will violate the
9372 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9373 Thus we pedwarn to ensure this constraint violation is
9374 diagnosed. */
9375 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9376 pedwarn (loc, 0, "target format does not support infinity");
9377
9378 real_inf (&real);
9379 return build_real (type, real);
9380 }
9381
9382 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9383 NULL_TREE if no simplification can be made. */
9384
9385 static tree
9386 fold_builtin_sincos (location_t loc,
9387 tree arg0, tree arg1, tree arg2)
9388 {
9389 tree type;
9390 tree fndecl, call = NULL_TREE;
9391
9392 if (!validate_arg (arg0, REAL_TYPE)
9393 || !validate_arg (arg1, POINTER_TYPE)
9394 || !validate_arg (arg2, POINTER_TYPE))
9395 return NULL_TREE;
9396
9397 type = TREE_TYPE (arg0);
9398
9399 /* Calculate the result when the argument is a constant. */
9400 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9401 if (fn == END_BUILTINS)
9402 return NULL_TREE;
9403
9404 /* Canonicalize sincos to cexpi. */
9405 if (TREE_CODE (arg0) == REAL_CST)
9406 {
9407 tree complex_type = build_complex_type (type);
9408 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9409 }
9410 if (!call)
9411 {
9412 if (!targetm.libc_has_function (function_c99_math_complex)
9413 || !builtin_decl_implicit_p (fn))
9414 return NULL_TREE;
9415 fndecl = builtin_decl_explicit (fn);
9416 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9417 call = builtin_save_expr (call);
9418 }
9419
9420 tree ptype = build_pointer_type (type);
9421 arg1 = fold_convert (ptype, arg1);
9422 arg2 = fold_convert (ptype, arg2);
9423 return build2 (COMPOUND_EXPR, void_type_node,
9424 build2 (MODIFY_EXPR, void_type_node,
9425 build_fold_indirect_ref_loc (loc, arg1),
9426 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9427 build2 (MODIFY_EXPR, void_type_node,
9428 build_fold_indirect_ref_loc (loc, arg2),
9429 fold_build1_loc (loc, REALPART_EXPR, type, call)));
9430 }
9431
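/* The net effect of the canonicalization above is, roughly,
     sincos (x, &s, &c)  ->  tmp = cexpi (x); s = __imag tmp; c = __real tmp;
   which is only done when the argument is constant or the C library is
   known to provide the C99 complex functions.  */
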
9432 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
9433 Return NULL_TREE if no simplification can be made. */
9434
9435 static tree
9436 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9437 {
9438 if (!validate_arg (arg1, POINTER_TYPE)
9439 || !validate_arg (arg2, POINTER_TYPE)
9440 || !validate_arg (len, INTEGER_TYPE))
9441 return NULL_TREE;
9442
9443 /* If the LEN parameter is zero, return zero. */
9444 if (integer_zerop (len))
9445 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9446 arg1, arg2);
9447
9448 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9449 if (operand_equal_p (arg1, arg2, 0))
9450 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9451
9452 /* If the LEN parameter is one, return an expression corresponding to
9453 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9454 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9455 {
9456 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9457 tree cst_uchar_ptr_node
9458 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9459
9460 tree ind1
9461 = fold_convert_loc (loc, integer_type_node,
9462 build1 (INDIRECT_REF, cst_uchar_node,
9463 fold_convert_loc (loc,
9464 cst_uchar_ptr_node,
9465 arg1)));
9466 tree ind2
9467 = fold_convert_loc (loc, integer_type_node,
9468 build1 (INDIRECT_REF, cst_uchar_node,
9469 fold_convert_loc (loc,
9470 cst_uchar_ptr_node,
9471 arg2)));
9472 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9473 }
9474
9475 return NULL_TREE;
9476 }
9477
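/* Illustrative results of the memcmp folding above:
     memcmp (p, q, 0) -> 0
     memcmp (p, p, n) -> 0 (identical, non-volatile operands)
     memcmp (p, q, 1) -> *(const unsigned char *) p - *(const unsigned char *) q  */
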
9478 /* Fold a call to builtin isascii with argument ARG. */
9479
9480 static tree
9481 fold_builtin_isascii (location_t loc, tree arg)
9482 {
9483 if (!validate_arg (arg, INTEGER_TYPE))
9484 return NULL_TREE;
9485 else
9486 {
9487 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9488 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9489 build_int_cst (integer_type_node,
9490 ~ (unsigned HOST_WIDE_INT) 0x7f));
9491 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9492 arg, integer_zero_node);
9493 }
9494 }
9495
9496 /* Fold a call to builtin toascii with argument ARG. */
9497
9498 static tree
9499 fold_builtin_toascii (location_t loc, tree arg)
9500 {
9501 if (!validate_arg (arg, INTEGER_TYPE))
9502 return NULL_TREE;
9503
9504 /* Transform toascii(c) -> (c & 0x7f). */
9505 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9506 build_int_cst (integer_type_node, 0x7f));
9507 }
9508
9509 /* Fold a call to builtin isdigit with argument ARG. */
9510
9511 static tree
9512 fold_builtin_isdigit (location_t loc, tree arg)
9513 {
9514 if (!validate_arg (arg, INTEGER_TYPE))
9515 return NULL_TREE;
9516 else
9517 {
9518 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9519 /* According to the C standard, isdigit is unaffected by locale.
9520 However, it definitely is affected by the target character set. */
9521 unsigned HOST_WIDE_INT target_digit0
9522 = lang_hooks.to_target_charset ('0');
9523
9524 if (target_digit0 == 0)
9525 return NULL_TREE;
9526
9527 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9528 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9529 build_int_cst (unsigned_type_node, target_digit0));
9530 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9531 build_int_cst (unsigned_type_node, 9));
9532 }
9533 }
9534
9535 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9536
9537 static tree
9538 fold_builtin_fabs (location_t loc, tree arg, tree type)
9539 {
9540 if (!validate_arg (arg, REAL_TYPE))
9541 return NULL_TREE;
9542
9543 arg = fold_convert_loc (loc, type, arg);
9544 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9545 }
9546
9547 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9548
9549 static tree
9550 fold_builtin_abs (location_t loc, tree arg, tree type)
9551 {
9552 if (!validate_arg (arg, INTEGER_TYPE))
9553 return NULL_TREE;
9554
9555 arg = fold_convert_loc (loc, type, arg);
9556 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9557 }
9558
9559 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9560
9561 static tree
9562 fold_builtin_carg (location_t loc, tree arg, tree type)
9563 {
9564 if (validate_arg (arg, COMPLEX_TYPE)
9565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9566 {
9567 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9568
9569 if (atan2_fn)
9570 {
9571 tree new_arg = builtin_save_expr (arg);
9572 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9573 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9574 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9575 }
9576 }
9577
9578 return NULL_TREE;
9579 }
9580
9581 /* Fold a call to builtin frexp; we can assume the base is 2. */
9582
9583 static tree
9584 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9585 {
9586 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9587 return NULL_TREE;
9588
9589 STRIP_NOPS (arg0);
9590
9591 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9592 return NULL_TREE;
9593
9594 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9595
9596 /* Proceed if a valid pointer type was passed in. */
9597 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9598 {
9599 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9600 tree frac, exp;
9601
9602 switch (value->cl)
9603 {
9604 case rvc_zero:
9605 /* For +-0, return (*exp = 0, +-0). */
9606 exp = integer_zero_node;
9607 frac = arg0;
9608 break;
9609 case rvc_nan:
9610 case rvc_inf:
9611 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9612 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9613 case rvc_normal:
9614 {
9615 /* Since the frexp function always expects base 2, and in
9616 GCC normalized significands are already in the range
9617 [0.5, 1.0), we have exactly what frexp wants. */
9618 REAL_VALUE_TYPE frac_rvt = *value;
9619 SET_REAL_EXP (&frac_rvt, 0);
9620 frac = build_real (rettype, frac_rvt);
9621 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9622 }
9623 break;
9624 default:
9625 gcc_unreachable ();
9626 }
9627
9628 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9629 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9630 TREE_SIDE_EFFECTS (arg1) = 1;
9631 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9632 }
9633
9634 return NULL_TREE;
9635 }
9636
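/* For a constant argument the folding above yields, for example,
     frexp (8.0, &e)  ->  (*e = 4, 0.5)
   since 8.0 == 0.5 * 2**4, while for +-Inf or NaN the value of *e is
   left unspecified and the argument itself is returned.  */
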
9637 /* Fold a call to builtin modf. */
9638
9639 static tree
9640 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9641 {
9642 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9643 return NULL_TREE;
9644
9645 STRIP_NOPS (arg0);
9646
9647 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9648 return NULL_TREE;
9649
9650 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9651
9652 /* Proceed if a valid pointer type was passed in. */
9653 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9654 {
9655 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9656 REAL_VALUE_TYPE trunc, frac;
9657
9658 switch (value->cl)
9659 {
9660 case rvc_nan:
9661 case rvc_zero:
9662 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9663 trunc = frac = *value;
9664 break;
9665 case rvc_inf:
9666 /* For +-Inf, return (*arg1 = arg0, +-0). */
9667 frac = dconst0;
9668 frac.sign = value->sign;
9669 trunc = *value;
9670 break;
9671 case rvc_normal:
9672 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9673 real_trunc (&trunc, VOIDmode, value);
9674 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9675 /* If the original number was negative and already
9676 integral, then the fractional part is -0.0. */
9677 if (value->sign && frac.cl == rvc_zero)
9678 frac.sign = value->sign;
9679 break;
9680 }
9681
9682 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9683 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9684 build_real (rettype, trunc));
9685 TREE_SIDE_EFFECTS (arg1) = 1;
9686 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9687 build_real (rettype, frac));
9688 }
9689
9690 return NULL_TREE;
9691 }
9692
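/* For example, the constant folding above gives
     modf (2.5, &ip)   ->  (*ip = 2.0, 0.5)
     modf (-3.0, &ip)  ->  (*ip = -3.0, -0.0)
   the latter because a negative value that is already integral has a
   fractional part of -0.0.  */
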
9693 /* Given a location LOC, an interclass builtin function decl FNDECL
9694 and its single argument ARG, return a folded expression computing
9695 the same, or NULL_TREE if we either couldn't or didn't want to fold
9696 (the latter happens if there's an RTL instruction available). */
9697
9698 static tree
9699 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9700 {
9701 machine_mode mode;
9702
9703 if (!validate_arg (arg, REAL_TYPE))
9704 return NULL_TREE;
9705
9706 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9707 return NULL_TREE;
9708
9709 mode = TYPE_MODE (TREE_TYPE (arg));
9710
9711 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9712
9713 /* If there is no optab, try generic code. */
9714 switch (DECL_FUNCTION_CODE (fndecl))
9715 {
9716 tree result;
9717
9718 CASE_FLT_FN (BUILT_IN_ISINF):
9719 {
9720 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9721 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9722 tree type = TREE_TYPE (arg);
9723 REAL_VALUE_TYPE r;
9724 char buf[128];
9725
9726 if (is_ibm_extended)
9727 {
9728 /* NaN and Inf are encoded in the high-order double value
9729 only. The low-order value is not significant. */
9730 type = double_type_node;
9731 mode = DFmode;
9732 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9733 }
9734 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9735 real_from_string (&r, buf);
9736 result = build_call_expr (isgr_fn, 2,
9737 fold_build1_loc (loc, ABS_EXPR, type, arg),
9738 build_real (type, r));
9739 return result;
9740 }
9741 CASE_FLT_FN (BUILT_IN_FINITE):
9742 case BUILT_IN_ISFINITE:
9743 {
9744 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9745 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9746 tree type = TREE_TYPE (arg);
9747 REAL_VALUE_TYPE r;
9748 char buf[128];
9749
9750 if (is_ibm_extended)
9751 {
9752 /* NaN and Inf are encoded in the high-order double value
9753 only. The low-order value is not significant. */
9754 type = double_type_node;
9755 mode = DFmode;
9756 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9757 }
9758 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9759 real_from_string (&r, buf);
9760 result = build_call_expr (isle_fn, 2,
9761 fold_build1_loc (loc, ABS_EXPR, type, arg),
9762 build_real (type, r));
9763 /*result = fold_build2_loc (loc, UNGT_EXPR,
9764 TREE_TYPE (TREE_TYPE (fndecl)),
9765 fold_build1_loc (loc, ABS_EXPR, type, arg),
9766 build_real (type, r));
9767 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9768 TREE_TYPE (TREE_TYPE (fndecl)),
9769 result);*/
9770 return result;
9771 }
9772 case BUILT_IN_ISNORMAL:
9773 {
9774 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9775 islessequal(fabs(x),DBL_MAX). */
9776 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9777 tree type = TREE_TYPE (arg);
9778 tree orig_arg, max_exp, min_exp;
9779 machine_mode orig_mode = mode;
9780 REAL_VALUE_TYPE rmax, rmin;
9781 char buf[128];
9782
9783 orig_arg = arg = builtin_save_expr (arg);
9784 if (is_ibm_extended)
9785 {
9786 /* Use double to test the normal range of IBM extended
9787 precision. Emin for IBM extended precision is
9788 different to emin for IEEE double, being 53 higher
9789 since the low double exponent is at least 53 lower
9790 than the high double exponent. */
9791 type = double_type_node;
9792 mode = DFmode;
9793 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9794 }
9795 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9796
9797 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9798 real_from_string (&rmax, buf);
9799 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9800 real_from_string (&rmin, buf);
9801 max_exp = build_real (type, rmax);
9802 min_exp = build_real (type, rmin);
9803
9804 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9805 if (is_ibm_extended)
9806 {
9807 /* Testing the high end of the range is done just using
9808 the high double, using the same test as isfinite().
9809 For the subnormal end of the range we first test the
9810 high double, then if its magnitude is equal to the
9811 limit of 0x1p-969, we test whether the low double is
9812 non-zero and opposite sign to the high double. */
9813 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9814 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9815 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9816 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9817 arg, min_exp);
9818 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9819 complex_double_type_node, orig_arg);
9820 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9821 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9822 tree zero = build_real (type, dconst0);
9823 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9824 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9825 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9826 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9827 fold_build3 (COND_EXPR,
9828 integer_type_node,
9829 hilt, logt, lolt));
9830 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9831 eq_min, ok_lo);
9832 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9833 gt_min, eq_min);
9834 }
9835 else
9836 {
9837 tree const isge_fn
9838 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9839 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9840 }
9841 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9842 max_exp, min_exp);
9843 return result;
9844 }
9845 default:
9846 break;
9847 }
9848
9849 return NULL_TREE;
9850 }
9851
9852 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
9853 ARG is the argument for the call. */
9854
9855 static tree
9856 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9857 {
9858 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9859
9860 if (!validate_arg (arg, REAL_TYPE))
9861 return NULL_TREE;
9862
9863 switch (builtin_index)
9864 {
9865 case BUILT_IN_ISINF:
9866 if (!HONOR_INFINITIES (arg))
9867 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9868
9869 return NULL_TREE;
9870
9871 case BUILT_IN_ISINF_SIGN:
9872 {
9873 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9874 /* In a boolean context, GCC will fold the inner COND_EXPR to
9875 1. So e.g. "if (isinf_sign(x))" would be folded to just
9876 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9877 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9878 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9879 tree tmp = NULL_TREE;
9880
9881 arg = builtin_save_expr (arg);
9882
9883 if (signbit_fn && isinf_fn)
9884 {
9885 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9886 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9887
9888 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9889 signbit_call, integer_zero_node);
9890 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9891 isinf_call, integer_zero_node);
9892
9893 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9894 integer_minus_one_node, integer_one_node);
9895 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9896 isinf_call, tmp,
9897 integer_zero_node);
9898 }
9899
9900 return tmp;
9901 }
9902
9903 case BUILT_IN_ISFINITE:
9904 if (!HONOR_NANS (arg)
9905 && !HONOR_INFINITIES (arg))
9906 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9907
9908 return NULL_TREE;
9909
9910 case BUILT_IN_ISNAN:
9911 if (!HONOR_NANS (arg))
9912 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9913
9914 {
9915 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9916 if (is_ibm_extended)
9917 {
9918 /* NaN and Inf are encoded in the high-order double value
9919 only. The low-order value is not significant. */
9920 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9921 }
9922 }
9923 arg = builtin_save_expr (arg);
9924 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9925
9926 default:
9927 gcc_unreachable ();
9928 }
9929 }
9930
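/* For instance, __builtin_isnan (x) is folded above to an unordered
   self-comparison, roughly isunordered (x, x), and becomes a constant 0
   when NaNs are not honored for x's mode (e.g. with -ffinite-math-only).  */
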
9931 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9932 This builtin will generate code to return the appropriate floating
9933 point classification depending on the value of the floating point
9934 number passed in. The possible return values must be supplied as
9935 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9936 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9937 one floating point argument, which is "type generic". */
9938
9939 static tree
9940 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9941 {
9942 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9943 arg, type, res, tmp;
9944 machine_mode mode;
9945 REAL_VALUE_TYPE r;
9946 char buf[128];
9947
9948 /* Verify the required arguments in the original call. */
9949 if (nargs != 6
9950 || !validate_arg (args[0], INTEGER_TYPE)
9951 || !validate_arg (args[1], INTEGER_TYPE)
9952 || !validate_arg (args[2], INTEGER_TYPE)
9953 || !validate_arg (args[3], INTEGER_TYPE)
9954 || !validate_arg (args[4], INTEGER_TYPE)
9955 || !validate_arg (args[5], REAL_TYPE))
9956 return NULL_TREE;
9957
9958 fp_nan = args[0];
9959 fp_infinite = args[1];
9960 fp_normal = args[2];
9961 fp_subnormal = args[3];
9962 fp_zero = args[4];
9963 arg = args[5];
9964 type = TREE_TYPE (arg);
9965 mode = TYPE_MODE (type);
9966 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9967
9968 /* fpclassify(x) ->
9969 isnan(x) ? FP_NAN :
9970 (fabs(x) == Inf ? FP_INFINITE :
9971 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9972 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9973
9974 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9975 build_real (type, dconst0));
9976 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9977 tmp, fp_zero, fp_subnormal);
9978
9979 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9980 real_from_string (&r, buf);
9981 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9982 arg, build_real (type, r));
9983 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9984
9985 if (HONOR_INFINITIES (mode))
9986 {
9987 real_inf (&r);
9988 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9989 build_real (type, r));
9990 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9991 fp_infinite, res);
9992 }
9993
9994 if (HONOR_NANS (mode))
9995 {
9996 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9997 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9998 }
9999
10000 return res;
10001 }
10002
10003 /* Fold a call to an unordered comparison function such as
10004 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10005 being called and ARG0 and ARG1 are the arguments for the call.
10006 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10007 the opposite of the desired result. UNORDERED_CODE is used
10008 for modes that can hold NaNs and ORDERED_CODE is used for
10009 the rest. */
10010
10011 static tree
10012 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10013 enum tree_code unordered_code,
10014 enum tree_code ordered_code)
10015 {
10016 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10017 enum tree_code code;
10018 tree type0, type1;
10019 enum tree_code code0, code1;
10020 tree cmp_type = NULL_TREE;
10021
10022 type0 = TREE_TYPE (arg0);
10023 type1 = TREE_TYPE (arg1);
10024
10025 code0 = TREE_CODE (type0);
10026 code1 = TREE_CODE (type1);
10027
10028 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10029 /* Choose the wider of two real types. */
10030 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10031 ? type0 : type1;
10032 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10033 cmp_type = type0;
10034 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10035 cmp_type = type1;
10036
10037 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10038 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10039
10040 if (unordered_code == UNORDERED_EXPR)
10041 {
10042 if (!HONOR_NANS (arg0))
10043 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10044 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10045 }
10046
10047 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
10048 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10049 fold_build2_loc (loc, code, type, arg0, arg1));
10050 }
10051
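/* As an example of the logic above, __builtin_isunordered (x, y) folds
   to a constant 0 when the comparison type cannot hold NaNs, and
   otherwise to an UNORDERED_EXPR comparison of the two (converted)
   operands; the remaining builtins are built as the logical negation of
   the opposite comparison code.  */
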
10052 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10053 arithmetic if it can never overflow, or into internal functions that
10054 return both the result of the arithmetic and an overflow flag in
10055 a complex integer result, or some other check for overflow.
10056 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10057 checking part of that. */
10058
10059 static tree
10060 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10061 tree arg0, tree arg1, tree arg2)
10062 {
10063 enum internal_fn ifn = IFN_LAST;
10064 /* The code of the expression corresponding to the built-in. */
10065 enum tree_code opcode = ERROR_MARK;
10066 bool ovf_only = false;
10067
10068 switch (fcode)
10069 {
10070 case BUILT_IN_ADD_OVERFLOW_P:
10071 ovf_only = true;
10072 /* FALLTHRU */
10073 case BUILT_IN_ADD_OVERFLOW:
10074 case BUILT_IN_SADD_OVERFLOW:
10075 case BUILT_IN_SADDL_OVERFLOW:
10076 case BUILT_IN_SADDLL_OVERFLOW:
10077 case BUILT_IN_UADD_OVERFLOW:
10078 case BUILT_IN_UADDL_OVERFLOW:
10079 case BUILT_IN_UADDLL_OVERFLOW:
10080 opcode = PLUS_EXPR;
10081 ifn = IFN_ADD_OVERFLOW;
10082 break;
10083 case BUILT_IN_SUB_OVERFLOW_P:
10084 ovf_only = true;
10085 /* FALLTHRU */
10086 case BUILT_IN_SUB_OVERFLOW:
10087 case BUILT_IN_SSUB_OVERFLOW:
10088 case BUILT_IN_SSUBL_OVERFLOW:
10089 case BUILT_IN_SSUBLL_OVERFLOW:
10090 case BUILT_IN_USUB_OVERFLOW:
10091 case BUILT_IN_USUBL_OVERFLOW:
10092 case BUILT_IN_USUBLL_OVERFLOW:
10093 opcode = MINUS_EXPR;
10094 ifn = IFN_SUB_OVERFLOW;
10095 break;
10096 case BUILT_IN_MUL_OVERFLOW_P:
10097 ovf_only = true;
10098 /* FALLTHRU */
10099 case BUILT_IN_MUL_OVERFLOW:
10100 case BUILT_IN_SMUL_OVERFLOW:
10101 case BUILT_IN_SMULL_OVERFLOW:
10102 case BUILT_IN_SMULLL_OVERFLOW:
10103 case BUILT_IN_UMUL_OVERFLOW:
10104 case BUILT_IN_UMULL_OVERFLOW:
10105 case BUILT_IN_UMULLL_OVERFLOW:
10106 opcode = MULT_EXPR;
10107 ifn = IFN_MUL_OVERFLOW;
10108 break;
10109 default:
10110 gcc_unreachable ();
10111 }
10112
10113 /* For the "generic" overloads, the first two arguments can have different
10114 types and the last argument determines the target type to use to check
10115 for overflow. The arguments of the other overloads all have the same
10116 type. */
10117 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10118
10119 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10120 arguments are constant, attempt to fold the built-in call into a constant
10121 expression indicating whether or not it detected an overflow. */
10122 if (ovf_only
10123 && TREE_CODE (arg0) == INTEGER_CST
10124 && TREE_CODE (arg1) == INTEGER_CST)
10125 /* Perform the computation in the target type and check for overflow. */
10126 return omit_one_operand_loc (loc, boolean_type_node,
10127 arith_overflowed_p (opcode, type, arg0, arg1)
10128 ? boolean_true_node : boolean_false_node,
10129 arg2);
10130
10131 tree intres, ovfres;
10132 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10133 {
10134 intres = fold_binary_loc (loc, opcode, type,
10135 fold_convert_loc (loc, type, arg0),
10136 fold_convert_loc (loc, type, arg1));
10137 if (TREE_OVERFLOW (intres))
10138 intres = drop_tree_overflow (intres);
10139 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10140 ? boolean_true_node : boolean_false_node);
10141 }
10142 else
10143 {
10144 tree ctype = build_complex_type (type);
10145 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10146 arg0, arg1);
10147 tree tgt = save_expr (call);
10148 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10149 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10150 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10151 }
10152
10153 if (ovf_only)
10154 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10155
10156 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10157 tree store
10158 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10159 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
10160 }
10161
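/* Two illustrative foldings of the logic above (sketches; the exact
   trees depend on the types involved):
     __builtin_add_overflow_p (1, 2, (int) 0)  ->  false
     __builtin_add_overflow (a, b, &r)
       ->  tmp = .ADD_OVERFLOW (a, b); r = REALPART (tmp);
	   result = (bool) IMAGPART (tmp);
   where .ADD_OVERFLOW is the internal function returning both values in
   a complex integer.  */
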
10162 /* Fold a call to __builtin_FILE to a constant string. */
10163
10164 static inline tree
10165 fold_builtin_FILE (location_t loc)
10166 {
10167 if (const char *fname = LOCATION_FILE (loc))
10168 {
10169 /* The documentation says this builtin is equivalent to the preprocessor
10170 __FILE__ macro, so it appears appropriate to use the same file prefix
10171 mappings. */
10172 fname = remap_macro_filename (fname);
10173 return build_string_literal (strlen (fname) + 1, fname);
10174 }
10175
10176 return build_string_literal (1, "");
10177 }
10178
10179 /* Fold a call to __builtin_FUNCTION to a constant string. */
10180
10181 static inline tree
10182 fold_builtin_FUNCTION ()
10183 {
10184 const char *name = "";
10185
10186 if (current_function_decl)
10187 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10188
10189 return build_string_literal (strlen (name) + 1, name);
10190 }
10191
10192 /* Fold a call to __builtin_LINE to an integer constant. */
10193
10194 static inline tree
10195 fold_builtin_LINE (location_t loc, tree type)
10196 {
10197 return build_int_cst (type, LOCATION_LINE (loc));
10198 }
10199
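/* Taken together, the three helpers above fold, for example,
     __builtin_LINE ()      ->  the integer line of the call's location
     __builtin_FILE ()      ->  the (prefix-remapped) file name string
     __builtin_FUNCTION ()  ->  the enclosing function's name, or ""
				outside a function.  */
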
10200 /* Fold a call to built-in function FNDECL with 0 arguments.
10201 This function returns NULL_TREE if no simplification was possible. */
10202
10203 static tree
10204 fold_builtin_0 (location_t loc, tree fndecl)
10205 {
10206 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10207 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10208 switch (fcode)
10209 {
10210 case BUILT_IN_FILE:
10211 return fold_builtin_FILE (loc);
10212
10213 case BUILT_IN_FUNCTION:
10214 return fold_builtin_FUNCTION ();
10215
10216 case BUILT_IN_LINE:
10217 return fold_builtin_LINE (loc, type);
10218
10219 CASE_FLT_FN (BUILT_IN_INF):
10220 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10221 case BUILT_IN_INFD32:
10222 case BUILT_IN_INFD64:
10223 case BUILT_IN_INFD128:
10224 return fold_builtin_inf (loc, type, true);
10225
10226 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10227 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10228 return fold_builtin_inf (loc, type, false);
10229
10230 case BUILT_IN_CLASSIFY_TYPE:
10231 return fold_builtin_classify_type (NULL_TREE);
10232
10233 default:
10234 break;
10235 }
10236 return NULL_TREE;
10237 }
10238
10239 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10240 This function returns NULL_TREE if no simplification was possible. */
10241
10242 static tree
10243 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10244 {
10245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10246 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10247
10248 if (TREE_CODE (arg0) == ERROR_MARK)
10249 return NULL_TREE;
10250
10251 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10252 return ret;
10253
10254 switch (fcode)
10255 {
10256 case BUILT_IN_CONSTANT_P:
10257 {
10258 tree val = fold_builtin_constant_p (arg0);
10259
10260 /* Gimplification will pull the CALL_EXPR for the builtin out of
10261 an if condition. When not optimizing, we'll not CSE it back.
10262 To avoid regressions such as link errors, return false now. */
10263 if (!val && !optimize)
10264 val = integer_zero_node;
10265
10266 return val;
10267 }
10268
10269 case BUILT_IN_CLASSIFY_TYPE:
10270 return fold_builtin_classify_type (arg0);
10271
10272 case BUILT_IN_STRLEN:
10273 return fold_builtin_strlen (loc, type, arg0);
10274
10275 CASE_FLT_FN (BUILT_IN_FABS):
10276 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10277 case BUILT_IN_FABSD32:
10278 case BUILT_IN_FABSD64:
10279 case BUILT_IN_FABSD128:
10280 return fold_builtin_fabs (loc, arg0, type);
10281
10282 case BUILT_IN_ABS:
10283 case BUILT_IN_LABS:
10284 case BUILT_IN_LLABS:
10285 case BUILT_IN_IMAXABS:
10286 return fold_builtin_abs (loc, arg0, type);
10287
10288 CASE_FLT_FN (BUILT_IN_CONJ):
10289 if (validate_arg (arg0, COMPLEX_TYPE)
10290 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10291 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10292 break;
10293
10294 CASE_FLT_FN (BUILT_IN_CREAL):
10295 if (validate_arg (arg0, COMPLEX_TYPE)
10296 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10297 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10298 break;
10299
10300 CASE_FLT_FN (BUILT_IN_CIMAG):
10301 if (validate_arg (arg0, COMPLEX_TYPE)
10302 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10303 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10304 break;
10305
10306 CASE_FLT_FN (BUILT_IN_CARG):
10307 return fold_builtin_carg (loc, arg0, type);
10308
10309 case BUILT_IN_ISASCII:
10310 return fold_builtin_isascii (loc, arg0);
10311
10312 case BUILT_IN_TOASCII:
10313 return fold_builtin_toascii (loc, arg0);
10314
10315 case BUILT_IN_ISDIGIT:
10316 return fold_builtin_isdigit (loc, arg0);
10317
10318 CASE_FLT_FN (BUILT_IN_FINITE):
10319 case BUILT_IN_FINITED32:
10320 case BUILT_IN_FINITED64:
10321 case BUILT_IN_FINITED128:
10322 case BUILT_IN_ISFINITE:
10323 {
10324 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10325 if (ret)
10326 return ret;
10327 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10328 }
10329
10330 CASE_FLT_FN (BUILT_IN_ISINF):
10331 case BUILT_IN_ISINFD32:
10332 case BUILT_IN_ISINFD64:
10333 case BUILT_IN_ISINFD128:
10334 {
10335 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10336 if (ret)
10337 return ret;
10338 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10339 }
10340
10341 case BUILT_IN_ISNORMAL:
10342 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10343
10344 case BUILT_IN_ISINF_SIGN:
10345 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10346
10347 CASE_FLT_FN (BUILT_IN_ISNAN):
10348 case BUILT_IN_ISNAND32:
10349 case BUILT_IN_ISNAND64:
10350 case BUILT_IN_ISNAND128:
10351 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10352
10353 case BUILT_IN_FREE:
10354 if (integer_zerop (arg0))
10355 return build_empty_stmt (loc);
10356 break;
10357
10358 default:
10359 break;
10360 }
10361
10362 return NULL_TREE;
10363
10364 }
10365
10366 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10367 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10368 if no simplification was possible. */
10369
10370 static tree
10371 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10372 {
10373 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10374 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10375
10376 if (TREE_CODE (arg0) == ERROR_MARK
10377 || TREE_CODE (arg1) == ERROR_MARK)
10378 return NULL_TREE;
10379
10380 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10381 return ret;
10382
10383 switch (fcode)
10384 {
10385 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10386 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10387 if (validate_arg (arg0, REAL_TYPE)
10388 && validate_arg (arg1, POINTER_TYPE))
10389 return do_mpfr_lgamma_r (arg0, arg1, type);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_FREXP):
10393 return fold_builtin_frexp (loc, arg0, arg1, type);
10394
10395 CASE_FLT_FN (BUILT_IN_MODF):
10396 return fold_builtin_modf (loc, arg0, arg1, type);
10397
10398 case BUILT_IN_STRSPN:
10399 return fold_builtin_strspn (loc, expr, arg0, arg1);
10400
10401 case BUILT_IN_STRCSPN:
10402 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10403
10404 case BUILT_IN_STRPBRK:
10405 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10406
10407 case BUILT_IN_EXPECT:
10408 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10409
10410 case BUILT_IN_ISGREATER:
10411 return fold_builtin_unordered_cmp (loc, fndecl,
10412 arg0, arg1, UNLE_EXPR, LE_EXPR);
10413 case BUILT_IN_ISGREATEREQUAL:
10414 return fold_builtin_unordered_cmp (loc, fndecl,
10415 arg0, arg1, UNLT_EXPR, LT_EXPR);
10416 case BUILT_IN_ISLESS:
10417 return fold_builtin_unordered_cmp (loc, fndecl,
10418 arg0, arg1, UNGE_EXPR, GE_EXPR);
10419 case BUILT_IN_ISLESSEQUAL:
10420 return fold_builtin_unordered_cmp (loc, fndecl,
10421 arg0, arg1, UNGT_EXPR, GT_EXPR);
10422 case BUILT_IN_ISLESSGREATER:
10423 return fold_builtin_unordered_cmp (loc, fndecl,
10424 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10425 case BUILT_IN_ISUNORDERED:
10426 return fold_builtin_unordered_cmp (loc, fndecl,
10427 arg0, arg1, UNORDERED_EXPR,
10428 NOP_EXPR);
10429
10430 /* We do the folding for va_start in the expander. */
10431 case BUILT_IN_VA_START:
10432 break;
10433
10434 case BUILT_IN_OBJECT_SIZE:
10435 return fold_builtin_object_size (arg0, arg1);
10436
10437 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10438 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10439
10440 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10441 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10442
10443 default:
10444 break;
10445 }
10446 return NULL_TREE;
10447 }
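
/* Illustrative sketch (not from the original sources): the ISO C comparison
   macros fold to ordinary comparisons that are quiet on NaN operands, e.g.
   with hypothetical doubles 'x' and 'y'

     __builtin_isgreater (x, y)    behaves like  x > y
     __builtin_isunordered (x, y)  is true iff either operand is a NaN

   except that no "invalid" exception is raised for quiet NaNs; internally
   they are expressed through the inverted unordered comparison codes listed
   in the switch above (UNLE_EXPR for isgreater, UNLT_EXPR for
   isgreaterequal, and so on).  */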
10448
10449 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10450 and ARG2.
10451 This function returns NULL_TREE if no simplification was possible. */
10452
10453 static tree
10454 fold_builtin_3 (location_t loc, tree fndecl,
10455 tree arg0, tree arg1, tree arg2)
10456 {
10457 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10458 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10459
10460 if (TREE_CODE (arg0) == ERROR_MARK
10461 || TREE_CODE (arg1) == ERROR_MARK
10462 || TREE_CODE (arg2) == ERROR_MARK)
10463 return NULL_TREE;
10464
10465 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10466 arg0, arg1, arg2))
10467 return ret;
10468
10469 switch (fcode)
10470 {
10471
10472 CASE_FLT_FN (BUILT_IN_SINCOS):
10473 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10474
10475 CASE_FLT_FN (BUILT_IN_REMQUO):
10476 if (validate_arg (arg0, REAL_TYPE)
10477 && validate_arg (arg1, REAL_TYPE)
10478 && validate_arg (arg2, POINTER_TYPE))
10479 return do_mpfr_remquo (arg0, arg1, arg2);
10480 break;
10481
10482 case BUILT_IN_MEMCMP:
10483 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10484
10485 case BUILT_IN_EXPECT:
10486 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10487
10488 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10489 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10490
10491 case BUILT_IN_ADD_OVERFLOW:
10492 case BUILT_IN_SUB_OVERFLOW:
10493 case BUILT_IN_MUL_OVERFLOW:
10494 case BUILT_IN_ADD_OVERFLOW_P:
10495 case BUILT_IN_SUB_OVERFLOW_P:
10496 case BUILT_IN_MUL_OVERFLOW_P:
10497 case BUILT_IN_SADD_OVERFLOW:
10498 case BUILT_IN_SADDL_OVERFLOW:
10499 case BUILT_IN_SADDLL_OVERFLOW:
10500 case BUILT_IN_SSUB_OVERFLOW:
10501 case BUILT_IN_SSUBL_OVERFLOW:
10502 case BUILT_IN_SSUBLL_OVERFLOW:
10503 case BUILT_IN_SMUL_OVERFLOW:
10504 case BUILT_IN_SMULL_OVERFLOW:
10505 case BUILT_IN_SMULLL_OVERFLOW:
10506 case BUILT_IN_UADD_OVERFLOW:
10507 case BUILT_IN_UADDL_OVERFLOW:
10508 case BUILT_IN_UADDLL_OVERFLOW:
10509 case BUILT_IN_USUB_OVERFLOW:
10510 case BUILT_IN_USUBL_OVERFLOW:
10511 case BUILT_IN_USUBLL_OVERFLOW:
10512 case BUILT_IN_UMUL_OVERFLOW:
10513 case BUILT_IN_UMULL_OVERFLOW:
10514 case BUILT_IN_UMULLL_OVERFLOW:
10515 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10516
10517 default:
10518 break;
10519 }
10520 return NULL_TREE;
10521 }
10522
10523 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10524 ARGS is an array of NARGS arguments. IGNORE is true if the result
10525 of the function call is ignored. This function returns NULL_TREE
10526 if no simplification was possible. */
10527
10528 static tree
10529 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10530 int nargs, bool)
10531 {
10532 tree ret = NULL_TREE;
10533
10534 switch (nargs)
10535 {
10536 case 0:
10537 ret = fold_builtin_0 (loc, fndecl);
10538 break;
10539 case 1:
10540 ret = fold_builtin_1 (loc, fndecl, args[0]);
10541 break;
10542 case 2:
10543 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10544 break;
10545 case 3:
10546 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10547 break;
10548 default:
10549 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10550 break;
10551 }
10552 if (ret)
10553 {
10554 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10555 SET_EXPR_LOCATION (ret, loc);
10556 return ret;
10557 }
10558 return NULL_TREE;
10559 }
10560
10561 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10562 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10563 of arguments in ARGS to be omitted. OLDNARGS is the number of
10564 elements in ARGS. */
10565
10566 static tree
10567 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10568 int skip, tree fndecl, int n, va_list newargs)
10569 {
10570 int nargs = oldnargs - skip + n;
10571 tree *buffer;
10572
10573 if (n > 0)
10574 {
10575 int i, j;
10576
10577 buffer = XALLOCAVEC (tree, nargs);
10578 for (i = 0; i < n; i++)
10579 buffer[i] = va_arg (newargs, tree);
10580 for (j = skip; j < oldnargs; j++, i++)
10581 buffer[i] = args[j];
10582 }
10583 else
10584 buffer = args + skip;
10585
10586 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10587 }
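
/* Illustrative sketch (not from the original sources): given an original
   argument list {a, b, c} with OLDNARGS == 3, SKIP == 1 and two new
   arguments {x, y}, the rebuilt call is FNDECL (x, y, b, c): the new
   arguments come first, followed by the old ones minus the skipped
   prefix.  */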
10588
10589 /* Return true if FNDECL shouldn't be folded right now.
10590 If a built-in function has an inline attribute always_inline
10591 wrapper, defer folding it after always_inline functions have
10592 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10593 might not be performed. */
10594
10595 bool
10596 avoid_folding_inline_builtin (tree fndecl)
10597 {
10598 return (DECL_DECLARED_INLINE_P (fndecl)
10599 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10600 && cfun
10601 && !cfun->always_inline_functions_inlined
10602 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10603 }
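
/* Illustrative sketch, loosely modeled on the glibc _FORTIFY_SOURCE
   wrappers (the exact attributes and object-size flag are assumptions):

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding a call to 'strcpy' before this wrapper has been inlined would
   bypass the object-size check, hence the deferral above.  */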
10604
10605 /* A wrapper function for builtin folding that prevents warnings for
10606 "statement without effect" and the like, caused by removing the
10607 call node earlier than the warning is generated. */
10608
10609 tree
10610 fold_call_expr (location_t loc, tree exp, bool ignore)
10611 {
10612 tree ret = NULL_TREE;
10613 tree fndecl = get_callee_fndecl (exp);
10614 if (fndecl && fndecl_built_in_p (fndecl)
10615 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10616 yet. Defer folding until we see all the arguments
10617 (after inlining). */
10618 && !CALL_EXPR_VA_ARG_PACK (exp))
10619 {
10620 int nargs = call_expr_nargs (exp);
10621
10622 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10623 instead last argument is __builtin_va_arg_pack (). Defer folding
10624 even in that case, until arguments are finalized. */
10625 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10626 {
10627 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10628 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10629 return NULL_TREE;
10630 }
10631
10632 if (avoid_folding_inline_builtin (fndecl))
10633 return NULL_TREE;
10634
10635 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10636 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10637 CALL_EXPR_ARGP (exp), ignore);
10638 else
10639 {
10640 tree *args = CALL_EXPR_ARGP (exp);
10641 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10642 if (ret)
10643 return ret;
10644 }
10645 }
10646 return NULL_TREE;
10647 }
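
/* Illustrative sketch (adapted from the documented use of
   __builtin_va_arg_pack; 'mylog' and its wrapper are hypothetical):

     extern int mylog (const char *, ...);

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) int
     mylog_wrapper (const char *fmt, ...)
     {
       return mylog (fmt, __builtin_va_arg_pack ());
     }

   Until 'mylog_wrapper' is inlined, the argument list of the inner call is
   not final, so folding of such calls is deferred as described above.  */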
10648
10649 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10650 N arguments are passed in the array ARGARRAY. Return a folded
10651 expression or NULL_TREE if no simplification was possible. */
10652
10653 tree
10654 fold_builtin_call_array (location_t loc, tree,
10655 tree fn,
10656 int n,
10657 tree *argarray)
10658 {
10659 if (TREE_CODE (fn) != ADDR_EXPR)
10660 return NULL_TREE;
10661
10662 tree fndecl = TREE_OPERAND (fn, 0);
10663 if (TREE_CODE (fndecl) == FUNCTION_DECL
10664 && fndecl_built_in_p (fndecl))
10665 {
10666 /* If last argument is __builtin_va_arg_pack (), arguments to this
10667 function are not finalized yet. Defer folding until they are. */
10668 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10669 {
10670 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10671 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10672 return NULL_TREE;
10673 }
10674 if (avoid_folding_inline_builtin (fndecl))
10675 return NULL_TREE;
10676 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10677 return targetm.fold_builtin (fndecl, n, argarray, false);
10678 else
10679 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10680 }
10681
10682 return NULL_TREE;
10683 }
10684
10685 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10686 along with N new arguments specified as the "..." parameters. SKIP
10687 is the number of arguments in EXP to be omitted. This function is used
10688 to do varargs-to-varargs transformations. */
10689
10690 static tree
10691 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10692 {
10693 va_list ap;
10694 tree t;
10695
10696 va_start (ap, n);
10697 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10698 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10699 va_end (ap);
10700
10701 return t;
10702 }
10703
10704 /* Validate a single argument ARG against a tree code CODE representing
10705 a type. Return true when argument is valid. */
10706
10707 static bool
10708 validate_arg (const_tree arg, enum tree_code code)
10709 {
10710 if (!arg)
10711 return false;
10712 else if (code == POINTER_TYPE)
10713 return POINTER_TYPE_P (TREE_TYPE (arg));
10714 else if (code == INTEGER_TYPE)
10715 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10716 return code == TREE_CODE (TREE_TYPE (arg));
10717 }
10718
10719 /* This function validates the types of a function call argument list
10720 against a specified list of tree_codes. If the last specifier is a 0,
10721 that represents an ellipsis; otherwise the last specifier must be a
10722 VOID_TYPE.
10723
10724 This is the GIMPLE version of validate_arglist. Eventually we want to
10725 completely convert builtins.c to work from GIMPLEs and the tree based
10726 validate_arglist will then be removed. */
10727
10728 bool
10729 validate_gimple_arglist (const gcall *call, ...)
10730 {
10731 enum tree_code code;
10732 bool res = false;
10733 va_list ap;
10734 const_tree arg;
10735 size_t i;
10736
10737 va_start (ap, call);
10738 i = 0;
10739
10740 do
10741 {
10742 code = (enum tree_code) va_arg (ap, int);
10743 switch (code)
10744 {
10745 case 0:
10746 /* This signifies an ellipsis; any further arguments are all ok. */
10747 res = true;
10748 goto end;
10749 case VOID_TYPE:
10750 /* This signifies an endlink; if no arguments remain, return
10751 true, otherwise return false. */
10752 res = (i == gimple_call_num_args (call));
10753 goto end;
10754 default:
10755 /* If no parameters remain or the parameter's code does not
10756 match the specified code, return false. Otherwise continue
10757 checking any remaining arguments. */
10758 arg = gimple_call_arg (call, i++);
10759 if (!validate_arg (arg, code))
10760 goto end;
10761 break;
10762 }
10763 }
10764 while (1);
10765
10766 /* We need gotos here since we can only have one VA_CLOSE in a
10767 function. */
10768 end: ;
10769 va_end (ap);
10770
10771 return res;
10772 }
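
/* Illustrative sketch (not from the original sources): a memset-like
   signature can be checked with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                              INTEGER_TYPE, VOID_TYPE)

   while a trailing 0 accepts any further arguments, e.g.

     validate_gimple_arglist (call, POINTER_TYPE, 0)

   matches a pointer followed by anything.  */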
10773
10774 /* Default target-specific builtin expander that does nothing. */
10775
10776 rtx
10777 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10778 rtx target ATTRIBUTE_UNUSED,
10779 rtx subtarget ATTRIBUTE_UNUSED,
10780 machine_mode mode ATTRIBUTE_UNUSED,
10781 int ignore ATTRIBUTE_UNUSED)
10782 {
10783 return NULL_RTX;
10784 }
10785
10786 /* Returns true if EXP represents data that would potentially reside
10787 in a readonly section. */
10788
10789 bool
10790 readonly_data_expr (tree exp)
10791 {
10792 STRIP_NOPS (exp);
10793
10794 if (TREE_CODE (exp) != ADDR_EXPR)
10795 return false;
10796
10797 exp = get_base_address (TREE_OPERAND (exp, 0));
10798 if (!exp)
10799 return false;
10800
10801 /* Make sure we call decl_readonly_section only for trees it
10802 can handle (since it returns true for everything it doesn't
10803 understand). */
10804 if (TREE_CODE (exp) == STRING_CST
10805 || TREE_CODE (exp) == CONSTRUCTOR
10806 || (VAR_P (exp) && TREE_STATIC (exp)))
10807 return decl_readonly_section (exp, 0);
10808 else
10809 return false;
10810 }
10811
10812 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10813 to the call, and TYPE is its return type.
10814
10815 Return NULL_TREE if no simplification was possible, otherwise return the
10816 simplified form of the call as a tree.
10817
10818 The simplified form may be a constant or other expression which
10819 computes the same value, but in a more efficient manner (including
10820 calls to other builtin functions).
10821
10822 The call may contain arguments which need to be evaluated, but
10823 which are not useful to determine the result of the call. In
10824 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10825 COMPOUND_EXPR will be an argument which must be evaluated.
10826 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10827 COMPOUND_EXPR in the chain will contain the tree for the simplified
10828 form of the builtin function call. */
10829
10830 static tree
10831 fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
10832 {
10833 if (!validate_arg (s1, POINTER_TYPE)
10834 || !validate_arg (s2, POINTER_TYPE))
10835 return NULL_TREE;
10836
10837 if (!check_nul_terminated_array (expr, s1)
10838 || !check_nul_terminated_array (expr, s2))
10839 return NULL_TREE;
10840
10841 tree fn;
10842 const char *p1, *p2;
10843
10844 p2 = c_getstr (s2);
10845 if (p2 == NULL)
10846 return NULL_TREE;
10847
10848 p1 = c_getstr (s1);
10849 if (p1 != NULL)
10850 {
10851 const char *r = strpbrk (p1, p2);
10852 tree tem;
10853
10854 if (r == NULL)
10855 return build_int_cst (TREE_TYPE (s1), 0);
10856
10857 /* Return an offset into the constant string argument. */
10858 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10859 return fold_convert_loc (loc, type, tem);
10860 }
10861
10862 if (p2[0] == '\0')
10863 /* strpbrk(x, "") == NULL.
10864 Evaluate and ignore s1 in case it had side-effects. */
10865 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10866
10867 if (p2[1] != '\0')
10868 return NULL_TREE; /* Really call strpbrk. */
10869
10870 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10871 if (!fn)
10872 return NULL_TREE;
10873
10874 /* New argument list transforming strpbrk(s1, s2) to
10875 strchr(s1, s2[0]). */
10876 return build_call_expr_loc (loc, fn, 2, s1,
10877 build_int_cst (integer_type_node, p2[0]));
10878 }
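
/* Illustrative sketch (not from the original sources) of the strpbrk
   folding above, with 's' a hypothetical char pointer:

     strpbrk ("hello", "lo")  -> "hello" + 2      (points at the first 'l')
     strpbrk (s, "")          -> NULL             (s still evaluated)
     strpbrk (s, "l")         -> strchr (s, 'l')  */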
10879
10880 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10881 to the call.
10882
10883 Return NULL_TREE if no simplification was possible, otherwise return the
10884 simplified form of the call as a tree.
10885
10886 The simplified form may be a constant or other expression which
10887 computes the same value, but in a more efficient manner (including
10888 calls to other builtin functions).
10889
10890 The call may contain arguments which need to be evaluated, but
10891 which are not useful to determine the result of the call. In
10892 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10893 COMPOUND_EXPR will be an argument which must be evaluated.
10894 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10895 COMPOUND_EXPR in the chain will contain the tree for the simplified
10896 form of the builtin function call. */
10897
10898 static tree
10899 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10900 {
10901 if (!validate_arg (s1, POINTER_TYPE)
10902 || !validate_arg (s2, POINTER_TYPE))
10903 return NULL_TREE;
10904
10905 if (!check_nul_terminated_array (expr, s1)
10906 || !check_nul_terminated_array (expr, s2))
10907 return NULL_TREE;
10908
10909 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10910
10911 /* If either argument is "", the result is zero. */
10912 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10913 /* Evaluate and ignore both arguments in case either one has
10914 side-effects. */
10915 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10916 s1, s2);
10917 return NULL_TREE;
10918 }
10919
10920 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10921 to the call.
10922
10923 Return NULL_TREE if no simplification was possible, otherwise return the
10924 simplified form of the call as a tree.
10925
10926 The simplified form may be a constant or other expression which
10927 computes the same value, but in a more efficient manner (including
10928 calls to other builtin functions).
10929
10930 The call may contain arguments which need to be evaluated, but
10931 which are not useful to determine the result of the call. In
10932 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10933 COMPOUND_EXPR will be an argument which must be evaluated.
10934 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10935 COMPOUND_EXPR in the chain will contain the tree for the simplified
10936 form of the builtin function call. */
10937
10938 static tree
10939 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10940 {
10941 if (!validate_arg (s1, POINTER_TYPE)
10942 || !validate_arg (s2, POINTER_TYPE))
10943 return NULL_TREE;
10944
10945 if (!check_nul_terminated_array (expr, s1)
10946 || !check_nul_terminated_array (expr, s2))
10947 return NULL_TREE;
10948
10949 /* If the first argument is "", the result is zero. */
10950 const char *p1 = c_getstr (s1);
10951 if (p1 && *p1 == '\0')
10952 {
10953 /* Evaluate and ignore argument s2 in case it has
10954 side-effects. */
10955 return omit_one_operand_loc (loc, size_type_node,
10956 size_zero_node, s2);
10957 }
10958
10959 /* If the second argument is "", return __builtin_strlen(s1). */
10960 const char *p2 = c_getstr (s2);
10961 if (p2 && *p2 == '\0')
10962 {
10963 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10964
10965 /* If the replacement _DECL isn't initialized, don't do the
10966 transformation. */
10967 if (!fn)
10968 return NULL_TREE;
10969
10970 return build_call_expr_loc (loc, fn, 1, s1);
10971 }
10972 return NULL_TREE;
10973 }
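
/* Illustrative sketch (not from the original sources) of the strspn and
   strcspn foldings above, with 's' a hypothetical char pointer:

     strspn (s, "")    -> 0          strspn ("", s)   -> 0
     strcspn ("", s)   -> 0          strcspn (s, "")  -> strlen (s)

   with the unused arguments still evaluated for their side effects.  */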
10974
10975 /* Fold the next_arg or va_start call EXP. Returns true if an error was
10976 produced, false otherwise. This is done so that we don't output the error
10977 or warning more than once. */
10978
10979 bool
10980 fold_builtin_next_arg (tree exp, bool va_start_p)
10981 {
10982 tree fntype = TREE_TYPE (current_function_decl);
10983 int nargs = call_expr_nargs (exp);
10984 tree arg;
10985 /* There is a good chance the current input_location points inside the
10986 definition of the va_start macro (perhaps on the token for
10987 builtin) in a system header, so warnings will not be emitted.
10988 Use the location in real source code. */
10989 location_t current_location =
10990 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10991 NULL);
10992
10993 if (!stdarg_p (fntype))
10994 {
10995 error ("%<va_start%> used in function with fixed arguments");
10996 return true;
10997 }
10998
10999 if (va_start_p)
11000 {
11001 if (va_start_p && (nargs != 2))
11002 {
11003 error ("wrong number of arguments to function %<va_start%>");
11004 return true;
11005 }
11006 arg = CALL_EXPR_ARG (exp, 1);
11007 }
11008 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11009 when we checked the arguments and if needed issued a warning. */
11010 else
11011 {
11012 if (nargs == 0)
11013 {
11014 /* Evidently an out of date version of <stdarg.h>; can't validate
11015 va_start's second argument, but can still work as intended. */
11016 warning_at (current_location,
11017 OPT_Wvarargs,
11018 "%<__builtin_next_arg%> called without an argument");
11019 return true;
11020 }
11021 else if (nargs > 1)
11022 {
11023 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11024 return true;
11025 }
11026 arg = CALL_EXPR_ARG (exp, 0);
11027 }
11028
11029 if (TREE_CODE (arg) == SSA_NAME)
11030 arg = SSA_NAME_VAR (arg);
11031
11032 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11033 or __builtin_next_arg (0) the first time we see it, after checking
11034 the arguments and if needed issuing a warning. */
11035 if (!integer_zerop (arg))
11036 {
11037 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11038
11039 /* Strip off all nops for the sake of the comparison. This
11040 is not quite the same as STRIP_NOPS. It does more.
11041 We must also strip off INDIRECT_EXPR for C++ reference
11042 parameters. */
11043 while (CONVERT_EXPR_P (arg)
11044 || TREE_CODE (arg) == INDIRECT_REF)
11045 arg = TREE_OPERAND (arg, 0);
11046 if (arg != last_parm)
11047 {
11048 /* FIXME: Sometimes with the tree optimizers we can end up with
11049 something other than the last argument even though the user used
11050 the last argument. We just warn and set the arg to be the last
11051 argument so that we will get wrong-code because of
11052 it. */
11053 warning_at (current_location,
11054 OPT_Wvarargs,
11055 "second parameter of %<va_start%> not last named argument");
11056 }
11057
11058 /* Undefined by C99 7.15.1.4p4 (va_start):
11059 "If the parameter parmN is declared with the register storage
11060 class, with a function or array type, or with a type that is
11061 not compatible with the type that results after application of
11062 the default argument promotions, the behavior is undefined."
11063 */
11064 else if (DECL_REGISTER (arg))
11065 {
11066 warning_at (current_location,
11067 OPT_Wvarargs,
11068 "undefined behavior when second parameter of "
11069 "%<va_start%> is declared with %<register%> storage");
11070 }
11071
11072 /* We want to verify the second parameter just once before the tree
11073 optimizers are run and then avoid keeping it in the tree,
11074 as otherwise we could warn even for correct code like:
11075 void foo (int i, ...)
11076 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11077 if (va_start_p)
11078 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11079 else
11080 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11081 }
11082 return false;
11083 }
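
/* Illustrative sketch (not from the original sources) of the checking
   above, using <stdarg.h>:

     void
     foo (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   Here 'a' is not the last named parameter, so the va_start call gets the
   "not last named argument" warning; after the (single) diagnostic the
   second argument is replaced by 0 so later passes do not warn again.  */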
11084
11085
11086 /* Expand a call EXP to __builtin_object_size. */
11087
11088 static rtx
11089 expand_builtin_object_size (tree exp)
11090 {
11091 tree ost;
11092 int object_size_type;
11093 tree fndecl = get_callee_fndecl (exp);
11094
11095 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11096 {
11097 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
11098 exp, fndecl);
11099 expand_builtin_trap ();
11100 return const0_rtx;
11101 }
11102
11103 ost = CALL_EXPR_ARG (exp, 1);
11104 STRIP_NOPS (ost);
11105
11106 if (TREE_CODE (ost) != INTEGER_CST
11107 || tree_int_cst_sgn (ost) < 0
11108 || compare_tree_int (ost, 3) > 0)
11109 {
11110 error ("%Klast argument of %qD is not integer constant between 0 and 3",
11111 exp, fndecl);
11112 expand_builtin_trap ();
11113 return const0_rtx;
11114 }
11115
11116 object_size_type = tree_to_shwi (ost);
11117
11118 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11119 }
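
/* Illustrative sketch (not from the original sources): if the size of the
   object 'p' points to is still unknown at expansion time, the fallback
   above yields

     __builtin_object_size (p, 0)  -> (size_t) -1
     __builtin_object_size (p, 2)  -> (size_t) 0

   i.e. "unlimited" for the maximum-size modes 0 and 1 and zero for the
   minimum-size modes 2 and 3.  */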
11120
11121 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11122 FCODE is the BUILT_IN_* to use.
11123 Return NULL_RTX if we failed; the caller should emit a normal call,
11124 otherwise try to get the result in TARGET, if convenient (and in
11125 mode MODE if that's convenient). */
11126
11127 static rtx
11128 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11129 enum built_in_function fcode)
11130 {
11131 if (!validate_arglist (exp,
11132 POINTER_TYPE,
11133 fcode == BUILT_IN_MEMSET_CHK
11134 ? INTEGER_TYPE : POINTER_TYPE,
11135 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11136 return NULL_RTX;
11137
11138 tree dest = CALL_EXPR_ARG (exp, 0);
11139 tree src = CALL_EXPR_ARG (exp, 1);
11140 tree len = CALL_EXPR_ARG (exp, 2);
11141 tree size = CALL_EXPR_ARG (exp, 3);
11142
11143 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
11144 /*str=*/NULL_TREE, size);
11145
11146 if (!tree_fits_uhwi_p (size))
11147 return NULL_RTX;
11148
11149 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11150 {
11151 /* Avoid transforming the checking call to an ordinary one when
11152 an overflow has been detected or when the call couldn't be
11153 validated because the size is not constant. */
11154 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11155 return NULL_RTX;
11156
11157 tree fn = NULL_TREE;
11158 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11159 mem{cpy,pcpy,move,set} is available. */
11160 switch (fcode)
11161 {
11162 case BUILT_IN_MEMCPY_CHK:
11163 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11164 break;
11165 case BUILT_IN_MEMPCPY_CHK:
11166 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11167 break;
11168 case BUILT_IN_MEMMOVE_CHK:
11169 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11170 break;
11171 case BUILT_IN_MEMSET_CHK:
11172 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11173 break;
11174 default:
11175 break;
11176 }
11177
11178 if (! fn)
11179 return NULL_RTX;
11180
11181 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11182 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11183 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11184 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11185 }
11186 else if (fcode == BUILT_IN_MEMSET_CHK)
11187 return NULL_RTX;
11188 else
11189 {
11190 unsigned int dest_align = get_pointer_alignment (dest);
11191
11192 /* If DEST is not a pointer type, call the normal function. */
11193 if (dest_align == 0)
11194 return NULL_RTX;
11195
11196 /* If SRC and DEST are the same (and not volatile), do nothing. */
11197 if (operand_equal_p (src, dest, 0))
11198 {
11199 tree expr;
11200
11201 if (fcode != BUILT_IN_MEMPCPY_CHK)
11202 {
11203 /* Evaluate and ignore LEN in case it has side-effects. */
11204 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11205 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11206 }
11207
11208 expr = fold_build_pointer_plus (dest, len);
11209 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11210 }
11211
11212 /* __memmove_chk special case. */
11213 if (fcode == BUILT_IN_MEMMOVE_CHK)
11214 {
11215 unsigned int src_align = get_pointer_alignment (src);
11216
11217 if (src_align == 0)
11218 return NULL_RTX;
11219
11220 /* If src is categorized for a readonly section we can use
11221 normal __memcpy_chk. */
11222 if (readonly_data_expr (src))
11223 {
11224 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11225 if (!fn)
11226 return NULL_RTX;
11227 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11228 dest, src, len, size);
11229 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11230 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11231 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11232 }
11233 }
11234 return NULL_RTX;
11235 }
11236 }
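
/* Illustrative sketch (not from the original sources), with 'src' a
   hypothetical pointer:

     char buf[64];
     __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));

   Since the constant length 16 fits in the 64-byte destination, the
   checking call can be expanded as a plain memcpy (buf, src, 16).  If the
   length exceeded the object size, the call would be left alone so the
   diagnostics and the runtime check still trigger.  */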
11237
11238 /* Emit warning if a buffer overflow is detected at compile time. */
11239
11240 static void
11241 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11242 {
11243 /* The source string. */
11244 tree srcstr = NULL_TREE;
11245 /* The size of the destination object. */
11246 tree objsize = NULL_TREE;
11247 /* The string that is being concatenated with (as in __strcat_chk)
11248 or null if it isn't. */
11249 tree catstr = NULL_TREE;
11250 /* The maximum length of the source sequence in a bounded operation
11251 (such as __strncat_chk) or null if the operation isn't bounded
11252 (such as __strcat_chk). */
11253 tree maxread = NULL_TREE;
11254 /* The exact size of the access (such as in __strncpy_chk). */
11255 tree size = NULL_TREE;
11256
11257 switch (fcode)
11258 {
11259 case BUILT_IN_STRCPY_CHK:
11260 case BUILT_IN_STPCPY_CHK:
11261 srcstr = CALL_EXPR_ARG (exp, 1);
11262 objsize = CALL_EXPR_ARG (exp, 2);
11263 break;
11264
11265 case BUILT_IN_STRCAT_CHK:
11266 /* For __strcat_chk the warning will be emitted only if overflowing
11267 by at least strlen (dest) + 1 bytes. */
11268 catstr = CALL_EXPR_ARG (exp, 0);
11269 srcstr = CALL_EXPR_ARG (exp, 1);
11270 objsize = CALL_EXPR_ARG (exp, 2);
11271 break;
11272
11273 case BUILT_IN_STRNCAT_CHK:
11274 catstr = CALL_EXPR_ARG (exp, 0);
11275 srcstr = CALL_EXPR_ARG (exp, 1);
11276 maxread = CALL_EXPR_ARG (exp, 2);
11277 objsize = CALL_EXPR_ARG (exp, 3);
11278 break;
11279
11280 case BUILT_IN_STRNCPY_CHK:
11281 case BUILT_IN_STPNCPY_CHK:
11282 srcstr = CALL_EXPR_ARG (exp, 1);
11283 size = CALL_EXPR_ARG (exp, 2);
11284 objsize = CALL_EXPR_ARG (exp, 3);
11285 break;
11286
11287 case BUILT_IN_SNPRINTF_CHK:
11288 case BUILT_IN_VSNPRINTF_CHK:
11289 maxread = CALL_EXPR_ARG (exp, 1);
11290 objsize = CALL_EXPR_ARG (exp, 3);
11291 break;
11292 default:
11293 gcc_unreachable ();
11294 }
11295
11296 if (catstr && maxread)
11297 {
11298 /* Check __strncat_chk. There is no way to determine the length
11299 of the string to which the source string is being appended so
11300 just warn when the length of the source string is not known. */
11301 check_strncat_sizes (exp, objsize);
11302 return;
11303 }
11304
11305 /* The destination argument is the first one for all built-ins above. */
11306 tree dst = CALL_EXPR_ARG (exp, 0);
11307
11308 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
11309 }
11310
11311 /* Emit warning if a buffer overflow is detected at compile time
11312 in __sprintf_chk/__vsprintf_chk calls. */
11313
11314 static void
11315 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11316 {
11317 tree size, len, fmt;
11318 const char *fmt_str;
11319 int nargs = call_expr_nargs (exp);
11320
11321 /* Verify the required arguments in the original call. */
11322
11323 if (nargs < 4)
11324 return;
11325 size = CALL_EXPR_ARG (exp, 2);
11326 fmt = CALL_EXPR_ARG (exp, 3);
11327
11328 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11329 return;
11330
11331 /* Check whether the format is a literal string constant. */
11332 fmt_str = c_getstr (fmt);
11333 if (fmt_str == NULL)
11334 return;
11335
11336 if (!init_target_chars ())
11337 return;
11338
11339 /* If the format doesn't contain % args or %%, we know its size. */
11340 if (strchr (fmt_str, target_percent) == 0)
11341 len = build_int_cstu (size_type_node, strlen (fmt_str));
11342 /* If the format is "%s" and first ... argument is a string literal,
11343 we know it too. */
11344 else if (fcode == BUILT_IN_SPRINTF_CHK
11345 && strcmp (fmt_str, target_percent_s) == 0)
11346 {
11347 tree arg;
11348
11349 if (nargs < 5)
11350 return;
11351 arg = CALL_EXPR_ARG (exp, 4);
11352 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11353 return;
11354
11355 len = c_strlen (arg, 1);
11356 if (!len || ! tree_fits_uhwi_p (len))
11357 return;
11358 }
11359 else
11360 return;
11361
11362 /* Add one for the terminating nul. */
11363 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11364
11365 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11366 /*maxread=*/NULL_TREE, len, size);
11367 }
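
/* Illustrative sketch (not from the original sources):

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 0),
                              "%s", "hello");

   The "%s" format with a string-literal argument gives a known length of
   5, plus 1 for the terminating nul, which exceeds the 4-byte destination
   and therefore triggers the overflow warning at compile time.  */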
11368
11369 /* Emit warning if a free is called with address of a variable. */
11370
11371 static void
11372 maybe_emit_free_warning (tree exp)
11373 {
11374 if (call_expr_nargs (exp) != 1)
11375 return;
11376
11377 tree arg = CALL_EXPR_ARG (exp, 0);
11378
11379 STRIP_NOPS (arg);
11380 if (TREE_CODE (arg) != ADDR_EXPR)
11381 return;
11382
11383 arg = get_base_address (TREE_OPERAND (arg, 0));
11384 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11385 return;
11386
11387 if (SSA_VAR_P (arg))
11388 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11389 "%Kattempt to free a non-heap object %qD", exp, arg);
11390 else
11391 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11392 "%Kattempt to free a non-heap object", exp);
11393 }
11394
11395 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11396 if possible. */
11397
11398 static tree
11399 fold_builtin_object_size (tree ptr, tree ost)
11400 {
11401 unsigned HOST_WIDE_INT bytes;
11402 int object_size_type;
11403
11404 if (!validate_arg (ptr, POINTER_TYPE)
11405 || !validate_arg (ost, INTEGER_TYPE))
11406 return NULL_TREE;
11407
11408 STRIP_NOPS (ost);
11409
11410 if (TREE_CODE (ost) != INTEGER_CST
11411 || tree_int_cst_sgn (ost) < 0
11412 || compare_tree_int (ost, 3) > 0)
11413 return NULL_TREE;
11414
11415 object_size_type = tree_to_shwi (ost);
11416
11417 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11418 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11419 and (size_t) 0 for types 2 and 3. */
11420 if (TREE_SIDE_EFFECTS (ptr))
11421 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11422
11423 if (TREE_CODE (ptr) == ADDR_EXPR)
11424 {
11425 compute_builtin_object_size (ptr, object_size_type, &bytes);
11426 if (wi::fits_to_tree_p (bytes, size_type_node))
11427 return build_int_cstu (size_type_node, bytes);
11428 }
11429 else if (TREE_CODE (ptr) == SSA_NAME)
11430 {
11431 /* If object size is not known yet, delay folding until
11432 later. Maybe subsequent passes will help determining
11433 it. */
11434 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11435 && wi::fits_to_tree_p (bytes, size_type_node))
11436 return build_int_cstu (size_type_node, bytes);
11437 }
11438
11439 return NULL_TREE;
11440 }
11441
11442 /* Builtins with folding operations that operate on "..." arguments
11443 need special handling; we need to store the arguments in a convenient
11444 data structure before attempting any folding. Fortunately there are
11445 only a few builtins that fall into this category. FNDECL is the
11446 function, EXP is the CALL_EXPR for the call. */
11447
11448 static tree
11449 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11450 {
11451 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11452 tree ret = NULL_TREE;
11453
11454 switch (fcode)
11455 {
11456 case BUILT_IN_FPCLASSIFY:
11457 ret = fold_builtin_fpclassify (loc, args, nargs);
11458 break;
11459
11460 default:
11461 break;
11462 }
11463 if (ret)
11464 {
11465 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11466 SET_EXPR_LOCATION (ret, loc);
11467 TREE_NO_WARNING (ret) = 1;
11468 return ret;
11469 }
11470 return NULL_TREE;
11471 }
11472
11473 /* Initialize format string characters in the target charset. */
11474
11475 bool
11476 init_target_chars (void)
11477 {
11478 static bool init;
11479 if (!init)
11480 {
11481 target_newline = lang_hooks.to_target_charset ('\n');
11482 target_percent = lang_hooks.to_target_charset ('%');
11483 target_c = lang_hooks.to_target_charset ('c');
11484 target_s = lang_hooks.to_target_charset ('s');
11485 if (target_newline == 0 || target_percent == 0 || target_c == 0
11486 || target_s == 0)
11487 return false;
11488
11489 target_percent_c[0] = target_percent;
11490 target_percent_c[1] = target_c;
11491 target_percent_c[2] = '\0';
11492
11493 target_percent_s[0] = target_percent;
11494 target_percent_s[1] = target_s;
11495 target_percent_s[2] = '\0';
11496
11497 target_percent_s_newline[0] = target_percent;
11498 target_percent_s_newline[1] = target_s;
11499 target_percent_s_newline[2] = target_newline;
11500 target_percent_s_newline[3] = '\0';
11501
11502 init = true;
11503 }
11504 return true;
11505 }
11506
11507 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11508 and no overflow/underflow occurred. INEXACT is true if M was not
11509 exactly calculated. TYPE is the tree type for the result. This
11510 function assumes that you cleared the MPFR flags and then
11511 calculated M to see if anything subsequently set a flag prior to
11512 entering this function. Return NULL_TREE if any checks fail. */
11513
11514 static tree
11515 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11516 {
11517 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11518 overflow/underflow occurred. If -frounding-math, proceed iff the
11519 result of calling FUNC was exact. */
11520 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11521 && (!flag_rounding_math || !inexact))
11522 {
11523 REAL_VALUE_TYPE rr;
11524
11525 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11526 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11527 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11528 but the mpft_t is not, then we underflowed in the
11529 conversion. */
11530 if (real_isfinite (&rr)
11531 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11532 {
11533 REAL_VALUE_TYPE rmode;
11534
11535 real_convert (&rmode, TYPE_MODE (type), &rr);
11536 /* Proceed iff the specified mode can hold the value. */
11537 if (real_identical (&rmode, &rr))
11538 return build_real (type, rmode);
11539 }
11540 }
11541 return NULL_TREE;
11542 }
11543
11544 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11545 number and no overflow/underflow occurred. INEXACT is true if M
11546 was not exactly calculated. TYPE is the tree type for the result.
11547 This function assumes that you cleared the MPFR flags and then
11548 calculated M to see if anything subsequently set a flag prior to
11549 entering this function. Return NULL_TREE if any checks fail; if
11550 FORCE_CONVERT is true, the checks are bypassed. */
11551
11552 static tree
11553 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11554 {
11555 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11556 overflow/underflow occurred. If -frounding-math, proceed iff the
11557 result of calling FUNC was exact. */
11558 if (force_convert
11559 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11560 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11561 && (!flag_rounding_math || !inexact)))
11562 {
11563 REAL_VALUE_TYPE re, im;
11564
11565 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11566 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11567 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11568 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11569 but the mpfr_t is not, then we underflowed in the
11570 conversion. */
11571 if (force_convert
11572 || (real_isfinite (&re) && real_isfinite (&im)
11573 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11574 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11575 {
11576 REAL_VALUE_TYPE re_mode, im_mode;
11577
11578 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11579 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11580 /* Proceed iff the specified mode can hold the value. */
11581 if (force_convert
11582 || (real_identical (&re_mode, &re)
11583 && real_identical (&im_mode, &im)))
11584 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11585 build_real (TREE_TYPE (type), im_mode));
11586 }
11587 }
11588 return NULL_TREE;
11589 }
11590
11591 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11592 the pointer *(ARG_QUO) and return the result. The type is taken
11593 from the type of ARG0 and is used for setting the precision of the
11594 calculation and results. */
11595
11596 static tree
11597 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11598 {
11599 tree const type = TREE_TYPE (arg0);
11600 tree result = NULL_TREE;
11601
11602 STRIP_NOPS (arg0);
11603 STRIP_NOPS (arg1);
11604
11605 /* To proceed, MPFR must exactly represent the target floating point
11606 format, which only happens when the target base equals two. */
11607 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11608 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11609 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11610 {
11611 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11612 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11613
11614 if (real_isfinite (ra0) && real_isfinite (ra1))
11615 {
11616 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11617 const int prec = fmt->p;
11618 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11619 tree result_rem;
11620 long integer_quo;
11621 mpfr_t m0, m1;
11622
11623 mpfr_inits2 (prec, m0, m1, NULL);
11624 mpfr_from_real (m0, ra0, MPFR_RNDN);
11625 mpfr_from_real (m1, ra1, MPFR_RNDN);
11626 mpfr_clear_flags ();
11627 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11628 /* Remquo is independent of the rounding mode, so pass
11629 inexact=0 to do_mpfr_ckconv(). */
11630 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11631 mpfr_clears (m0, m1, NULL);
11632 if (result_rem)
11633 {
11634 /* MPFR calculates quo in the host's long so it may
11635 return more bits in quo than the target int can hold
11636 if sizeof(host long) > sizeof(target int). This can
11637 happen even for native compilers in LP64 mode. In
11638 these cases, reduce the quo value modulo the largest
11639 number that the target int can hold while leaving one
11640 bit for the sign. */
11641 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11642 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11643
11644 /* Dereference the quo pointer argument. */
11645 arg_quo = build_fold_indirect_ref (arg_quo);
11646 /* Proceed iff a valid pointer type was passed in. */
11647 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11648 {
11649 /* Set the value. */
11650 tree result_quo
11651 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11652 build_int_cst (TREE_TYPE (arg_quo),
11653 integer_quo));
11654 TREE_SIDE_EFFECTS (result_quo) = 1;
11655 /* Combine the quo assignment with the rem. */
11656 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11657 result_quo, result_rem));
11658 }
11659 }
11660 }
11661 }
11662 return result;
11663 }
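
/* Illustrative sketch (not from the original sources): with both arguments
   constant, e.g.

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   the call is folded at compile time to the equivalent of (q = 2, -1.0),
   since 5.0 = 2 * 3.0 - 1.0 with the quotient rounded to nearest.  */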
11664
11665 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11666 resulting value as a tree with type TYPE. The mpfr precision is
11667 set to the precision of TYPE. We assume that this mpfr function
11668 returns zero if the result could be calculated exactly within the
11669 requested precision. In addition, the integer pointer represented
11670 by ARG_SG will be dereferenced and set to the appropriate signgam
11671 (-1,1) value. */
11672
11673 static tree
11674 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11675 {
11676 tree result = NULL_TREE;
11677
11678 STRIP_NOPS (arg);
11679
11680 /* To proceed, MPFR must exactly represent the target floating point
11681 format, which only happens when the target base equals two. Also
11682 verify ARG is a constant and that ARG_SG is an int pointer. */
11683 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11684 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11685 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11686 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11687 {
11688 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11689
11690 /* In addition to NaN and Inf, the argument cannot be zero or a
11691 negative integer. */
11692 if (real_isfinite (ra)
11693 && ra->cl != rvc_zero
11694 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11695 {
11696 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11697 const int prec = fmt->p;
11698 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11699 int inexact, sg;
11700 mpfr_t m;
11701 tree result_lg;
11702
11703 mpfr_init2 (m, prec);
11704 mpfr_from_real (m, ra, MPFR_RNDN);
11705 mpfr_clear_flags ();
11706 inexact = mpfr_lgamma (m, &sg, m, rnd);
11707 result_lg = do_mpfr_ckconv (m, type, inexact);
11708 mpfr_clear (m);
11709 if (result_lg)
11710 {
11711 tree result_sg;
11712
11713 /* Dereference the arg_sg pointer argument. */
11714 arg_sg = build_fold_indirect_ref (arg_sg);
11715 /* Assign the signgam value into *arg_sg. */
11716 result_sg = fold_build2 (MODIFY_EXPR,
11717 TREE_TYPE (arg_sg), arg_sg,
11718 build_int_cst (TREE_TYPE (arg_sg), sg));
11719 TREE_SIDE_EFFECTS (result_sg) = 1;
11720 /* Combine the signgam assignment with the lgamma result. */
11721 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11722 result_sg, result_lg));
11723 }
11724 }
11725 }
11726
11727 return result;
11728 }
11729
11730 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11731 mpc function FUNC on it and return the resulting value as a tree
11732 with type TYPE. The mpfr precision is set to the precision of
11733 TYPE. We assume that function FUNC returns zero if the result
11734 could be calculated exactly within the requested precision. If
11735 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11736 in the arguments and/or results. */
11737
11738 tree
11739 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11740 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11741 {
11742 tree result = NULL_TREE;
11743
11744 STRIP_NOPS (arg0);
11745 STRIP_NOPS (arg1);
11746
11747 /* To proceed, MPFR must exactly represent the target floating point
11748 format, which only happens when the target base equals two. */
11749 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11750 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11751 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11752 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11753 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11754 {
11755 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11756 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11757 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11758 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11759
11760 if (do_nonfinite
11761 || (real_isfinite (re0) && real_isfinite (im0)
11762 && real_isfinite (re1) && real_isfinite (im1)))
11763 {
11764 const struct real_format *const fmt =
11765 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11766 const int prec = fmt->p;
11767 const mpfr_rnd_t rnd = fmt->round_towards_zero
11768 ? MPFR_RNDZ : MPFR_RNDN;
11769 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11770 int inexact;
11771 mpc_t m0, m1;
11772
11773 mpc_init2 (m0, prec);
11774 mpc_init2 (m1, prec);
11775 mpfr_from_real (mpc_realref (m0), re0, rnd);
11776 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11777 mpfr_from_real (mpc_realref (m1), re1, rnd);
11778 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11779 mpfr_clear_flags ();
11780 inexact = func (m0, m0, m1, crnd);
11781 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11782 mpc_clear (m0);
11783 mpc_clear (m1);
11784 }
11785 }
11786
11787 return result;
11788 }
11789
11790 /* A wrapper function for builtin folding that prevents warnings for
11791 "statement without effect" and the like, caused by removing the
11792 call node earlier than the warning is generated. */
11793
11794 tree
11795 fold_call_stmt (gcall *stmt, bool ignore)
11796 {
11797 tree ret = NULL_TREE;
11798 tree fndecl = gimple_call_fndecl (stmt);
11799 location_t loc = gimple_location (stmt);
11800 if (fndecl && fndecl_built_in_p (fndecl)
11801 && !gimple_call_va_arg_pack_p (stmt))
11802 {
11803 int nargs = gimple_call_num_args (stmt);
11804 tree *args = (nargs > 0
11805 ? gimple_call_arg_ptr (stmt, 0)
11806 : &error_mark_node);
11807
11808 if (avoid_folding_inline_builtin (fndecl))
11809 return NULL_TREE;
11810 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11811 {
11812 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11813 }
11814 else
11815 {
11816 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
11817 if (ret)
11818 {
11819 /* Propagate location information from original call to
11820 expansion of builtin. Otherwise things like
11821 maybe_emit_chk_warning, that operate on the expansion
11822 of a builtin, will use the wrong location information. */
11823 if (gimple_has_location (stmt))
11824 {
11825 tree realret = ret;
11826 if (TREE_CODE (ret) == NOP_EXPR)
11827 realret = TREE_OPERAND (ret, 0);
11828 if (CAN_HAVE_LOCATION_P (realret)
11829 && !EXPR_HAS_LOCATION (realret))
11830 SET_EXPR_LOCATION (realret, loc);
11831 return realret;
11832 }
11833 return ret;
11834 }
11835 }
11836 }
11837 return NULL_TREE;
11838 }
11839
11840 /* Look up the function in builtin_decl that corresponds to DECL
11841 and set ASMSPEC as its user assembler name. DECL must be a
11842 function decl that declares a builtin. */
11843
11844 void
11845 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11846 {
11847 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
11848 && asmspec != 0);
11849
11850 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11851 set_user_assembler_name (builtin, asmspec);
11852
11853 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11854 && INT_TYPE_SIZE < BITS_PER_WORD)
11855 {
11856 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
11857 set_user_assembler_libfunc ("ffs", asmspec);
11858 set_optab_libfunc (ffs_optab, mode, "ffs");
11859 }
11860 }
11861
11862 /* Return true if DECL is a builtin that expands to a constant or similarly
11863 simple code. */
11864 bool
11865 is_simple_builtin (tree decl)
11866 {
11867 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
11868 switch (DECL_FUNCTION_CODE (decl))
11869 {
11870 /* Builtins that expand to constants. */
11871 case BUILT_IN_CONSTANT_P:
11872 case BUILT_IN_EXPECT:
11873 case BUILT_IN_OBJECT_SIZE:
11874 case BUILT_IN_UNREACHABLE:
11875 /* Simple register moves or loads from stack. */
11876 case BUILT_IN_ASSUME_ALIGNED:
11877 case BUILT_IN_RETURN_ADDRESS:
11878 case BUILT_IN_EXTRACT_RETURN_ADDR:
11879 case BUILT_IN_FROB_RETURN_ADDR:
11880 case BUILT_IN_RETURN:
11881 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11882 case BUILT_IN_FRAME_ADDRESS:
11883 case BUILT_IN_VA_END:
11884 case BUILT_IN_STACK_SAVE:
11885 case BUILT_IN_STACK_RESTORE:
11886 /* Exception state returns or moves registers around. */
11887 case BUILT_IN_EH_FILTER:
11888 case BUILT_IN_EH_POINTER:
11889 case BUILT_IN_EH_COPY_VALUES:
11890 return true;
11891
11892 default:
11893 return false;
11894 }
11895
11896 return false;
11897 }
11898
11899 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11900 most probably expanded inline into reasonably simple code. This is a
11901 superset of is_simple_builtin. */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}

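/* Example (added for illustration; not part of the original source): the
   classification above means that calls such as the ones below are normally
   expanded to a few instructions rather than a real call, so passes that
   consult is_inexpensive_builtin may treat them as cheap.  */

static int
example_inexpensive_calls (unsigned int x)
{
  /* Both normally expand inline; `| 1' avoids the undefined
     __builtin_clz (0) case.  */
  return __builtin_popcount (x) + __builtin_clz (x | 1);
}
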
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}

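/* Example (added for illustration; not part of the original source): a
   sketch of the usual calling pattern in string-builtin folders - fold only
   when both the string and the character argument are known constants.  The
   helper name is hypothetical.  */

static bool
example_known_char_in_literal (tree str, tree chr)
{
  const char *p = c_getstr (str);  /* Non-NULL only for string constants.  */
  char c;
  if (!p || !target_char_cst_p (chr, &c))
    return false;
  return strchr (p, c) != NULL;
}
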
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that DECL is not
   implemented in a standard library (so the list of handled builtins
   below may be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
        return true;
      default:
        break;
      }
  return false;
}
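
/* Example (added for illustration; not part of the original source): every
   builtin accepted above has a real entry point in libc/libm, so a call that
   is not expanded inline can fall back to the library, as in this user-level
   function where __builtin_sin may end up as an ordinary call to sin.  */

static double
example_may_call_libm (double x)
{
  return __builtin_sin (x);
}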