c++: Correct the handling of alignof(expr) [PR88115]
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-ssa.h"
77 #include "tree-ssa-live.h"
78 #include "tree-outof-ssa.h"
79 #include "attr-fnspec.h"
80
81 struct target_builtins default_target_builtins;
82 #if SWITCHABLE_TARGET
83 struct target_builtins *this_target_builtins = &default_target_builtins;
84 #endif
85
86 /* Define the names of the builtin function types and codes. */
87 const char *const built_in_class_names[BUILT_IN_LAST]
88 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
89
90 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
91 const char * built_in_names[(int) END_BUILTINS] =
92 {
93 #include "builtins.def"
94 };
95
96 /* Set up an array of builtin_info_type; make sure each element's decl is
97 initialized to NULL_TREE. */
98 builtin_info_type builtin_info[(int)END_BUILTINS];
99
100 /* Non-zero if __builtin_constant_p should be folded right away. */
101 bool force_folding_builtin_constant_p;
102
103 static int target_char_cast (tree, char *);
104 static rtx get_memory_rtx (tree, tree);
105 static int apply_args_size (void);
106 static int apply_result_size (void);
107 static rtx result_vector (int, rtx);
108 static void expand_builtin_prefetch (tree);
109 static rtx expand_builtin_apply_args (void);
110 static rtx expand_builtin_apply_args_1 (void);
111 static rtx expand_builtin_apply (rtx, rtx, rtx);
112 static void expand_builtin_return (rtx);
113 static enum type_class type_to_class (tree);
114 static rtx expand_builtin_classify_type (tree);
115 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
116 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
117 static rtx expand_builtin_interclass_mathfn (tree, rtx);
118 static rtx expand_builtin_sincos (tree);
119 static rtx expand_builtin_cexpi (tree, rtx);
120 static rtx expand_builtin_int_roundingfn (tree, rtx);
121 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
122 static rtx expand_builtin_next_arg (void);
123 static rtx expand_builtin_va_start (tree);
124 static rtx expand_builtin_va_end (tree);
125 static rtx expand_builtin_va_copy (tree);
126 static rtx inline_expand_builtin_bytecmp (tree, rtx);
127 static rtx expand_builtin_strcmp (tree, rtx);
128 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
129 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
130 static rtx expand_builtin_memchr (tree, rtx);
131 static rtx expand_builtin_memcpy (tree, rtx);
132 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
133 rtx target, tree exp,
134 memop_ret retmode,
135 bool might_overlap);
136 static rtx expand_builtin_memmove (tree, rtx);
137 static rtx expand_builtin_mempcpy (tree, rtx);
138 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
139 static rtx expand_builtin_strcat (tree);
140 static rtx expand_builtin_strcpy (tree, rtx);
141 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
142 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
143 static rtx expand_builtin_stpncpy (tree, rtx);
144 static rtx expand_builtin_strncat (tree, rtx);
145 static rtx expand_builtin_strncpy (tree, rtx);
146 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
147 static rtx expand_builtin_memset (tree, rtx, machine_mode);
148 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 static rtx expand_builtin_bzero (tree);
150 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 static rtx expand_builtin_alloca (tree);
153 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 static rtx expand_builtin_frame_address (tree, tree);
155 static tree stabilize_va_list_loc (location_t, tree, int);
156 static rtx expand_builtin_expect (tree, rtx);
157 static rtx expand_builtin_expect_with_probability (tree, rtx);
158 static tree fold_builtin_constant_p (tree);
159 static tree fold_builtin_classify_type (tree);
160 static tree fold_builtin_strlen (location_t, tree, tree, tree);
161 static tree fold_builtin_inf (location_t, tree, int);
162 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
163 static bool validate_arg (const_tree, enum tree_code code);
164 static rtx expand_builtin_fabs (tree, rtx, rtx);
165 static rtx expand_builtin_signbit (tree, rtx);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 enum tree_code);
174 static tree fold_builtin_varargs (location_t, tree, tree*, int);
175
176 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
177 static tree fold_builtin_strspn (location_t, tree, tree, tree);
178 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
179
180 static rtx expand_builtin_object_size (tree);
181 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
182 enum built_in_function);
183 static void maybe_emit_chk_warning (tree, enum built_in_function);
184 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
185 static void maybe_emit_free_warning (tree);
186 static tree fold_builtin_object_size (tree, tree);
187 static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
188
189 unsigned HOST_WIDE_INT target_newline;
190 unsigned HOST_WIDE_INT target_percent;
191 static unsigned HOST_WIDE_INT target_c;
192 static unsigned HOST_WIDE_INT target_s;
193 char target_percent_c[3];
194 char target_percent_s[3];
195 char target_percent_s_newline[4];
196 static tree do_mpfr_remquo (tree, tree, tree);
197 static tree do_mpfr_lgamma_r (tree, tree, tree);
198 static void expand_builtin_sync_synchronize (void);
199
200 access_ref::access_ref (tree bound /* = NULL_TREE */,
201 bool minaccess /* = false */)
202 : ref (), eval ([](tree x){ return x; }), trail1special (true), base0 (true)
203 {
204 /* Set to valid. */
205 offrng[0] = offrng[1] = 0;
206 /* Invalidate. */
207 sizrng[0] = sizrng[1] = -1;
208
209 /* Set the default bounds of the access and adjust below. */
210 bndrng[0] = minaccess ? 1 : 0;
211 bndrng[1] = HOST_WIDE_INT_M1U;
212
213 /* When BOUND is nonnull and a range can be extracted from it,
214 set the bounds of the access to reflect both it and MINACCESS.
215 BNDRNG[0] is the size of the minimum access. */
216 tree rng[2];
217 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
218 {
219 bndrng[0] = wi::to_offset (rng[0]);
220 bndrng[1] = wi::to_offset (rng[1]);
221 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
222 }
223 }
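
/* A worked illustration of the constructor above (hypothetical values,
   not taken from a particular caller): for a BOUND that is the constant
   4, get_size_range yields [4, 4], so BNDRNG becomes [1, 4] when
   MINACCESS is true (at least one byte must be accessed) and [0, 4]
   when it is false.  With no BOUND the bounds stay at
   [MINACCESS ? 1 : 0, HOST_WIDE_INT_M1U].  */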
224
225 /* Return the maximum amount of space remaining and, if PMIN is non-null,
226 set *PMIN to the minimum. */
227
228 offset_int
229 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
230 {
231 offset_int minbuf;
232 if (!pmin)
233 pmin = &minbuf;
234
235 /* add_offset() ensures the offset range isn't inverted. */
236 gcc_checking_assert (offrng[0] <= offrng[1]);
237
238 if (base0)
239 {
240 /* The offset into the referenced object is zero-based (i.e., it's
241 not referenced by a pointer into the middle of some unknown object). */
242 if (offrng[0] < 0 && offrng[1] < 0)
243 {
244 /* If the offset is negative the remaining size is zero. */
245 *pmin = 0;
246 return 0;
247 }
248
249 if (sizrng[1] <= offrng[0])
250 {
251 /* If the starting offset is greater than or equal to the upper
252 bound on the size of the object, the space remaining is zero.
253 As a special case, if it's equal, set *PMIN to -1 to let
254 the caller know the offset is valid and just past the end. */
255 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
256 return 0;
257 }
258
259 /* Otherwise return the size minus the lower bound of the offset. */
260 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
261
262 *pmin = sizrng[0] - or0;
263 return sizrng[1] - or0;
264 }
265
266 /* The offset to the referenced object isn't zero-based (i.e., it may
267 refer to a byte other than the first). The size of such an object
268 is constrained only by the size of the address space (the result
269 of max_object_size()). */
270 if (sizrng[1] <= offrng[0])
271 {
272 *pmin = 0;
273 return 0;
274 }
275
276 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
277
278 *pmin = sizrng[0] - or0;
279 return sizrng[1] - or0;
280 }
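
/* An illustrative example for size_remaining (hypothetical values):
   for a zero-based (BASE0) reference with SIZRNG = [4, 8] and
   OFFRNG = [2, 4], *PMIN is set to 4 - 2 = 2 and 8 - 2 = 6 is
   returned.  If instead OFFRNG were [8, 9], the offset starts at the
   upper bound of the size, so *PMIN is set to -1 (just past the end)
   and zero is returned.  */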
281
282 /* Add the range [MIN, MAX] to the offset range. For known objects (with
283 zero-based offsets) at least one of whose offset's bounds is in range,
284 constrain the other (or both) to the bounds of the object (i.e., zero
285 and the upper bound of its size). This improves the quality of
286 diagnostics. */
287
288 void access_ref::add_offset (const offset_int &min, const offset_int &max)
289 {
290 if (min <= max)
291 {
292 /* To add an ordinary range just add it to the bounds. */
293 offrng[0] += min;
294 offrng[1] += max;
295 }
296 else if (!base0)
297 {
298 /* To add an inverted range to an offset to an unknown object
299 expand it to the maximum. */
300 add_max_offset ();
301 return;
302 }
303 else
304 {
305 /* To add an inverted range to an offset to a known object set
306 the upper bound to the maximum representable offset value
307 (which may be greater than MAX_OBJECT_SIZE).
308 The lower bound is either the sum of the current offset and
309 MIN when abs(MAX) is greater than the former, or zero otherwise.
310 Zero because then the inverted range includes the negative of
311 the lower bound. */
312 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
313 offrng[1] = maxoff;
314
315 if (max >= 0)
316 {
317 offrng[0] = 0;
318 return;
319 }
320
321 offrng[1] = maxoff;
322 offset_int absmax = wi::abs (max);
323 if (offrng[0] < absmax)
324 {
325 offrng[0] += min;
326 /* Cap the lower bound at the upper (set to MAXOFF above)
327 to avoid inadvertently recreating an inverted range. */
328 if (offrng[1] < offrng[0])
329 offrng[0] = offrng[1];
330 }
331 else
332 offrng[0] = 0;
333 }
334
335 if (!base0)
336 return;
337
338 /* When referencing a known object check to see if the offset computed
339 so far is in bounds... */
340 offset_int remrng[2];
341 remrng[1] = size_remaining (remrng);
342 if (remrng[1] > 0 || remrng[0] < 0)
343 {
344 /* ...if so, constrain it so that neither bound exceeds the size of
345 the object. Out of bounds offsets are left unchanged, and, for
346 better or worse, become in bounds later. They should be detected
347 and diagnosed at the point they first become invalid by
348 -Warray-bounds. */
349 if (offrng[0] < 0)
350 offrng[0] = 0;
351 if (offrng[1] > sizrng[1])
352 offrng[1] = sizrng[1];
353 }
354 }
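
/* An illustrative example for add_offset (hypothetical values): for a
   known (BASE0) object with SIZRNG = [0, 8] and OFFRNG = [0, 0],
   add_offset (2, 12) first widens OFFRNG to [2, 12]; the in-bounds
   check above then clamps the upper bound to the object size, leaving
   OFFRNG = [2, 8].  */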
355
356 /* Return true if NAME starts with __builtin_, __sync_, or __atomic_. */
357
358 static bool
359 is_builtin_name (const char *name)
360 {
361 if (strncmp (name, "__builtin_", 10) == 0)
362 return true;
363 if (strncmp (name, "__sync_", 7) == 0)
364 return true;
365 if (strncmp (name, "__atomic_", 9) == 0)
366 return true;
367 return false;
368 }
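
/* For example, is_builtin_name returns true for "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n", and false for plain
   "memcpy".  */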
369
370 /* Return true if NODE should be considered for inline expansion regardless
371 of the optimization level. This is the case whenever a function is invoked
372 with its "internal" name, which normally contains the prefix "__builtin". */
373
374 bool
375 called_as_built_in (tree node)
376 {
377 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
378 we want the name used to call the function, not the name it
379 will have. */
380 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
381 return is_builtin_name (name);
382 }
383
384 /* Compute values M and N such that M divides (address of EXP - N) and such
385 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
386 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
387 *ALIGNP and any bit-offset to *BITPOSP.
388
389 Note that the address (and thus the alignment) computed here is based
390 on the address to which a symbol resolves, whereas DECL_ALIGN is based
391 on the address at which an object is actually located. These two
392 addresses are not always the same. For example, on ARM targets,
393 the address &foo of a Thumb function foo() has the lowest bit set,
394 whereas foo() itself starts on an even address.
395
396 If ADDR_P is true we are taking the address of the memory reference EXP
397 and thus cannot rely on the access taking place. */
398
399 static bool
400 get_object_alignment_2 (tree exp, unsigned int *alignp,
401 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
402 {
403 poly_int64 bitsize, bitpos;
404 tree offset;
405 machine_mode mode;
406 int unsignedp, reversep, volatilep;
407 unsigned int align = BITS_PER_UNIT;
408 bool known_alignment = false;
409
410 /* Get the innermost object and the constant (bitpos) and possibly
411 variable (offset) offset of the access. */
412 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
413 &unsignedp, &reversep, &volatilep);
414
415 /* Extract alignment information from the innermost object and
416 possibly adjust bitpos and offset. */
417 if (TREE_CODE (exp) == FUNCTION_DECL)
418 {
419 /* Function addresses can encode extra information besides their
420 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
421 allows the low bit to be used as a virtual bit, we know
422 that the address itself must be at least 2-byte aligned. */
423 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
424 align = 2 * BITS_PER_UNIT;
425 }
426 else if (TREE_CODE (exp) == LABEL_DECL)
427 ;
428 else if (TREE_CODE (exp) == CONST_DECL)
429 {
430 /* The alignment of a CONST_DECL is determined by its initializer. */
431 exp = DECL_INITIAL (exp);
432 align = TYPE_ALIGN (TREE_TYPE (exp));
433 if (CONSTANT_CLASS_P (exp))
434 align = targetm.constant_alignment (exp, align);
435
436 known_alignment = true;
437 }
438 else if (DECL_P (exp))
439 {
440 align = DECL_ALIGN (exp);
441 known_alignment = true;
442 }
443 else if (TREE_CODE (exp) == INDIRECT_REF
444 || TREE_CODE (exp) == MEM_REF
445 || TREE_CODE (exp) == TARGET_MEM_REF)
446 {
447 tree addr = TREE_OPERAND (exp, 0);
448 unsigned ptr_align;
449 unsigned HOST_WIDE_INT ptr_bitpos;
450 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
451
452 /* If the address is explicitly aligned, handle that. */
453 if (TREE_CODE (addr) == BIT_AND_EXPR
454 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
455 {
456 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
457 ptr_bitmask *= BITS_PER_UNIT;
458 align = least_bit_hwi (ptr_bitmask);
459 addr = TREE_OPERAND (addr, 0);
460 }
461
462 known_alignment
463 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
464 align = MAX (ptr_align, align);
465
466 /* Re-apply explicit alignment to the bitpos. */
467 ptr_bitpos &= ptr_bitmask;
468
469 /* The alignment of the pointer operand in a TARGET_MEM_REF
470 has to take the variable offset parts into account. */
471 if (TREE_CODE (exp) == TARGET_MEM_REF)
472 {
473 if (TMR_INDEX (exp))
474 {
475 unsigned HOST_WIDE_INT step = 1;
476 if (TMR_STEP (exp))
477 step = TREE_INT_CST_LOW (TMR_STEP (exp));
478 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
479 }
480 if (TMR_INDEX2 (exp))
481 align = BITS_PER_UNIT;
482 known_alignment = false;
483 }
484
485 /* When EXP is an actual memory reference then we can use
486 TYPE_ALIGN of a pointer indirection to derive alignment.
487 Do so only if get_pointer_alignment_1 did not reveal absolute
488 alignment knowledge and if using that alignment would
489 improve the situation. */
490 unsigned int talign;
491 if (!addr_p && !known_alignment
492 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
493 && talign > align)
494 align = talign;
495 else
496 {
497 /* Else adjust bitpos accordingly. */
498 bitpos += ptr_bitpos;
499 if (TREE_CODE (exp) == MEM_REF
500 || TREE_CODE (exp) == TARGET_MEM_REF)
501 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
502 }
503 }
504 else if (TREE_CODE (exp) == STRING_CST)
505 {
506 /* STRING_CSTs are the only constant objects we allow not to be
507 wrapped inside a CONST_DECL. */
508 align = TYPE_ALIGN (TREE_TYPE (exp));
509 if (CONSTANT_CLASS_P (exp))
510 align = targetm.constant_alignment (exp, align);
511
512 known_alignment = true;
513 }
514
515 /* If there is a non-constant offset part extract the maximum
516 alignment that can prevail. */
517 if (offset)
518 {
519 unsigned int trailing_zeros = tree_ctz (offset);
520 if (trailing_zeros < HOST_BITS_PER_INT)
521 {
522 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
523 if (inner)
524 align = MIN (align, inner);
525 }
526 }
527
528 /* Account for the alignment of runtime coefficients, so that the constant
529 bitpos is guaranteed to be accurate. */
530 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
531 if (alt_align != 0 && alt_align < align)
532 {
533 align = alt_align;
534 known_alignment = false;
535 }
536
537 *alignp = align;
538 *bitposp = bitpos.coeffs[0] & (align - 1);
539 return known_alignment;
540 }
541
542 /* For a memory reference expression EXP compute values M and N such that M
543 divides (&EXP - N) and such that N < M. If these numbers can be determined,
544 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
545 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
546
547 bool
548 get_object_alignment_1 (tree exp, unsigned int *alignp,
549 unsigned HOST_WIDE_INT *bitposp)
550 {
551 return get_object_alignment_2 (exp, alignp, bitposp, false);
552 }
553
554 /* Return the alignment in bits of EXP, an object. */
555
556 unsigned int
557 get_object_alignment (tree exp)
558 {
559 unsigned HOST_WIDE_INT bitpos = 0;
560 unsigned int align;
561
562 get_object_alignment_1 (exp, &align, &bitpos);
563
564 /* align and bitpos now specify known low bits of the pointer.
565 ptr & (align - 1) == bitpos. */
566
567 if (bitpos != 0)
568 align = least_bit_hwi (bitpos);
569 return align;
570 }
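
/* For example (hypothetical values): if get_object_alignment_1
   determines ALIGN = 64 bits with BITPOS = 16, the pointer is only
   known to be 16-bit (2-byte) aligned, so 16 is returned; with
   BITPOS = 0 the full 64-bit alignment is returned.  */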
571
572 /* For a pointer valued expression EXP compute values M and N such that M
573 divides (EXP - N) and such that N < M. If these numbers can be determined,
574 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
575 the results are just a conservative approximation.
576
577 If EXP is not a pointer, false is returned too. */
578
579 bool
580 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
581 unsigned HOST_WIDE_INT *bitposp)
582 {
583 STRIP_NOPS (exp);
584
585 if (TREE_CODE (exp) == ADDR_EXPR)
586 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
587 alignp, bitposp, true);
588 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
589 {
590 unsigned int align;
591 unsigned HOST_WIDE_INT bitpos;
592 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
593 &align, &bitpos);
594 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
595 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
596 else
597 {
598 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
599 if (trailing_zeros < HOST_BITS_PER_INT)
600 {
601 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
602 if (inner)
603 align = MIN (align, inner);
604 }
605 }
606 *alignp = align;
607 *bitposp = bitpos & (align - 1);
608 return res;
609 }
610 else if (TREE_CODE (exp) == SSA_NAME
611 && POINTER_TYPE_P (TREE_TYPE (exp)))
612 {
613 unsigned int ptr_align, ptr_misalign;
614 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
615
616 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
617 {
618 *bitposp = ptr_misalign * BITS_PER_UNIT;
619 *alignp = ptr_align * BITS_PER_UNIT;
620 /* Make sure to return a sensible alignment when the multiplication
621 by BITS_PER_UNIT overflowed. */
622 if (*alignp == 0)
623 *alignp = 1u << (HOST_BITS_PER_INT - 1);
624 /* We cannot really tell whether this result is an approximation. */
625 return false;
626 }
627 else
628 {
629 *bitposp = 0;
630 *alignp = BITS_PER_UNIT;
631 return false;
632 }
633 }
634 else if (TREE_CODE (exp) == INTEGER_CST)
635 {
636 *alignp = BIGGEST_ALIGNMENT;
637 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
638 & (BIGGEST_ALIGNMENT - 1));
639 return true;
640 }
641
642 *bitposp = 0;
643 *alignp = BITS_PER_UNIT;
644 return false;
645 }
646
647 /* Return the alignment in bits of EXP, a pointer valued expression.
648 The alignment returned is, by default, the alignment of the thing that
649 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
650
651 Otherwise, look at the expression to see if we can do better, i.e., if the
652 expression is actually pointing at an object whose alignment is tighter. */
653
654 unsigned int
655 get_pointer_alignment (tree exp)
656 {
657 unsigned HOST_WIDE_INT bitpos = 0;
658 unsigned int align;
659
660 get_pointer_alignment_1 (exp, &align, &bitpos);
661
662 /* align and bitpos now specify known low bits of the pointer.
663 ptr & (align - 1) == bitpos. */
664
665 if (bitpos != 0)
666 align = least_bit_hwi (bitpos);
667
668 return align;
669 }
670
671 /* Return the number of leading non-zero elements in the sequence
672 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
673 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
674
675 unsigned
676 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
677 {
678 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
679
680 unsigned n;
681
682 if (eltsize == 1)
683 {
684 /* Optimize the common case of plain char. */
685 for (n = 0; n < maxelts; n++)
686 {
687 const char *elt = (const char*) ptr + n;
688 if (!*elt)
689 break;
690 }
691 }
692 else
693 {
694 for (n = 0; n < maxelts; n++)
695 {
696 const char *elt = (const char*) ptr + n * eltsize;
697 if (!memcmp (elt, "\0\0\0\0", eltsize))
698 break;
699 }
700 }
701 return n;
702 }
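
/* A couple of worked examples (hypothetical inputs): with ELTSIZE 1,
   string_length ("ab\0cd", 1, 6) returns 2; with ELTSIZE 4, for a
   sequence of 4-byte elements whose second element is all zero bytes,
   string_length (ptr, 4, 3) returns 1.  */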
703
704 /* For a call EXPR at LOC to a function FNAME that expects a string
705 in the argument ARG, issue a diagnostic due to it being called with
706 an argument, declared DECL, that is a character array with no
707 terminating NUL. SIZE is the size of the array (exact when EXACT is
708 true) and BNDRNG the number of characters in which the NUL is expected.
709 Either EXPR or FNAME may be null but not both; SIZE may be null when BNDRNG is null. */
710
711 void
712 warn_string_no_nul (location_t loc, tree expr, const char *fname,
713 tree arg, tree decl, tree size /* = NULL_TREE */,
714 bool exact /* = false */,
715 const wide_int bndrng[2] /* = NULL */)
716 {
717 if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
718 return;
719
720 loc = expansion_point_location_if_in_system_header (loc);
721 bool warned;
722
723 /* Format the bound range as a string to keep the number of messages
724 from exploding. */
725 char bndstr[80];
726 *bndstr = 0;
727 if (bndrng)
728 {
729 if (bndrng[0] == bndrng[1])
730 sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
731 else
732 sprintf (bndstr, "[%llu, %llu]",
733 (unsigned long long) bndrng[0].to_uhwi (),
734 (unsigned long long) bndrng[1].to_uhwi ());
735 }
736
737 const tree maxobjsize = max_object_size ();
738 const wide_int maxsiz = wi::to_wide (maxobjsize);
739 if (expr)
740 {
741 tree func = get_callee_fndecl (expr);
742 if (bndrng)
743 {
744 if (wi::ltu_p (maxsiz, bndrng[0]))
745 warned = warning_at (loc, OPT_Wstringop_overread,
746 "%K%qD specified bound %s exceeds "
747 "maximum object size %E",
748 expr, func, bndstr, maxobjsize);
749 else
750 {
751 bool maybe = wi::to_wide (size) == bndrng[0];
752 warned = warning_at (loc, OPT_Wstringop_overread,
753 exact
754 ? G_("%K%qD specified bound %s exceeds "
755 "the size %E of unterminated array")
756 : (maybe
757 ? G_("%K%qD specified bound %s may "
758 "exceed the size of at most %E "
759 "of unterminated array")
760 : G_("%K%qD specified bound %s exceeds "
761 "the size of at most %E "
762 "of unterminated array")),
763 expr, func, bndstr, size);
764 }
765 }
766 else
767 warned = warning_at (loc, OPT_Wstringop_overread,
768 "%K%qD argument missing terminating nul",
769 expr, func);
770 }
771 else
772 {
773 if (bndrng)
774 {
775 if (wi::ltu_p (maxsiz, bndrng[0]))
776 warned = warning_at (loc, OPT_Wstringop_overread,
777 "%qs specified bound %s exceeds "
778 "maximum object size %E",
779 fname, bndstr, maxobjsize);
780 else
781 {
782 bool maybe = wi::to_wide (size) == bndrng[0];
783 warned = warning_at (loc, OPT_Wstringop_overread,
784 exact
785 ? G_("%qs specified bound %s exceeds "
786 "the size %E of unterminated array")
787 : (maybe
788 ? G_("%qs specified bound %s may "
789 "exceed the size of at most %E "
790 "of unterminated array")
791 : G_("%qs specified bound %s exceeds "
792 "the size of at most %E "
793 "of unterminated array")),
794 fname, bndstr, size);
795 }
796 }
797 else
798 warned = warning_at (loc, OPT_Wstringop_overread,
799 "%qs argument missing terminating nul",
800 fname);
801 }
802
803 if (warned)
804 {
805 inform (DECL_SOURCE_LOCATION (decl),
806 "referenced argument declared here");
807 TREE_NO_WARNING (arg) = 1;
808 if (expr)
809 TREE_NO_WARNING (expr) = 1;
810 }
811 }
812
813 /* For a call EXPR (which may be null) that expects a string argument
814 SRC, return false if SRC is a character array with no terminating
815 NUL. When nonnull, BOUND is the number of characters in which to
816 expect the terminating NUL. RDONLY is true for read-only accesses
817 such as strcmp, false for read-write such as strcpy. When EXPR is
818 nonnull, a warning is also issued. */
819
820 bool
821 check_nul_terminated_array (tree expr, tree src,
822 tree bound /* = NULL_TREE */)
823 {
824 /* The constant size of the array SRC points to. The actual size
825 may be less if EXACT is false, but not more. */
826 tree size;
827 /* True unless SRC involves a non-constant offset into the array. */
828 bool exact;
829 /* The unterminated constant array SRC points to. */
830 tree nonstr = unterminated_array (src, &size, &exact);
831 if (!nonstr)
832 return true;
833
834 /* NONSTR refers to the non-nul terminated constant array and SIZE
835 is the constant size of the array in bytes. EXACT is true when
836 SIZE is exact. */
837
838 wide_int bndrng[2];
839 if (bound)
840 {
841 if (TREE_CODE (bound) == INTEGER_CST)
842 bndrng[0] = bndrng[1] = wi::to_wide (bound);
843 else
844 {
845 value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
846 if (rng != VR_RANGE)
847 return true;
848 }
849
850 if (exact)
851 {
852 if (wi::leu_p (bndrng[0], wi::to_wide (size)))
853 return true;
854 }
855 else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
856 return true;
857 }
858
859 if (expr)
860 warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
861 size, exact, bound ? bndrng : NULL);
862
863 return false;
864 }
865
866 /* If EXP refers to an unterminated constant character array return
867 the declaration of the object of which the array is a member or
868 element and if SIZE is not null, set *SIZE to the size of
869 the unterminated array and set *EXACT if the size is exact or
870 clear it otherwise. Otherwise return null. */
871
872 tree
873 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
874 {
875 /* C_STRLEN will return NULL and set DECL in the info
876 structure if EXP references an unterminated array. */
877 c_strlen_data lendata = { };
878 tree len = c_strlen (exp, 1, &lendata);
879 if (len == NULL_TREE && lendata.minlen && lendata.decl)
880 {
881 if (size)
882 {
883 len = lendata.minlen;
884 if (lendata.off)
885 {
886 /* Constant offsets are already accounted for in LENDATA.MINLEN,
887 but not in a SSA_NAME + CST expression. */
888 if (TREE_CODE (lendata.off) == INTEGER_CST)
889 *exact = true;
890 else if (TREE_CODE (lendata.off) == PLUS_EXPR
891 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
892 {
893 /* Subtract the offset from the size of the array. */
894 *exact = false;
895 tree temp = TREE_OPERAND (lendata.off, 1);
896 temp = fold_convert (ssizetype, temp);
897 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
898 }
899 else
900 *exact = false;
901 }
902 else
903 *exact = true;
904
905 *size = len;
906 }
907 return lendata.decl;
908 }
909
910 return NULL_TREE;
911 }
912
913 /* Compute the length of a null-terminated character string or wide
914 character string handling character sizes of 1, 2, and 4 bytes.
915 TREE_STRING_LENGTH is not the right way because it evaluates to
916 the size of the character array in bytes (as opposed to characters)
917 and because it can contain a zero byte in the middle.
918
919 ONLY_VALUE should be nonzero if the result is not going to be emitted
920 into the instruction stream and zero if it is going to be expanded.
921 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
922 is returned, otherwise NULL, since
923 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
924 evaluate the side-effects.
925
926 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
927 accesses. Note that this implies the result is not going to be emitted
928 into the instruction stream.
929
930 Additional information about the string accessed may be recorded
931 in DATA. For example, if ARG references an unterminated string,
932 then the declaration will be stored in the DECL field. If the
933 length of the unterminated string can be determined, it'll be
934 stored in the LEN field. Note this length could well be different
935 than what a C strlen call would return.
936
937 ELTSIZE is 1 for normal single byte character strings, and 2 or
938 4 for wide character strings. ELTSIZE is by default 1.
939
940 The value returned is of type `ssizetype'. */
941
942 tree
943 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
944 {
945 /* If we were not passed a DATA pointer, then get one to a local
946 structure. That avoids having to check DATA for NULL before
947 each time we want to use it. */
948 c_strlen_data local_strlen_data = { };
949 if (!data)
950 data = &local_strlen_data;
951
952 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
953
954 tree src = STRIP_NOPS (arg);
955 if (TREE_CODE (src) == COND_EXPR
956 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
957 {
958 tree len1, len2;
959
960 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
961 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
962 if (tree_int_cst_equal (len1, len2))
963 return len1;
964 }
965
966 if (TREE_CODE (src) == COMPOUND_EXPR
967 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
968 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
969
970 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
971
972 /* Offset from the beginning of the string in bytes. */
973 tree byteoff;
974 tree memsize;
975 tree decl;
976 src = string_constant (src, &byteoff, &memsize, &decl);
977 if (src == 0)
978 return NULL_TREE;
979
980 /* Determine the size of the string element. */
981 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
982 return NULL_TREE;
983
984 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
985 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
986 in case the latter is less than the size of the array, such as when
987 SRC refers to a short string literal used to initialize a large array.
988 In that case, the elements of the array after the terminating NUL are
989 all NUL. */
990 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
991 strelts = strelts / eltsize;
992
993 if (!tree_fits_uhwi_p (memsize))
994 return NULL_TREE;
995
996 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
997
998 /* PTR can point to the byte representation of any string type, including
999 char* and wchar_t*. */
1000 const char *ptr = TREE_STRING_POINTER (src);
1001
1002 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
1003 {
1004 /* The code below works only for single byte character types. */
1005 if (eltsize != 1)
1006 return NULL_TREE;
1007
1008 /* If the string has an internal NUL character followed by any
1009 non-NUL characters (e.g., "foo\0bar"), we can't compute
1010 the offset to the following NUL if we don't know where to
1011 start searching for it. */
1012 unsigned len = string_length (ptr, eltsize, strelts);
1013
1014 /* Return when an embedded null character is found or none at all.
1015 In the latter case, set the DECL/LEN field in the DATA structure
1016 so that callers may examine them. */
1017 if (len + 1 < strelts)
1018 return NULL_TREE;
1019 else if (len >= maxelts)
1020 {
1021 data->decl = decl;
1022 data->off = byteoff;
1023 data->minlen = ssize_int (len);
1024 return NULL_TREE;
1025 }
1026
1027 /* For empty strings the result should be zero. */
1028 if (len == 0)
1029 return ssize_int (0);
1030
1031 /* We don't know the starting offset, but we do know that the string
1032 has no internal zero bytes. If the offset falls within the bounds
1033 of the string subtract the offset from the length of the string,
1034 and return that. Otherwise the length is zero. Take care to
1035 use SAVE_EXPR in case the OFFSET has side-effects. */
1036 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
1037 : byteoff;
1038 offsave = fold_convert_loc (loc, sizetype, offsave);
1039 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
1040 size_int (len));
1041 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
1042 offsave);
1043 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
1044 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
1045 build_zero_cst (ssizetype));
1046 }
1047
1048 /* Offset from the beginning of the string in elements. */
1049 HOST_WIDE_INT eltoff;
1050
1051 /* We have a known offset into the string. Start searching there for
1052 a null character if we can represent it as a single HOST_WIDE_INT. */
1053 if (byteoff == 0)
1054 eltoff = 0;
1055 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1056 eltoff = -1;
1057 else
1058 eltoff = tree_to_uhwi (byteoff) / eltsize;
1059
1060 /* If the offset is known to be out of bounds, warn, and call strlen at
1061 runtime. */
1062 if (eltoff < 0 || eltoff >= maxelts)
1063 {
1064 /* Suppress multiple warnings for propagated constant strings. */
1065 if (only_value != 2
1066 && !TREE_NO_WARNING (arg)
1067 && warning_at (loc, OPT_Warray_bounds,
1068 "offset %qwi outside bounds of constant string",
1069 eltoff))
1070 {
1071 if (decl)
1072 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
1073 TREE_NO_WARNING (arg) = 1;
1074 }
1075 return NULL_TREE;
1076 }
1077
1078 /* If eltoff is larger than strelts but less than maxelts the
1079 string length is zero, since the excess memory will be zero. */
1080 if (eltoff > strelts)
1081 return ssize_int (0);
1082
1083 /* Use strlen to search for the first zero byte. Since any strings
1084 constructed with build_string will have nulls appended, we win even
1085 if we get handed something like (char[4])"abcd".
1086
1087 Since ELTOFF is our starting index into the string, no further
1088 calculation is needed. */
1089 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
1090 strelts - eltoff);
1091
1092 /* Don't know what to return if there was no zero termination.
1093 Ideally this would turn into a gcc_checking_assert over time.
1094 Set DECL/LEN so callers can examine them. */
1095 if (len >= maxelts - eltoff)
1096 {
1097 data->decl = decl;
1098 data->off = byteoff;
1099 data->minlen = ssize_int (len);
1100 return NULL_TREE;
1101 }
1102
1103 return ssize_int (len);
1104 }
1105
1106 /* Return a constant integer corresponding to target reading
1107 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
1108 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
1109 are assumed to be zero, otherwise it reads as many characters
1110 as needed. */
1111
1112 rtx
1113 c_readstr (const char *str, scalar_int_mode mode,
1114 bool null_terminated_p/*=true*/)
1115 {
1116 HOST_WIDE_INT ch;
1117 unsigned int i, j;
1118 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
1119
1120 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
1121 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
1122 / HOST_BITS_PER_WIDE_INT;
1123
1124 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
1125 for (i = 0; i < len; i++)
1126 tmp[i] = 0;
1127
1128 ch = 1;
1129 for (i = 0; i < GET_MODE_SIZE (mode); i++)
1130 {
1131 j = i;
1132 if (WORDS_BIG_ENDIAN)
1133 j = GET_MODE_SIZE (mode) - i - 1;
1134 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
1135 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
1136 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
1137 j *= BITS_PER_UNIT;
1138
1139 if (ch || !null_terminated_p)
1140 ch = (unsigned char) str[i];
1141 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
1142 }
1143
1144 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
1145 return immed_wide_int_const (c, mode);
1146 }
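
/* As an illustration (hypothetical values, assuming 8-bit units and a
   little-endian target): reading the string "ab" in a 32-bit integer
   mode yields the constant 0x6261, i.e. the bytes 'a', 'b', 0, 0 laid
   out as they appear in memory.  With NULL_TERMINATED_P (the default)
   nothing past the first '\0' is read; the remaining bytes are taken
   to be zero.  */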
1147
1148 /* Cast a target constant CST to target CHAR and if that value fits into
1149 host char type, return zero and put that value into variable pointed to by
1150 P. */
1151
1152 static int
1153 target_char_cast (tree cst, char *p)
1154 {
1155 unsigned HOST_WIDE_INT val, hostval;
1156
1157 if (TREE_CODE (cst) != INTEGER_CST
1158 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
1159 return 1;
1160
1161 /* Do not care if it fits or not right here. */
1162 val = TREE_INT_CST_LOW (cst);
1163
1164 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
1165 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
1166
1167 hostval = val;
1168 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
1169 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
1170
1171 if (val != hostval)
1172 return 1;
1173
1174 *p = hostval;
1175 return 0;
1176 }
1177
1178 /* Similar to save_expr, but assumes that arbitrary code is not executed
1179 in between the multiple evaluations. In particular, we assume that a
1180 non-addressable local variable will not be modified. */
1181
1182 static tree
1183 builtin_save_expr (tree exp)
1184 {
1185 if (TREE_CODE (exp) == SSA_NAME
1186 || (TREE_ADDRESSABLE (exp) == 0
1187 && (TREE_CODE (exp) == PARM_DECL
1188 || (VAR_P (exp) && !TREE_STATIC (exp)))))
1189 return exp;
1190
1191 return save_expr (exp);
1192 }
1193
1194 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1195 times to get the address of either a higher stack frame, or a return
1196 address located within it (depending on FNDECL_CODE). */
1197
1198 static rtx
1199 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
1200 {
1201 int i;
1202 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
1203 if (tem == NULL_RTX)
1204 {
1205 /* For a zero count with __builtin_return_address, we don't care what
1206 frame address we return, because target-specific definitions will
1207 override us. Therefore frame pointer elimination is OK, and using
1208 the soft frame pointer is OK.
1209
1210 For a nonzero count, or a zero count with __builtin_frame_address,
1211 we require a stable offset from the current frame pointer to the
1212 previous one, so we must use the hard frame pointer, and
1213 we must disable frame pointer elimination. */
1214 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1215 tem = frame_pointer_rtx;
1216 else
1217 {
1218 tem = hard_frame_pointer_rtx;
1219
1220 /* Tell reload not to eliminate the frame pointer. */
1221 crtl->accesses_prior_frames = 1;
1222 }
1223 }
1224
1225 if (count > 0)
1226 SETUP_FRAME_ADDRESSES ();
1227
1228 /* On the SPARC, the return address is not in the frame, it is in a
1229 register. There is no way to access it off of the current frame
1230 pointer, but it can be accessed off the previous frame pointer by
1231 reading the value from the register window save area. */
1232 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1233 count--;
1234
1235 /* Scan back COUNT frames to the specified frame. */
1236 for (i = 0; i < count; i++)
1237 {
1238 /* Assume the dynamic chain pointer is in the word that the
1239 frame address points to, unless otherwise specified. */
1240 tem = DYNAMIC_CHAIN_ADDRESS (tem);
1241 tem = memory_address (Pmode, tem);
1242 tem = gen_frame_mem (Pmode, tem);
1243 tem = copy_to_reg (tem);
1244 }
1245
1246 /* For __builtin_frame_address, return what we've got. But, on
1247 the SPARC for example, we may have to add a bias. */
1248 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
1249 return FRAME_ADDR_RTX (tem);
1250
1251 /* For __builtin_return_address, get the return address from that frame. */
1252 #ifdef RETURN_ADDR_RTX
1253 tem = RETURN_ADDR_RTX (count, tem);
1254 #else
1255 tem = memory_address (Pmode,
1256 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1257 tem = gen_frame_mem (Pmode, tem);
1258 #endif
1259 return tem;
1260 }
1261
1262 /* Alias set used for setjmp buffer. */
1263 static alias_set_type setjmp_alias_set = -1;
1264
1265 /* Construct the leading half of a __builtin_setjmp call. Control will
1266 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1267 exception handling code. */
1268
1269 void
1270 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1271 {
1272 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1273 rtx stack_save;
1274 rtx mem;
1275
1276 if (setjmp_alias_set == -1)
1277 setjmp_alias_set = new_alias_set ();
1278
1279 buf_addr = convert_memory_address (Pmode, buf_addr);
1280
1281 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1282
1283 /* We store the frame pointer and the address of receiver_label in
1284 the buffer and use the rest of it for the stack save area, which
1285 is machine-dependent. */
1286
1287 mem = gen_rtx_MEM (Pmode, buf_addr);
1288 set_mem_alias_set (mem, setjmp_alias_set);
1289 emit_move_insn (mem, hard_frame_pointer_rtx);
1290
1291 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1292 GET_MODE_SIZE (Pmode))),
1293 set_mem_alias_set (mem, setjmp_alias_set);
1294
1295 emit_move_insn (validize_mem (mem),
1296 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1297
1298 stack_save = gen_rtx_MEM (sa_mode,
1299 plus_constant (Pmode, buf_addr,
1300 2 * GET_MODE_SIZE (Pmode)));
1301 set_mem_alias_set (stack_save, setjmp_alias_set);
1302 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1303
1304 /* If there is further processing to do, do it. */
1305 if (targetm.have_builtin_setjmp_setup ())
1306 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1307
1308 /* We have a nonlocal label. */
1309 cfun->has_nonlocal_label = 1;
1310 }
1311
1312 /* Construct the trailing part of a __builtin_setjmp call. This is
1313 also called directly by the SJLJ exception handling code.
1314 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1315
1316 void
1317 expand_builtin_setjmp_receiver (rtx receiver_label)
1318 {
1319 rtx chain;
1320
1321 /* Mark the FP as used when we get here, so we have to make sure it's
1322 marked as used by this function. */
1323 emit_use (hard_frame_pointer_rtx);
1324
1325 /* Mark the static chain as clobbered here so life information
1326 doesn't get messed up for it. */
1327 chain = rtx_for_static_chain (current_function_decl, true);
1328 if (chain && REG_P (chain))
1329 emit_clobber (chain);
1330
1331 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1332 {
1333 /* If the argument pointer can be eliminated in favor of the
1334 frame pointer, we don't need to restore it. We assume here
1335 that if such an elimination is present, it can always be used.
1336 This is the case on all known machines; if we don't make this
1337 assumption, we do unnecessary saving on many machines. */
1338 size_t i;
1339 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1340
1341 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1342 if (elim_regs[i].from == ARG_POINTER_REGNUM
1343 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1344 break;
1345
1346 if (i == ARRAY_SIZE (elim_regs))
1347 {
1348 /* Now restore our arg pointer from the address at which it
1349 was saved in our stack frame. */
1350 emit_move_insn (crtl->args.internal_arg_pointer,
1351 copy_to_reg (get_arg_pointer_save_area ()));
1352 }
1353 }
1354
1355 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1356 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1357 else if (targetm.have_nonlocal_goto_receiver ())
1358 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1359 else
1360 { /* Nothing */ }
1361
1362 /* We must not allow the code we just generated to be reordered by
1363 scheduling. Specifically, the update of the frame pointer must
1364 happen immediately, not later. */
1365 emit_insn (gen_blockage ());
1366 }
1367
1368 /* __builtin_longjmp is passed a pointer to an array of five words (not
1369 all will be used on all machines). It operates similarly to the C
1370 library function of the same name, but is more efficient. Much of
1371 the code below is copied from the handling of non-local gotos. */
1372
1373 static void
1374 expand_builtin_longjmp (rtx buf_addr, rtx value)
1375 {
1376 rtx fp, lab, stack;
1377 rtx_insn *insn, *last;
1378 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1379
1380 /* DRAP is needed for stack realign if longjmp is expanded to current
1381 function */
1382 if (SUPPORTS_STACK_ALIGNMENT)
1383 crtl->need_drap = true;
1384
1385 if (setjmp_alias_set == -1)
1386 setjmp_alias_set = new_alias_set ();
1387
1388 buf_addr = convert_memory_address (Pmode, buf_addr);
1389
1390 buf_addr = force_reg (Pmode, buf_addr);
1391
1392 /* We require that the user pass a second argument of 1, because
1393 that is what builtin_setjmp will return. */
1394 gcc_assert (value == const1_rtx);
1395
1396 last = get_last_insn ();
1397 if (targetm.have_builtin_longjmp ())
1398 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1399 else
1400 {
1401 fp = gen_rtx_MEM (Pmode, buf_addr);
1402 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1403 GET_MODE_SIZE (Pmode)));
1404
1405 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1406 2 * GET_MODE_SIZE (Pmode)));
1407 set_mem_alias_set (fp, setjmp_alias_set);
1408 set_mem_alias_set (lab, setjmp_alias_set);
1409 set_mem_alias_set (stack, setjmp_alias_set);
1410
1411 /* Pick up FP, label, and SP from the block and jump. This code is
1412 from expand_goto in stmt.c; see there for detailed comments. */
1413 if (targetm.have_nonlocal_goto ())
1414 /* We have to pass a value to the nonlocal_goto pattern that will
1415 get copied into the static_chain pointer, but it does not matter
1416 what that value is, because builtin_setjmp does not use it. */
1417 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1418 else
1419 {
1420 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1421 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1422
1423 lab = copy_to_reg (lab);
1424
1425 /* Restore the frame pointer and stack pointer. We must use a
1426 temporary since the setjmp buffer may be a local. */
1427 fp = copy_to_reg (fp);
1428 emit_stack_restore (SAVE_NONLOCAL, stack);
1429
1430 /* Ensure the frame pointer move is not optimized. */
1431 emit_insn (gen_blockage ());
1432 emit_clobber (hard_frame_pointer_rtx);
1433 emit_clobber (frame_pointer_rtx);
1434 emit_move_insn (hard_frame_pointer_rtx, fp);
1435
1436 emit_use (hard_frame_pointer_rtx);
1437 emit_use (stack_pointer_rtx);
1438 emit_indirect_jump (lab);
1439 }
1440 }
1441
1442 /* Search backwards and mark the jump insn as a non-local goto.
1443 Note that this precludes the use of __builtin_longjmp to a
1444 __builtin_setjmp target in the same function. However, we've
1445 already cautioned the user that these functions are for
1446 internal exception handling use only. */
1447 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1448 {
1449 gcc_assert (insn != last);
1450
1451 if (JUMP_P (insn))
1452 {
1453 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1454 break;
1455 }
1456 else if (CALL_P (insn))
1457 break;
1458 }
1459 }
1460
1461 static inline bool
1462 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1463 {
1464 return (iter->i < iter->n);
1465 }
1466
1467 /* This function validates the types of a function call argument list
1468 against a specified list of tree_codes. If the last specifier is a 0,
1469 that represents an ellipsis, otherwise the last specifier must be a
1470 VOID_TYPE. */
1471
1472 static bool
1473 validate_arglist (const_tree callexpr, ...)
1474 {
1475 enum tree_code code;
1476 bool res = 0;
1477 va_list ap;
1478 const_call_expr_arg_iterator iter;
1479 const_tree arg;
1480
1481 va_start (ap, callexpr);
1482 init_const_call_expr_arg_iterator (callexpr, &iter);
1483
1484 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1485 tree fn = CALL_EXPR_FN (callexpr);
1486 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1487
1488 for (unsigned argno = 1; ; ++argno)
1489 {
1490 code = (enum tree_code) va_arg (ap, int);
1491
1492 switch (code)
1493 {
1494 case 0:
1495 /* This signifies an ellipsis; any further arguments are all ok. */
1496 res = true;
1497 goto end;
1498 case VOID_TYPE:
1499 /* This signifies an endlink; if no arguments remain, return
1500 true, otherwise return false. */
1501 res = !more_const_call_expr_args_p (&iter);
1502 goto end;
1503 case POINTER_TYPE:
1504 /* The actual argument must be nonnull when either the whole
1505 called function has been declared nonnull, or when the formal
1506 argument corresponding to the actual argument has been. */
1507 if (argmap
1508 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1509 {
1510 arg = next_const_call_expr_arg (&iter);
1511 if (!validate_arg (arg, code) || integer_zerop (arg))
1512 goto end;
1513 break;
1514 }
1515 /* FALLTHRU */
1516 default:
1517 /* If no parameters remain or the parameter's code does not
1518 match the specified code, return false. Otherwise continue
1519 checking any remaining arguments. */
1520 arg = next_const_call_expr_arg (&iter);
1521 if (!validate_arg (arg, code))
1522 goto end;
1523 break;
1524 }
1525 }
1526
1527 /* We need gotos here since we can only have one VA_CLOSE in a
1528 function. */
1529 end: ;
1530 va_end (ap);
1531
1532 BITMAP_FREE (argmap);
1533
1534 return res;
1535 }
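
/* Typical uses (as seen in calls later in this file):
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while
   validate_arglist (exp, POINTER_TYPE, 0) accepts one pointer followed
   by any number of further arguments.  */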
1536
1537 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1538 and the address of the save area. */
1539
1540 static rtx
1541 expand_builtin_nonlocal_goto (tree exp)
1542 {
1543 tree t_label, t_save_area;
1544 rtx r_label, r_save_area, r_fp, r_sp;
1545 rtx_insn *insn;
1546
1547 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1548 return NULL_RTX;
1549
1550 t_label = CALL_EXPR_ARG (exp, 0);
1551 t_save_area = CALL_EXPR_ARG (exp, 1);
1552
1553 r_label = expand_normal (t_label);
1554 r_label = convert_memory_address (Pmode, r_label);
1555 r_save_area = expand_normal (t_save_area);
1556 r_save_area = convert_memory_address (Pmode, r_save_area);
1557 /* Copy the address of the save location to a register just in case it was
1558 based on the frame pointer. */
1559 r_save_area = copy_to_reg (r_save_area);
1560 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1561 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1562 plus_constant (Pmode, r_save_area,
1563 GET_MODE_SIZE (Pmode)));
1564
1565 crtl->has_nonlocal_goto = 1;
1566
1567 /* ??? We no longer need to pass the static chain value, afaik. */
1568 if (targetm.have_nonlocal_goto ())
1569 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1570 else
1571 {
1572 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1573 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1574
1575 r_label = copy_to_reg (r_label);
1576
1577 /* Restore the frame pointer and stack pointer. We must use a
1578 temporary since the setjmp buffer may be a local. */
1579 r_fp = copy_to_reg (r_fp);
1580 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1581
1582 /* Ensure the frame pointer move is not optimized. */
1583 emit_insn (gen_blockage ());
1584 emit_clobber (hard_frame_pointer_rtx);
1585 emit_clobber (frame_pointer_rtx);
1586 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1587
1588 /* USE of hard_frame_pointer_rtx added for consistency;
1589 not clear if really needed. */
1590 emit_use (hard_frame_pointer_rtx);
1591 emit_use (stack_pointer_rtx);
1592
1593 /* If the architecture is using a GP register, we must
1594 conservatively assume that the target function makes use of it.
1595 The prologue of functions with nonlocal gotos must therefore
1596 initialize the GP register to the appropriate value, and we
1597 must then make sure that this value is live at the point
1598 of the jump. (Note that this doesn't necessarily apply
1599 to targets with a nonlocal_goto pattern; they are free
1600 to implement it in their own way. Note also that this is
1601 a no-op if the GP register is a global invariant.) */
1602 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1603 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1604 emit_use (pic_offset_table_rtx);
1605
1606 emit_indirect_jump (r_label);
1607 }
1608
1609 /* Search backwards to the jump insn and mark it as a
1610 non-local goto. */
1611 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1612 {
1613 if (JUMP_P (insn))
1614 {
1615 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1616 break;
1617 }
1618 else if (CALL_P (insn))
1619 break;
1620 }
1621
1622 return const0_rtx;
1623 }
1624
1625 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1626 (not all will be used on all machines) that was passed to __builtin_setjmp.
1627 It updates the stack pointer in that block to the current value. This is
1628 also called directly by the SJLJ exception handling code. */
1629
1630 void
1631 expand_builtin_update_setjmp_buf (rtx buf_addr)
1632 {
1633 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1634 buf_addr = convert_memory_address (Pmode, buf_addr);
1635 rtx stack_save
1636 = gen_rtx_MEM (sa_mode,
1637 memory_address
1638 (sa_mode,
1639 plus_constant (Pmode, buf_addr,
1640 2 * GET_MODE_SIZE (Pmode))));
1641
1642 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1643 }
1644
1645 /* Expand a call to __builtin_prefetch. For a target that does not support
1646 data prefetch, evaluate the memory address argument in case it has side
1647 effects. */
1648
1649 static void
1650 expand_builtin_prefetch (tree exp)
1651 {
1652 tree arg0, arg1, arg2;
1653 int nargs;
1654 rtx op0, op1, op2;
1655
1656 if (!validate_arglist (exp, POINTER_TYPE, 0))
1657 return;
1658
1659 arg0 = CALL_EXPR_ARG (exp, 0);
1660
1661 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1662 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1663 locality). */
1664 nargs = call_expr_nargs (exp);
1665 if (nargs > 1)
1666 arg1 = CALL_EXPR_ARG (exp, 1);
1667 else
1668 arg1 = integer_zero_node;
1669 if (nargs > 2)
1670 arg2 = CALL_EXPR_ARG (exp, 2);
1671 else
1672 arg2 = integer_three_node;
1673
1674 /* Argument 0 is an address. */
1675 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1676
1677 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1678 if (TREE_CODE (arg1) != INTEGER_CST)
1679 {
1680 error ("second argument to %<__builtin_prefetch%> must be a constant");
1681 arg1 = integer_zero_node;
1682 }
1683 op1 = expand_normal (arg1);
1684 /* Argument 1 must be either zero or one. */
1685 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1686 {
1687 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1688 " using zero");
1689 op1 = const0_rtx;
1690 }
1691
1692 /* Argument 2 (locality) must be a compile-time constant int. */
1693 if (TREE_CODE (arg2) != INTEGER_CST)
1694 {
1695 error ("third argument to %<__builtin_prefetch%> must be a constant");
1696 arg2 = integer_zero_node;
1697 }
1698 op2 = expand_normal (arg2);
1699 /* Argument 2 must be 0, 1, 2, or 3. */
1700 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1701 {
1702 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1703 op2 = const0_rtx;
1704 }
1705
1706 if (targetm.have_prefetch ())
1707 {
1708 class expand_operand ops[3];
1709
1710 create_address_operand (&ops[0], op0);
1711 create_integer_operand (&ops[1], INTVAL (op1));
1712 create_integer_operand (&ops[2], INTVAL (op2));
1713 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1714 return;
1715 }
1716
1717 /* Don't do anything with direct references to volatile memory, but
1718 generate code to handle other side effects. */
1719 if (!MEM_P (op0) && side_effects_p (op0))
1720 emit_insn (op0);
1721 }
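
/* Illustrative note: at the source level the builtin handled above is
   typically written as

       __builtin_prefetch (&a[i + 8], 0, 3);

   where the optional second argument selects a read (0) or write (1)
   prefetch and the optional third argument selects the locality hint
   (0..3); both must be integer constants, as checked above.  */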
1722
1723 /* Get a MEM rtx for expression EXP which is the address of an operand
1724 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1725 the maximum length of the block of memory that might be accessed or
1726 NULL if unknown. */
1727
1728 static rtx
1729 get_memory_rtx (tree exp, tree len)
1730 {
1731 tree orig_exp = exp;
1732 rtx addr, mem;
1733
1734 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1735 from its expression; e.g. for expr->a.b only <variable>.a.b is recorded. */
1736 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1737 exp = TREE_OPERAND (exp, 0);
1738
1739 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1740 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1741
1742 /* Get an expression we can use to find the attributes to assign to MEM.
1743 First remove any nops. */
1744 while (CONVERT_EXPR_P (exp)
1745 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1746 exp = TREE_OPERAND (exp, 0);
1747
1748 /* Build a MEM_REF representing the whole accessed area as a byte blob
1749 (builtin stringops may alias with anything). */
1750 exp = fold_build2 (MEM_REF,
1751 build_array_type (char_type_node,
1752 build_range_type (sizetype,
1753 size_one_node, len)),
1754 exp, build_int_cst (ptr_type_node, 0));
1755
1756 /* If the MEM_REF has no acceptable address, try to get the base object
1757 from the original address we got, and build an all-aliasing
1758 unknown-sized access to that one. */
1759 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1760 set_mem_attributes (mem, exp, 0);
1761 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1762 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1763 0))))
1764 {
1765 exp = build_fold_addr_expr (exp);
1766 exp = fold_build2 (MEM_REF,
1767 build_array_type (char_type_node,
1768 build_range_type (sizetype,
1769 size_zero_node,
1770 NULL)),
1771 exp, build_int_cst (ptr_type_node, 0));
1772 set_mem_attributes (mem, exp, 0);
1773 }
1774 set_mem_alias_set (mem, 0);
1775 return mem;
1776 }
1777 \f
1778 /* Built-in functions to perform an untyped call and return. */
1779
1780 #define apply_args_mode \
1781 (this_target_builtins->x_apply_args_mode)
1782 #define apply_result_mode \
1783 (this_target_builtins->x_apply_result_mode)
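
/* Illustrative note: the expanders below implement the documented builtins

       void *__builtin_apply_args (void);
       void *__builtin_apply (void (*fn) (), void *args, size_t size);
       void  __builtin_return (void *result);

   e.g. a forwarding wrapper might do (hedged sketch; target_fn and the
   size 128 are placeholders):

       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*)()) target_fn, args, 128);
       __builtin_return (res);  */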
1784
1785 /* Return the size required for the block returned by __builtin_apply_args,
1786 and initialize apply_args_mode. */
1787
1788 static int
1789 apply_args_size (void)
1790 {
1791 static int size = -1;
1792 int align;
1793 unsigned int regno;
1794
1795 /* The values computed by this function never change. */
1796 if (size < 0)
1797 {
1798 /* The first value is the incoming arg-pointer. */
1799 size = GET_MODE_SIZE (Pmode);
1800
1801 /* The second value is the structure value address unless this is
1802 passed as an "invisible" first argument. */
1803 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1804 size += GET_MODE_SIZE (Pmode);
1805
1806 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1807 if (FUNCTION_ARG_REGNO_P (regno))
1808 {
1809 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1810
1811 gcc_assert (mode != VOIDmode);
1812
1813 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1814 if (size % align != 0)
1815 size = CEIL (size, align) * align;
1816 size += GET_MODE_SIZE (mode);
1817 apply_args_mode[regno] = mode;
1818 }
1819 else
1820 {
1821 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1822 }
1823 }
1824 return size;
1825 }
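
/* Illustrative sketch of the block whose size is computed above (and which
   expand_builtin_apply_args_1 below fills in):

       offset 0                   incoming argument pointer (Pmode)
       + GET_MODE_SIZE (Pmode)    structure value address, if it is not
                                  passed as an invisible first argument
       ...                        each argument register, padded to its
                                  mode alignment, in register-number order  */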
1826
1827 /* Return the size required for the block returned by __builtin_apply,
1828 and initialize apply_result_mode. */
1829
1830 static int
1831 apply_result_size (void)
1832 {
1833 static int size = -1;
1834 int align, regno;
1835
1836 /* The values computed by this function never change. */
1837 if (size < 0)
1838 {
1839 size = 0;
1840
1841 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1842 if (targetm.calls.function_value_regno_p (regno))
1843 {
1844 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1845
1846 gcc_assert (mode != VOIDmode);
1847
1848 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1849 if (size % align != 0)
1850 size = CEIL (size, align) * align;
1851 size += GET_MODE_SIZE (mode);
1852 apply_result_mode[regno] = mode;
1853 }
1854 else
1855 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1856
1857 /* Allow targets that use untyped_call and untyped_return to override
1858 the size so that machine-specific information can be stored here. */
1859 #ifdef APPLY_RESULT_SIZE
1860 size = APPLY_RESULT_SIZE;
1861 #endif
1862 }
1863 return size;
1864 }
1865
1866 /* Create a vector describing the result block RESULT. If SAVEP is true,
1867 the result block is used to save the values; otherwise it is used to
1868 restore the values. */
1869
1870 static rtx
1871 result_vector (int savep, rtx result)
1872 {
1873 int regno, size, align, nelts;
1874 fixed_size_mode mode;
1875 rtx reg, mem;
1876 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1877
1878 size = nelts = 0;
1879 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1880 if ((mode = apply_result_mode[regno]) != VOIDmode)
1881 {
1882 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1883 if (size % align != 0)
1884 size = CEIL (size, align) * align;
1885 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1886 mem = adjust_address (result, mode, size);
1887 savevec[nelts++] = (savep
1888 ? gen_rtx_SET (mem, reg)
1889 : gen_rtx_SET (reg, mem));
1890 size += GET_MODE_SIZE (mode);
1891 }
1892 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1893 }
1894
1895 /* Save the state required to perform an untyped call with the same
1896 arguments as were passed to the current function. */
1897
1898 static rtx
1899 expand_builtin_apply_args_1 (void)
1900 {
1901 rtx registers, tem;
1902 int size, align, regno;
1903 fixed_size_mode mode;
1904 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1905
1906 /* Create a block where the arg-pointer, structure value address,
1907 and argument registers can be saved. */
1908 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1909
1910 /* Walk past the arg-pointer and structure value address. */
1911 size = GET_MODE_SIZE (Pmode);
1912 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1913 size += GET_MODE_SIZE (Pmode);
1914
1915 /* Save each register used in calling a function to the block. */
1916 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1917 if ((mode = apply_args_mode[regno]) != VOIDmode)
1918 {
1919 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1920 if (size % align != 0)
1921 size = CEIL (size, align) * align;
1922
1923 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1924
1925 emit_move_insn (adjust_address (registers, mode, size), tem);
1926 size += GET_MODE_SIZE (mode);
1927 }
1928
1929 /* Save the arg pointer to the block. */
1930 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1931 /* We need the pointer to the arguments as the caller actually passed them
1932 to us, not as we might have pretended they were passed. Make sure it's
1933 a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
1934 if (STACK_GROWS_DOWNWARD)
1935 tem
1936 = force_operand (plus_constant (Pmode, tem,
1937 crtl->args.pretend_args_size),
1938 NULL_RTX);
1939 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1940
1941 size = GET_MODE_SIZE (Pmode);
1942
1943 /* Save the structure value address unless this is passed as an
1944 "invisible" first argument. */
1945 if (struct_incoming_value)
1946 emit_move_insn (adjust_address (registers, Pmode, size),
1947 copy_to_reg (struct_incoming_value));
1948
1949 /* Return the address of the block. */
1950 return copy_addr_to_reg (XEXP (registers, 0));
1951 }
1952
1953 /* __builtin_apply_args returns a block of memory allocated on
1954 the stack into which is stored the arg pointer, structure
1955 value address, static chain, and all the registers that might
1956 possibly be used in performing a function call. The code is
1957 moved to the start of the function so the incoming values are
1958 saved. */
1959
1960 static rtx
1961 expand_builtin_apply_args (void)
1962 {
1963 /* Don't do __builtin_apply_args more than once in a function.
1964 Save the result of the first call and reuse it. */
1965 if (apply_args_value != 0)
1966 return apply_args_value;
1967 {
1968 /* When this function is called, it means that registers must be
1969 saved on entry to this function. So we migrate the
1970 call to the first insn of this function. */
1971 rtx temp;
1972
1973 start_sequence ();
1974 temp = expand_builtin_apply_args_1 ();
1975 rtx_insn *seq = get_insns ();
1976 end_sequence ();
1977
1978 apply_args_value = temp;
1979
1980 /* Put the insns after the NOTE that starts the function.
1981 If this is inside a start_sequence, make the outer-level insn
1982 chain current, so the code is placed at the start of the
1983 function. If internal_arg_pointer is a non-virtual pseudo,
1984 it needs to be placed after the function that initializes
1985 that pseudo. */
1986 push_topmost_sequence ();
1987 if (REG_P (crtl->args.internal_arg_pointer)
1988 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1989 emit_insn_before (seq, parm_birth_insn);
1990 else
1991 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1992 pop_topmost_sequence ();
1993 return temp;
1994 }
1995 }
1996
1997 /* Perform an untyped call and save the state required to perform an
1998 untyped return of whatever value was returned by the given function. */
1999
2000 static rtx
2001 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2002 {
2003 int size, align, regno;
2004 fixed_size_mode mode;
2005 rtx incoming_args, result, reg, dest, src;
2006 rtx_call_insn *call_insn;
2007 rtx old_stack_level = 0;
2008 rtx call_fusage = 0;
2009 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2010
2011 arguments = convert_memory_address (Pmode, arguments);
2012
2013 /* Create a block where the return registers can be saved. */
2014 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2015
2016 /* Fetch the arg pointer from the ARGUMENTS block. */
2017 incoming_args = gen_reg_rtx (Pmode);
2018 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2019 if (!STACK_GROWS_DOWNWARD)
2020 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2021 incoming_args, 0, OPTAB_LIB_WIDEN);
2022
2023 /* Push a new argument block and copy the arguments. Do not allow
2024 the (potential) memcpy call below to interfere with our stack
2025 manipulations. */
2026 do_pending_stack_adjust ();
2027 NO_DEFER_POP;
2028
2029 /* Save the stack with nonlocal if available. */
2030 if (targetm.have_save_stack_nonlocal ())
2031 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2032 else
2033 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2034
2035 /* Allocate a block of memory onto the stack and copy the memory
2036 arguments to the outgoing arguments address. We can pass TRUE
2037 as the last argument because we just saved the stack pointer
2038 and will restore it right after the call. */
2039 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2040
2041 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2042 may have already set current_function_calls_alloca to true.
2043 current_function_calls_alloca won't be set if argsize is zero,
2044 so we have to guarantee need_drap is true here. */
2045 if (SUPPORTS_STACK_ALIGNMENT)
2046 crtl->need_drap = true;
2047
2048 dest = virtual_outgoing_args_rtx;
2049 if (!STACK_GROWS_DOWNWARD)
2050 {
2051 if (CONST_INT_P (argsize))
2052 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2053 else
2054 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2055 }
2056 dest = gen_rtx_MEM (BLKmode, dest);
2057 set_mem_align (dest, PARM_BOUNDARY);
2058 src = gen_rtx_MEM (BLKmode, incoming_args);
2059 set_mem_align (src, PARM_BOUNDARY);
2060 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2061
2062 /* Refer to the argument block. */
2063 apply_args_size ();
2064 arguments = gen_rtx_MEM (BLKmode, arguments);
2065 set_mem_align (arguments, PARM_BOUNDARY);
2066
2067 /* Walk past the arg-pointer and structure value address. */
2068 size = GET_MODE_SIZE (Pmode);
2069 if (struct_value)
2070 size += GET_MODE_SIZE (Pmode);
2071
2072 /* Restore each of the registers previously saved. Make USE insns
2073 for each of these registers for use in making the call. */
2074 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2075 if ((mode = apply_args_mode[regno]) != VOIDmode)
2076 {
2077 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2078 if (size % align != 0)
2079 size = CEIL (size, align) * align;
2080 reg = gen_rtx_REG (mode, regno);
2081 emit_move_insn (reg, adjust_address (arguments, mode, size));
2082 use_reg (&call_fusage, reg);
2083 size += GET_MODE_SIZE (mode);
2084 }
2085
2086 /* Restore the structure value address unless this is passed as an
2087 "invisible" first argument. */
2088 size = GET_MODE_SIZE (Pmode);
2089 if (struct_value)
2090 {
2091 rtx value = gen_reg_rtx (Pmode);
2092 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2093 emit_move_insn (struct_value, value);
2094 if (REG_P (struct_value))
2095 use_reg (&call_fusage, struct_value);
2096 }
2097
2098 /* All arguments and registers used for the call are set up by now! */
2099 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2100
2101 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
2102 needs to be done, and we don't want to load it into a register as an
2103 optimization, because prepare_call_address already did that if needed. */
2104 if (GET_CODE (function) != SYMBOL_REF)
2105 function = memory_address (FUNCTION_MODE, function);
2106
2107 /* Generate the actual call instruction and save the return value. */
2108 if (targetm.have_untyped_call ())
2109 {
2110 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2111 emit_call_insn (targetm.gen_untyped_call (mem, result,
2112 result_vector (1, result)));
2113 }
2114 else if (targetm.have_call_value ())
2115 {
2116 rtx valreg = 0;
2117
2118 /* Locate the unique return register. It is not possible to
2119 express a call that sets more than one return register using
2120 call_value; use untyped_call for that. In fact, untyped_call
2121 only needs to save the return registers in the given block. */
2122 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2123 if ((mode = apply_result_mode[regno]) != VOIDmode)
2124 {
2125 gcc_assert (!valreg); /* have_untyped_call required. */
2126
2127 valreg = gen_rtx_REG (mode, regno);
2128 }
2129
2130 emit_insn (targetm.gen_call_value (valreg,
2131 gen_rtx_MEM (FUNCTION_MODE, function),
2132 const0_rtx, NULL_RTX, const0_rtx));
2133
2134 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2135 }
2136 else
2137 gcc_unreachable ();
2138
2139 /* Find the CALL insn we just emitted, and attach the register usage
2140 information. */
2141 call_insn = last_call_insn ();
2142 add_function_usage_to (call_insn, call_fusage);
2143
2144 /* Restore the stack. */
2145 if (targetm.have_save_stack_nonlocal ())
2146 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2147 else
2148 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2149 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2150
2151 OK_DEFER_POP;
2152
2153 /* Return the address of the result block. */
2154 result = copy_addr_to_reg (XEXP (result, 0));
2155 return convert_memory_address (ptr_mode, result);
2156 }
2157
2158 /* Perform an untyped return. */
2159
2160 static void
2161 expand_builtin_return (rtx result)
2162 {
2163 int size, align, regno;
2164 fixed_size_mode mode;
2165 rtx reg;
2166 rtx_insn *call_fusage = 0;
2167
2168 result = convert_memory_address (Pmode, result);
2169
2170 apply_result_size ();
2171 result = gen_rtx_MEM (BLKmode, result);
2172
2173 if (targetm.have_untyped_return ())
2174 {
2175 rtx vector = result_vector (0, result);
2176 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2177 emit_barrier ();
2178 return;
2179 }
2180
2181 /* Restore the return value and note that each value is used. */
2182 size = 0;
2183 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2184 if ((mode = apply_result_mode[regno]) != VOIDmode)
2185 {
2186 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2187 if (size % align != 0)
2188 size = CEIL (size, align) * align;
2189 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2190 emit_move_insn (reg, adjust_address (result, mode, size));
2191
2192 push_to_sequence (call_fusage);
2193 emit_use (reg);
2194 call_fusage = get_insns ();
2195 end_sequence ();
2196 size += GET_MODE_SIZE (mode);
2197 }
2198
2199 /* Put the USE insns before the return. */
2200 emit_insn (call_fusage);
2201
2202 /* Return whatever values were restored by jumping directly to the end
2203 of the function. */
2204 expand_naked_return ();
2205 }
2206
2207 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2208
2209 static enum type_class
2210 type_to_class (tree type)
2211 {
2212 switch (TREE_CODE (type))
2213 {
2214 case VOID_TYPE: return void_type_class;
2215 case INTEGER_TYPE: return integer_type_class;
2216 case ENUMERAL_TYPE: return enumeral_type_class;
2217 case BOOLEAN_TYPE: return boolean_type_class;
2218 case POINTER_TYPE: return pointer_type_class;
2219 case REFERENCE_TYPE: return reference_type_class;
2220 case OFFSET_TYPE: return offset_type_class;
2221 case REAL_TYPE: return real_type_class;
2222 case COMPLEX_TYPE: return complex_type_class;
2223 case FUNCTION_TYPE: return function_type_class;
2224 case METHOD_TYPE: return method_type_class;
2225 case RECORD_TYPE: return record_type_class;
2226 case UNION_TYPE:
2227 case QUAL_UNION_TYPE: return union_type_class;
2228 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2229 ? string_type_class : array_type_class);
2230 case LANG_TYPE: return lang_type_class;
2231 default: return no_type_class;
2232 }
2233 }
2234
2235 /* Expand a call EXP to __builtin_classify_type. */
2236
2237 static rtx
2238 expand_builtin_classify_type (tree exp)
2239 {
2240 if (call_expr_nargs (exp))
2241 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2242 return GEN_INT (no_type_class);
2243 }
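
/* Illustrative note: with the mapping above, __builtin_classify_type (3.0)
   folds to real_type_class and __builtin_classify_type ((char *) 0) to
   pointer_type_class; with no argument the call yields no_type_class.  */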
2244
2245 /* This helper macro, meant to be used in mathfn_built_in below, determines
2246 which among a set of builtin math functions is appropriate for a given type
2247 mode. The `F' (float) and `L' (long double) variants are automatically generated
2248 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2249 types, there are additional types that are considered with 'F32', 'F64',
2250 'F128', etc. suffixes. */
2251 #define CASE_MATHFN(MATHFN) \
2252 CASE_CFN_##MATHFN: \
2253 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2254 fcodel = BUILT_IN_##MATHFN##L ; break;
2255 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2256 types. */
2257 #define CASE_MATHFN_FLOATN(MATHFN) \
2258 CASE_CFN_##MATHFN: \
2259 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2260 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2261 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2262 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2263 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2264 break;
2265 /* Similar to above, but appends _R after any F/L suffix. */
2266 #define CASE_MATHFN_REENT(MATHFN) \
2267 case CFN_BUILT_IN_##MATHFN##_R: \
2268 case CFN_BUILT_IN_##MATHFN##F_R: \
2269 case CFN_BUILT_IN_##MATHFN##L_R: \
2270 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2271 fcodel = BUILT_IN_##MATHFN##L_R ; break;
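
/* As an illustration, CASE_MATHFN (SIN) expands to

       CASE_CFN_SIN:
	 fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
	 fcodel = BUILT_IN_SINL; break;

   i.e. each case maps a combined_fn onto the double/float/long double
   built-in codes of the same function.  */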
2272
2273 /* Return a function equivalent to FN but operating on floating-point
2274 values of type TYPE, or END_BUILTINS if no such function exists.
2275 This is purely an operation on function codes; it does not guarantee
2276 that the target actually has an implementation of the function. */
2277
2278 static built_in_function
2279 mathfn_built_in_2 (tree type, combined_fn fn)
2280 {
2281 tree mtype;
2282 built_in_function fcode, fcodef, fcodel;
2283 built_in_function fcodef16 = END_BUILTINS;
2284 built_in_function fcodef32 = END_BUILTINS;
2285 built_in_function fcodef64 = END_BUILTINS;
2286 built_in_function fcodef128 = END_BUILTINS;
2287 built_in_function fcodef32x = END_BUILTINS;
2288 built_in_function fcodef64x = END_BUILTINS;
2289 built_in_function fcodef128x = END_BUILTINS;
2290
2291 switch (fn)
2292 {
2293 #define SEQ_OF_CASE_MATHFN \
2294 CASE_MATHFN (ACOS) \
2295 CASE_MATHFN (ACOSH) \
2296 CASE_MATHFN (ASIN) \
2297 CASE_MATHFN (ASINH) \
2298 CASE_MATHFN (ATAN) \
2299 CASE_MATHFN (ATAN2) \
2300 CASE_MATHFN (ATANH) \
2301 CASE_MATHFN (CBRT) \
2302 CASE_MATHFN_FLOATN (CEIL) \
2303 CASE_MATHFN (CEXPI) \
2304 CASE_MATHFN_FLOATN (COPYSIGN) \
2305 CASE_MATHFN (COS) \
2306 CASE_MATHFN (COSH) \
2307 CASE_MATHFN (DREM) \
2308 CASE_MATHFN (ERF) \
2309 CASE_MATHFN (ERFC) \
2310 CASE_MATHFN (EXP) \
2311 CASE_MATHFN (EXP10) \
2312 CASE_MATHFN (EXP2) \
2313 CASE_MATHFN (EXPM1) \
2314 CASE_MATHFN (FABS) \
2315 CASE_MATHFN (FDIM) \
2316 CASE_MATHFN_FLOATN (FLOOR) \
2317 CASE_MATHFN_FLOATN (FMA) \
2318 CASE_MATHFN_FLOATN (FMAX) \
2319 CASE_MATHFN_FLOATN (FMIN) \
2320 CASE_MATHFN (FMOD) \
2321 CASE_MATHFN (FREXP) \
2322 CASE_MATHFN (GAMMA) \
2323 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2324 CASE_MATHFN (HUGE_VAL) \
2325 CASE_MATHFN (HYPOT) \
2326 CASE_MATHFN (ILOGB) \
2327 CASE_MATHFN (ICEIL) \
2328 CASE_MATHFN (IFLOOR) \
2329 CASE_MATHFN (INF) \
2330 CASE_MATHFN (IRINT) \
2331 CASE_MATHFN (IROUND) \
2332 CASE_MATHFN (ISINF) \
2333 CASE_MATHFN (J0) \
2334 CASE_MATHFN (J1) \
2335 CASE_MATHFN (JN) \
2336 CASE_MATHFN (LCEIL) \
2337 CASE_MATHFN (LDEXP) \
2338 CASE_MATHFN (LFLOOR) \
2339 CASE_MATHFN (LGAMMA) \
2340 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2341 CASE_MATHFN (LLCEIL) \
2342 CASE_MATHFN (LLFLOOR) \
2343 CASE_MATHFN (LLRINT) \
2344 CASE_MATHFN (LLROUND) \
2345 CASE_MATHFN (LOG) \
2346 CASE_MATHFN (LOG10) \
2347 CASE_MATHFN (LOG1P) \
2348 CASE_MATHFN (LOG2) \
2349 CASE_MATHFN (LOGB) \
2350 CASE_MATHFN (LRINT) \
2351 CASE_MATHFN (LROUND) \
2352 CASE_MATHFN (MODF) \
2353 CASE_MATHFN (NAN) \
2354 CASE_MATHFN (NANS) \
2355 CASE_MATHFN_FLOATN (NEARBYINT) \
2356 CASE_MATHFN (NEXTAFTER) \
2357 CASE_MATHFN (NEXTTOWARD) \
2358 CASE_MATHFN (POW) \
2359 CASE_MATHFN (POWI) \
2360 CASE_MATHFN (POW10) \
2361 CASE_MATHFN (REMAINDER) \
2362 CASE_MATHFN (REMQUO) \
2363 CASE_MATHFN_FLOATN (RINT) \
2364 CASE_MATHFN_FLOATN (ROUND) \
2365 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2366 CASE_MATHFN (SCALB) \
2367 CASE_MATHFN (SCALBLN) \
2368 CASE_MATHFN (SCALBN) \
2369 CASE_MATHFN (SIGNBIT) \
2370 CASE_MATHFN (SIGNIFICAND) \
2371 CASE_MATHFN (SIN) \
2372 CASE_MATHFN (SINCOS) \
2373 CASE_MATHFN (SINH) \
2374 CASE_MATHFN_FLOATN (SQRT) \
2375 CASE_MATHFN (TAN) \
2376 CASE_MATHFN (TANH) \
2377 CASE_MATHFN (TGAMMA) \
2378 CASE_MATHFN_FLOATN (TRUNC) \
2379 CASE_MATHFN (Y0) \
2380 CASE_MATHFN (Y1) \
2381 CASE_MATHFN (YN)
2382
2383 SEQ_OF_CASE_MATHFN
2384
2385 default:
2386 return END_BUILTINS;
2387 }
2388
2389 mtype = TYPE_MAIN_VARIANT (type);
2390 if (mtype == double_type_node)
2391 return fcode;
2392 else if (mtype == float_type_node)
2393 return fcodef;
2394 else if (mtype == long_double_type_node)
2395 return fcodel;
2396 else if (mtype == float16_type_node)
2397 return fcodef16;
2398 else if (mtype == float32_type_node)
2399 return fcodef32;
2400 else if (mtype == float64_type_node)
2401 return fcodef64;
2402 else if (mtype == float128_type_node)
2403 return fcodef128;
2404 else if (mtype == float32x_type_node)
2405 return fcodef32x;
2406 else if (mtype == float64x_type_node)
2407 return fcodef64x;
2408 else if (mtype == float128x_type_node)
2409 return fcodef128x;
2410 else
2411 return END_BUILTINS;
2412 }
2413
2414 #undef CASE_MATHFN
2415 #undef CASE_MATHFN_FLOATN
2416 #undef CASE_MATHFN_REENT
2417
2418 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2419 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2420 otherwise use the explicit declaration. If we can't do the conversion,
2421 return null. */
2422
2423 static tree
2424 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2425 {
2426 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2427 if (fcode2 == END_BUILTINS)
2428 return NULL_TREE;
2429
2430 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2431 return NULL_TREE;
2432
2433 return builtin_decl_explicit (fcode2);
2434 }
2435
2436 /* Like mathfn_built_in_1, but always use the implicit builtins. */
2437
2438 tree
2439 mathfn_built_in (tree type, combined_fn fn)
2440 {
2441 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2442 }
2443
2444 /* Like mathfn_built_in_1, but take a built_in_function and
2445 always use the implicit builtins. */
2446
2447 tree
2448 mathfn_built_in (tree type, enum built_in_function fn)
2449 {
2450 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2451 }
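
/* Illustrative use: mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the declaration of sinf (provided the implicit builtin is available for
   that type), and NULL_TREE otherwise.  */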
2452
2453 /* Return the type associated with a built in function, i.e., the one
2454 to be passed to mathfn_built_in to get the type-specific
2455 function. */
2456
2457 tree
2458 mathfn_built_in_type (combined_fn fn)
2459 {
2460 #define CASE_MATHFN(MATHFN) \
2461 case CFN_BUILT_IN_##MATHFN: \
2462 return double_type_node; \
2463 case CFN_BUILT_IN_##MATHFN##F: \
2464 return float_type_node; \
2465 case CFN_BUILT_IN_##MATHFN##L: \
2466 return long_double_type_node;
2467
2468 #define CASE_MATHFN_FLOATN(MATHFN) \
2469 CASE_MATHFN(MATHFN) \
2470 case CFN_BUILT_IN_##MATHFN##F16: \
2471 return float16_type_node; \
2472 case CFN_BUILT_IN_##MATHFN##F32: \
2473 return float32_type_node; \
2474 case CFN_BUILT_IN_##MATHFN##F64: \
2475 return float64_type_node; \
2476 case CFN_BUILT_IN_##MATHFN##F128: \
2477 return float128_type_node; \
2478 case CFN_BUILT_IN_##MATHFN##F32X: \
2479 return float32x_type_node; \
2480 case CFN_BUILT_IN_##MATHFN##F64X: \
2481 return float64x_type_node; \
2482 case CFN_BUILT_IN_##MATHFN##F128X: \
2483 return float128x_type_node;
2484
2485 /* Similar to above, but appends _R after any F/L suffix. */
2486 #define CASE_MATHFN_REENT(MATHFN) \
2487 case CFN_BUILT_IN_##MATHFN##_R: \
2488 return double_type_node; \
2489 case CFN_BUILT_IN_##MATHFN##F_R: \
2490 return float_type_node; \
2491 case CFN_BUILT_IN_##MATHFN##L_R: \
2492 return long_double_type_node;
2493
2494 switch (fn)
2495 {
2496 SEQ_OF_CASE_MATHFN
2497
2498 default:
2499 return NULL_TREE;
2500 }
2501
2502 #undef CASE_MATHFN
2503 #undef CASE_MATHFN_FLOATN
2504 #undef CASE_MATHFN_REENT
2505 #undef SEQ_OF_CASE_MATHFN
2506 }
2507
2508 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2509 return its code, otherwise return IFN_LAST. Note that this function
2510 only tests whether the function is defined in internal-fn.def, not whether
2511 it is actually available on the target. */
2512
2513 internal_fn
2514 associated_internal_fn (tree fndecl)
2515 {
2516 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2517 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2518 switch (DECL_FUNCTION_CODE (fndecl))
2519 {
2520 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2521 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2522 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2523 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2524 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2525 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2526 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2527 #include "internal-fn.def"
2528
2529 CASE_FLT_FN (BUILT_IN_POW10):
2530 return IFN_EXP10;
2531
2532 CASE_FLT_FN (BUILT_IN_DREM):
2533 return IFN_REMAINDER;
2534
2535 CASE_FLT_FN (BUILT_IN_SCALBN):
2536 CASE_FLT_FN (BUILT_IN_SCALBLN):
2537 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2538 return IFN_LDEXP;
2539 return IFN_LAST;
2540
2541 default:
2542 return IFN_LAST;
2543 }
2544 }
2545
2546 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2547 on the current target by a call to an internal function, return the
2548 code of that internal function, otherwise return IFN_LAST. The caller
2549 is responsible for ensuring that any side-effects of the built-in
2550 call are dealt with correctly. E.g. if CALL sets errno, the caller
2551 must decide that the errno result isn't needed or make it available
2552 in some other way. */
2553
2554 internal_fn
2555 replacement_internal_fn (gcall *call)
2556 {
2557 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2558 {
2559 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2560 if (ifn != IFN_LAST)
2561 {
2562 tree_pair types = direct_internal_fn_types (ifn, call);
2563 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2564 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2565 return ifn;
2566 }
2567 }
2568 return IFN_LAST;
2569 }
2570
2571 /* Expand a call to the builtin trinary math functions (fma).
2572 Return NULL_RTX if a normal call should be emitted rather than expanding the
2573 function in-line. EXP is the expression that is a call to the builtin
2574 function; if convenient, the result should be placed in TARGET.
2575 SUBTARGET may be used as the target for computing one of EXP's
2576 operands. */
2577
2578 static rtx
2579 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2580 {
2581 optab builtin_optab;
2582 rtx op0, op1, op2, result;
2583 rtx_insn *insns;
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg0, arg1, arg2;
2586 machine_mode mode;
2587
2588 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2589 return NULL_RTX;
2590
2591 arg0 = CALL_EXPR_ARG (exp, 0);
2592 arg1 = CALL_EXPR_ARG (exp, 1);
2593 arg2 = CALL_EXPR_ARG (exp, 2);
2594
2595 switch (DECL_FUNCTION_CODE (fndecl))
2596 {
2597 CASE_FLT_FN (BUILT_IN_FMA):
2598 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2599 builtin_optab = fma_optab; break;
2600 default:
2601 gcc_unreachable ();
2602 }
2603
2604 /* Make a suitable register to place result in. */
2605 mode = TYPE_MODE (TREE_TYPE (exp));
2606
2607 /* Before working hard, check whether the instruction is available. */
2608 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2609 return NULL_RTX;
2610
2611 result = gen_reg_rtx (mode);
2612
2613 /* Always stabilize the argument list. */
2614 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2615 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2616 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2617
2618 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2619 op1 = expand_normal (arg1);
2620 op2 = expand_normal (arg2);
2621
2622 start_sequence ();
2623
2624 /* Compute into RESULT.
2625 Set RESULT to wherever the result comes back. */
2626 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2627 result, 0);
2628
2629 /* If we were unable to expand via the builtin, stop the sequence
2630 (without outputting the insns) and call the library function
2631 with the stabilized argument list. */
2632 if (result == 0)
2633 {
2634 end_sequence ();
2635 return expand_call (exp, target, target == const0_rtx);
2636 }
2637
2638 /* Output the entire sequence. */
2639 insns = get_insns ();
2640 end_sequence ();
2641 emit_insn (insns);
2642
2643 return result;
2644 }
2645
2646 /* Expand a call to the builtin sin and cos math functions.
2647 Return NULL_RTX if a normal call should be emitted rather than expanding the
2648 function in-line. EXP is the expression that is a call to the builtin
2649 function; if convenient, the result should be placed in TARGET.
2650 SUBTARGET may be used as the target for computing one of EXP's
2651 operands. */
2652
2653 static rtx
2654 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2655 {
2656 optab builtin_optab;
2657 rtx op0;
2658 rtx_insn *insns;
2659 tree fndecl = get_callee_fndecl (exp);
2660 machine_mode mode;
2661 tree arg;
2662
2663 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2664 return NULL_RTX;
2665
2666 arg = CALL_EXPR_ARG (exp, 0);
2667
2668 switch (DECL_FUNCTION_CODE (fndecl))
2669 {
2670 CASE_FLT_FN (BUILT_IN_SIN):
2671 CASE_FLT_FN (BUILT_IN_COS):
2672 builtin_optab = sincos_optab; break;
2673 default:
2674 gcc_unreachable ();
2675 }
2676
2677 /* Make a suitable register to place result in. */
2678 mode = TYPE_MODE (TREE_TYPE (exp));
2679
2680 /* Check if the sincos insn is available; otherwise fall back
2681 to the sin or cos insn. */
2682 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2683 switch (DECL_FUNCTION_CODE (fndecl))
2684 {
2685 CASE_FLT_FN (BUILT_IN_SIN):
2686 builtin_optab = sin_optab; break;
2687 CASE_FLT_FN (BUILT_IN_COS):
2688 builtin_optab = cos_optab; break;
2689 default:
2690 gcc_unreachable ();
2691 }
2692
2693 /* Before working hard, check whether the instruction is available. */
2694 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2695 {
2696 rtx result = gen_reg_rtx (mode);
2697
2698 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2699 need to expand the argument again. This way, we will not perform
2700 side-effects more than once. */
2701 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2702
2703 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2704
2705 start_sequence ();
2706
2707 /* Compute into RESULT.
2708 Set RESULT to wherever the result comes back. */
2709 if (builtin_optab == sincos_optab)
2710 {
2711 int ok;
2712
2713 switch (DECL_FUNCTION_CODE (fndecl))
2714 {
2715 CASE_FLT_FN (BUILT_IN_SIN):
2716 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2717 break;
2718 CASE_FLT_FN (BUILT_IN_COS):
2719 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2720 break;
2721 default:
2722 gcc_unreachable ();
2723 }
2724 gcc_assert (ok);
2725 }
2726 else
2727 result = expand_unop (mode, builtin_optab, op0, result, 0);
2728
2729 if (result != 0)
2730 {
2731 /* Output the entire sequence. */
2732 insns = get_insns ();
2733 end_sequence ();
2734 emit_insn (insns);
2735 return result;
2736 }
2737
2738 /* If we were unable to expand via the builtin, stop the sequence
2739 (without outputting the insns) and call the library function
2740 with the stabilized argument list. */
2741 end_sequence ();
2742 }
2743
2744 return expand_call (exp, target, target == const0_rtx);
2745 }
2746
2747 /* Given an interclass math builtin decl FNDECL and its argument ARG
2748 return an RTL instruction code that implements the functionality.
2749 If that isn't possible or available return CODE_FOR_nothing. */
2750
2751 static enum insn_code
2752 interclass_mathfn_icode (tree arg, tree fndecl)
2753 {
2754 bool errno_set = false;
2755 optab builtin_optab = unknown_optab;
2756 machine_mode mode;
2757
2758 switch (DECL_FUNCTION_CODE (fndecl))
2759 {
2760 CASE_FLT_FN (BUILT_IN_ILOGB):
2761 errno_set = true; builtin_optab = ilogb_optab; break;
2762 CASE_FLT_FN (BUILT_IN_ISINF):
2763 builtin_optab = isinf_optab; break;
2764 case BUILT_IN_ISNORMAL:
2765 case BUILT_IN_ISFINITE:
2766 CASE_FLT_FN (BUILT_IN_FINITE):
2767 case BUILT_IN_FINITED32:
2768 case BUILT_IN_FINITED64:
2769 case BUILT_IN_FINITED128:
2770 case BUILT_IN_ISINFD32:
2771 case BUILT_IN_ISINFD64:
2772 case BUILT_IN_ISINFD128:
2773 /* These builtins have no optabs (yet). */
2774 break;
2775 default:
2776 gcc_unreachable ();
2777 }
2778
2779 /* There's no easy way to detect the case we need to set EDOM. */
2780 if (flag_errno_math && errno_set)
2781 return CODE_FOR_nothing;
2782
2783 /* Optab mode depends on the mode of the input argument. */
2784 mode = TYPE_MODE (TREE_TYPE (arg));
2785
2786 if (builtin_optab)
2787 return optab_handler (builtin_optab, mode);
2788 return CODE_FOR_nothing;
2789 }
2790
2791 /* Expand a call to one of the builtin math functions that operate on
2792 a floating-point argument and output an integer result (ilogb, isinf,
2793 isnan, etc).
2794 Return 0 if a normal call should be emitted rather than expanding the
2795 function in-line. EXP is the expression that is a call to the builtin
2796 function; if convenient, the result should be placed in TARGET. */
2797
2798 static rtx
2799 expand_builtin_interclass_mathfn (tree exp, rtx target)
2800 {
2801 enum insn_code icode = CODE_FOR_nothing;
2802 rtx op0;
2803 tree fndecl = get_callee_fndecl (exp);
2804 machine_mode mode;
2805 tree arg;
2806
2807 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2808 return NULL_RTX;
2809
2810 arg = CALL_EXPR_ARG (exp, 0);
2811 icode = interclass_mathfn_icode (arg, fndecl);
2812 mode = TYPE_MODE (TREE_TYPE (arg));
2813
2814 if (icode != CODE_FOR_nothing)
2815 {
2816 class expand_operand ops[1];
2817 rtx_insn *last = get_last_insn ();
2818 tree orig_arg = arg;
2819
2820 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2821 need to expand the argument again. This way, we will not perform
2822 side-effects more than once. */
2823 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2824
2825 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2826
2827 if (mode != GET_MODE (op0))
2828 op0 = convert_to_mode (mode, op0, 0);
2829
2830 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2831 if (maybe_legitimize_operands (icode, 0, 1, ops)
2832 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2833 return ops[0].value;
2834
2835 delete_insns_since (last);
2836 CALL_EXPR_ARG (exp, 0) = orig_arg;
2837 }
2838
2839 return NULL_RTX;
2840 }
2841
2842 /* Expand a call to the builtin sincos math function.
2843 Return NULL_RTX if a normal call should be emitted rather than expanding the
2844 function in-line. EXP is the expression that is a call to the builtin
2845 function. */
2846
2847 static rtx
2848 expand_builtin_sincos (tree exp)
2849 {
2850 rtx op0, op1, op2, target1, target2;
2851 machine_mode mode;
2852 tree arg, sinp, cosp;
2853 int result;
2854 location_t loc = EXPR_LOCATION (exp);
2855 tree alias_type, alias_off;
2856
2857 if (!validate_arglist (exp, REAL_TYPE,
2858 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2859 return NULL_RTX;
2860
2861 arg = CALL_EXPR_ARG (exp, 0);
2862 sinp = CALL_EXPR_ARG (exp, 1);
2863 cosp = CALL_EXPR_ARG (exp, 2);
2864
2865 /* Make a suitable register to place result in. */
2866 mode = TYPE_MODE (TREE_TYPE (arg));
2867
2868 /* Check if sincos insn is available, otherwise emit the call. */
2869 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2870 return NULL_RTX;
2871
2872 target1 = gen_reg_rtx (mode);
2873 target2 = gen_reg_rtx (mode);
2874
2875 op0 = expand_normal (arg);
2876 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2877 alias_off = build_int_cst (alias_type, 0);
2878 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2879 sinp, alias_off));
2880 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2881 cosp, alias_off));
2882
2883 /* Compute into target1 and target2.
2884 Set TARGET to wherever the result comes back. */
2885 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2886 gcc_assert (result);
2887
2888 /* Move target1 and target2 to the memory locations indicated
2889 by op1 and op2. */
2890 emit_move_insn (op1, target1);
2891 emit_move_insn (op2, target2);
2892
2893 return const0_rtx;
2894 }
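
/* Illustrative note: a source-level call such as

       sincos (x, &s, &c);

   is expanded above into a single sincos-pattern insn computing both
   values, followed by the two stores; when the optab is unavailable the
   expander returns NULL_RTX and a normal library call is emitted.  */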
2895
2896 /* Expand a call to the internal cexpi builtin to the sincos math function.
2897 EXP is the expression that is a call to the builtin function; if convenient,
2898 the result should be placed in TARGET. */
2899
2900 static rtx
2901 expand_builtin_cexpi (tree exp, rtx target)
2902 {
2903 tree fndecl = get_callee_fndecl (exp);
2904 tree arg, type;
2905 machine_mode mode;
2906 rtx op0, op1, op2;
2907 location_t loc = EXPR_LOCATION (exp);
2908
2909 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2910 return NULL_RTX;
2911
2912 arg = CALL_EXPR_ARG (exp, 0);
2913 type = TREE_TYPE (arg);
2914 mode = TYPE_MODE (TREE_TYPE (arg));
2915
2916 /* Try expanding via a sincos optab, fall back to emitting a libcall
2917 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2918 is only generated from sincos, cexp or if we have either of them. */
2919 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2920 {
2921 op1 = gen_reg_rtx (mode);
2922 op2 = gen_reg_rtx (mode);
2923
2924 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2925
2926 /* Compute into op1 and op2. */
2927 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2928 }
2929 else if (targetm.libc_has_function (function_sincos, type))
2930 {
2931 tree call, fn = NULL_TREE;
2932 tree top1, top2;
2933 rtx op1a, op2a;
2934
2935 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2936 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2937 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2938 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2939 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2940 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2941 else
2942 gcc_unreachable ();
2943
2944 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2945 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2946 op1a = copy_addr_to_reg (XEXP (op1, 0));
2947 op2a = copy_addr_to_reg (XEXP (op2, 0));
2948 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2949 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2950
2951 /* Make sure not to fold the sincos call again. */
2952 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2953 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2954 call, 3, arg, top1, top2));
2955 }
2956 else
2957 {
2958 tree call, fn = NULL_TREE, narg;
2959 tree ctype = build_complex_type (type);
2960
2961 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2962 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2963 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2964 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2965 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2966 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2967 else
2968 gcc_unreachable ();
2969
2970 /* If we don't have a decl for cexp create one. This is the
2971 friendliest fallback if the user calls __builtin_cexpi
2972 without full target C99 function support. */
2973 if (fn == NULL_TREE)
2974 {
2975 tree fntype;
2976 const char *name = NULL;
2977
2978 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2979 name = "cexpf";
2980 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2981 name = "cexp";
2982 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2983 name = "cexpl";
2984
2985 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2986 fn = build_fn_decl (name, fntype);
2987 }
2988
2989 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2990 build_real (type, dconst0), arg);
2991
2992 /* Make sure not to fold the cexp call again. */
2993 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2994 return expand_expr (build_call_nary (ctype, call, 1, narg),
2995 target, VOIDmode, EXPAND_NORMAL);
2996 }
2997
2998 /* Now build the proper return type. */
2999 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3000 make_tree (TREE_TYPE (arg), op2),
3001 make_tree (TREE_TYPE (arg), op1)),
3002 target, VOIDmode, EXPAND_NORMAL);
3003 }
3004
3005 /* Conveniently construct a function call expression. FNDECL names the
3006 function to be called, N is the number of arguments, and the "..."
3007 parameters are the argument expressions. Unlike build_call_expr
3008 this doesn't fold the call, hence it will always return a CALL_EXPR. */
3009
3010 static tree
3011 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3012 {
3013 va_list ap;
3014 tree fntype = TREE_TYPE (fndecl);
3015 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3016
3017 va_start (ap, n);
3018 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3019 va_end (ap);
3020 SET_EXPR_LOCATION (fn, loc);
3021 return fn;
3022 }
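
/* Illustrative use (names are placeholders): a two-argument call is
   built as

       call = build_call_nofold_loc (loc, fndecl, 2, arg0, arg1);

   and, unlike build_call_expr, the result is always a CALL_EXPR.  */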
3023
3024 /* Expand a call to one of the builtin rounding functions gcc defines
3025 as an extension (lfloor and lceil). As these are gcc extensions we
3026 do not need to worry about setting errno to EDOM.
3027 If expanding via the optab fails, lower the expression to (int)(floor(x)).
3028 EXP is the expression that is a call to the builtin function;
3029 if convenient, the result should be placed in TARGET. */
3030
3031 static rtx
3032 expand_builtin_int_roundingfn (tree exp, rtx target)
3033 {
3034 convert_optab builtin_optab;
3035 rtx op0, tmp;
3036 rtx_insn *insns;
3037 tree fndecl = get_callee_fndecl (exp);
3038 enum built_in_function fallback_fn;
3039 tree fallback_fndecl;
3040 machine_mode mode;
3041 tree arg;
3042
3043 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3044 return NULL_RTX;
3045
3046 arg = CALL_EXPR_ARG (exp, 0);
3047
3048 switch (DECL_FUNCTION_CODE (fndecl))
3049 {
3050 CASE_FLT_FN (BUILT_IN_ICEIL):
3051 CASE_FLT_FN (BUILT_IN_LCEIL):
3052 CASE_FLT_FN (BUILT_IN_LLCEIL):
3053 builtin_optab = lceil_optab;
3054 fallback_fn = BUILT_IN_CEIL;
3055 break;
3056
3057 CASE_FLT_FN (BUILT_IN_IFLOOR):
3058 CASE_FLT_FN (BUILT_IN_LFLOOR):
3059 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3060 builtin_optab = lfloor_optab;
3061 fallback_fn = BUILT_IN_FLOOR;
3062 break;
3063
3064 default:
3065 gcc_unreachable ();
3066 }
3067
3068 /* Make a suitable register to place result in. */
3069 mode = TYPE_MODE (TREE_TYPE (exp));
3070
3071 target = gen_reg_rtx (mode);
3072
3073 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3074 need to expand the argument again. This way, we will not perform
3075 side-effects more than once. */
3076 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3077
3078 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3079
3080 start_sequence ();
3081
3082 /* Compute into TARGET. */
3083 if (expand_sfix_optab (target, op0, builtin_optab))
3084 {
3085 /* Output the entire sequence. */
3086 insns = get_insns ();
3087 end_sequence ();
3088 emit_insn (insns);
3089 return target;
3090 }
3091
3092 /* If we were unable to expand via the builtin, stop the sequence
3093 (without outputting the insns). */
3094 end_sequence ();
3095
3096 /* Fall back to floating point rounding optab. */
3097 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3098
3099 /* For non-C99 targets we may end up without a fallback fndecl here
3100 if the user called __builtin_lfloor directly. In this case emit
3101 a call to the floor/ceil variants nevertheless. This should result
3102 in the best user experience for targets without full C99 support. */
3103 if (fallback_fndecl == NULL_TREE)
3104 {
3105 tree fntype;
3106 const char *name = NULL;
3107
3108 switch (DECL_FUNCTION_CODE (fndecl))
3109 {
3110 case BUILT_IN_ICEIL:
3111 case BUILT_IN_LCEIL:
3112 case BUILT_IN_LLCEIL:
3113 name = "ceil";
3114 break;
3115 case BUILT_IN_ICEILF:
3116 case BUILT_IN_LCEILF:
3117 case BUILT_IN_LLCEILF:
3118 name = "ceilf";
3119 break;
3120 case BUILT_IN_ICEILL:
3121 case BUILT_IN_LCEILL:
3122 case BUILT_IN_LLCEILL:
3123 name = "ceill";
3124 break;
3125 case BUILT_IN_IFLOOR:
3126 case BUILT_IN_LFLOOR:
3127 case BUILT_IN_LLFLOOR:
3128 name = "floor";
3129 break;
3130 case BUILT_IN_IFLOORF:
3131 case BUILT_IN_LFLOORF:
3132 case BUILT_IN_LLFLOORF:
3133 name = "floorf";
3134 break;
3135 case BUILT_IN_IFLOORL:
3136 case BUILT_IN_LFLOORL:
3137 case BUILT_IN_LLFLOORL:
3138 name = "floorl";
3139 break;
3140 default:
3141 gcc_unreachable ();
3142 }
3143
3144 fntype = build_function_type_list (TREE_TYPE (arg),
3145 TREE_TYPE (arg), NULL_TREE);
3146 fallback_fndecl = build_fn_decl (name, fntype);
3147 }
3148
3149 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3150
3151 tmp = expand_normal (exp);
3152 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3153
3154 /* Truncate the result of floating point optab to integer
3155 via expand_fix (). */
3156 target = gen_reg_rtx (mode);
3157 expand_fix (target, tmp, 0);
3158
3159 return target;
3160 }
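
/* Illustrative note: on a target without an lfloor pattern, a call like
   __builtin_lfloor (x) is lowered by the fallback path above to roughly

       (long) floor (x)

   i.e. a call to the floating-point floor followed by expand_fix.  */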
3161
3162 /* Expand a call to one of the builtin math functions doing integer
3163 conversion (lrint).
3164 Return 0 if a normal call should be emitted rather than expanding the
3165 function in-line. EXP is the expression that is a call to the builtin
3166 function; if convenient, the result should be placed in TARGET. */
3167
3168 static rtx
3169 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3170 {
3171 convert_optab builtin_optab;
3172 rtx op0;
3173 rtx_insn *insns;
3174 tree fndecl = get_callee_fndecl (exp);
3175 tree arg;
3176 machine_mode mode;
3177 enum built_in_function fallback_fn = BUILT_IN_NONE;
3178
3179 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3180 return NULL_RTX;
3181
3182 arg = CALL_EXPR_ARG (exp, 0);
3183
3184 switch (DECL_FUNCTION_CODE (fndecl))
3185 {
3186 CASE_FLT_FN (BUILT_IN_IRINT):
3187 fallback_fn = BUILT_IN_LRINT;
3188 gcc_fallthrough ();
3189 CASE_FLT_FN (BUILT_IN_LRINT):
3190 CASE_FLT_FN (BUILT_IN_LLRINT):
3191 builtin_optab = lrint_optab;
3192 break;
3193
3194 CASE_FLT_FN (BUILT_IN_IROUND):
3195 fallback_fn = BUILT_IN_LROUND;
3196 gcc_fallthrough ();
3197 CASE_FLT_FN (BUILT_IN_LROUND):
3198 CASE_FLT_FN (BUILT_IN_LLROUND):
3199 builtin_optab = lround_optab;
3200 break;
3201
3202 default:
3203 gcc_unreachable ();
3204 }
3205
3206 /* There's no easy way to detect the case we need to set EDOM. */
3207 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3208 return NULL_RTX;
3209
3210 /* Make a suitable register to place result in. */
3211 mode = TYPE_MODE (TREE_TYPE (exp));
3212
3213 /* There's no easy way to detect the case we need to set EDOM. */
3214 if (!flag_errno_math)
3215 {
3216 rtx result = gen_reg_rtx (mode);
3217
3218 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3219 need to expand the argument again. This way, we will not perform
3220 side-effects more than once. */
3221 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3222
3223 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3224
3225 start_sequence ();
3226
3227 if (expand_sfix_optab (result, op0, builtin_optab))
3228 {
3229 /* Output the entire sequence. */
3230 insns = get_insns ();
3231 end_sequence ();
3232 emit_insn (insns);
3233 return result;
3234 }
3235
3236 /* If we were unable to expand via the builtin, stop the sequence
3237 (without outputting the insns) and call the library function
3238 with the stabilized argument list. */
3239 end_sequence ();
3240 }
3241
3242 if (fallback_fn != BUILT_IN_NONE)
3243 {
3244 /* Fall back to rounding to long int. Use implicit_p 0 because, for
3245 non-C99 targets, (int) round (x) should never be transformed into
3246 BUILT_IN_IROUND; if __builtin_iround is called directly, emit a call
3247 to lround in the hope that the target provides at least some C99
3248 functions. This should give the best user experience for targets
3249 without full C99 support. */
3250 tree fallback_fndecl = mathfn_built_in_1
3251 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3252
3253 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3254 fallback_fndecl, 1, arg);
3255
3256 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3257 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3258 return convert_to_mode (mode, target, 0);
3259 }
3260
3261 return expand_call (exp, target, target == const0_rtx);
3262 }
3263
3264 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3265 a normal call should be emitted rather than expanding the function
3266 in-line. EXP is the expression that is a call to the builtin
3267 function; if convenient, the result should be placed in TARGET. */
3268
3269 static rtx
3270 expand_builtin_powi (tree exp, rtx target)
3271 {
3272 tree arg0, arg1;
3273 rtx op0, op1;
3274 machine_mode mode;
3275 machine_mode mode2;
3276
3277 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3278 return NULL_RTX;
3279
3280 arg0 = CALL_EXPR_ARG (exp, 0);
3281 arg1 = CALL_EXPR_ARG (exp, 1);
3282 mode = TYPE_MODE (TREE_TYPE (exp));
3283
3284 /* Emit a libcall to libgcc. */
3285
3286 /* Mode of the 2nd argument must match that of an int. */
3287 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3288
3289 if (target == NULL_RTX)
3290 target = gen_reg_rtx (mode);
3291
3292 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3293 if (GET_MODE (op0) != mode)
3294 op0 = convert_to_mode (mode, op0, 0);
3295 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3296 if (GET_MODE (op1) != mode2)
3297 op1 = convert_to_mode (mode2, op1, 0);
3298
3299 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3300 target, LCT_CONST, mode,
3301 op0, mode, op1, mode2);
3302
3303 return target;
3304 }
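
/* Illustrative note: __builtin_powi (x, n) is not expanded inline here; it
   always becomes a libcall to the libgcc powi routine for the mode
   (e.g. __powidf2 for double).  */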
3305
3306 /* Expand expression EXP which is a call to the strlen builtin. Return
3307 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3308 try to get the result in TARGET, if convenient. */
3309
3310 static rtx
3311 expand_builtin_strlen (tree exp, rtx target,
3312 machine_mode target_mode)
3313 {
3314 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3315 return NULL_RTX;
3316
3317 tree src = CALL_EXPR_ARG (exp, 0);
3318 if (!check_read_access (exp, src))
3319 return NULL_RTX;
3320
3321 /* If the length can be computed at compile-time, return it. */
3322 if (tree len = c_strlen (src, 0))
3323 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3324
3325 /* If the length can be computed at compile-time and is a constant
3326 integer, but there are side-effects in src, evaluate
3327 src for side-effects, then return len.
3328 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3329 can be optimized into: i++; x = 3; */
3330 tree len = c_strlen (src, 1);
3331 if (len && TREE_CODE (len) == INTEGER_CST)
3332 {
3333 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3334 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3335 }
3336
3337 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3338
3339 /* If SRC is not a pointer type, don't do this operation inline. */
3340 if (align == 0)
3341 return NULL_RTX;
3342
3343 /* Bail out if we can't compute strlen in the right mode. */
3344 machine_mode insn_mode;
3345 enum insn_code icode = CODE_FOR_nothing;
3346 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3347 {
3348 icode = optab_handler (strlen_optab, insn_mode);
3349 if (icode != CODE_FOR_nothing)
3350 break;
3351 }
3352 if (insn_mode == VOIDmode)
3353 return NULL_RTX;
3354
3355 /* Make a place to hold the source address. We will not expand
3356 the actual source until we are sure that the expansion will
3357 not fail -- there are trees that cannot be expanded twice. */
3358 rtx src_reg = gen_reg_rtx (Pmode);
3359
3360 /* Mark the beginning of the strlen sequence so we can emit the
3361 source operand later. */
3362 rtx_insn *before_strlen = get_last_insn ();
3363
3364 class expand_operand ops[4];
3365 create_output_operand (&ops[0], target, insn_mode);
3366 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3367 create_integer_operand (&ops[2], 0);
3368 create_integer_operand (&ops[3], align);
3369 if (!maybe_expand_insn (icode, 4, ops))
3370 return NULL_RTX;
3371
3372 /* Check to see if the argument was declared attribute nonstring
3373 and if so, issue a warning since at this point it's not known
3374 to be nul-terminated. */
3375 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3376
3377 /* Now that we are assured of success, expand the source. */
3378 start_sequence ();
3379 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3380 if (pat != src_reg)
3381 {
3382 #ifdef POINTERS_EXTEND_UNSIGNED
3383 if (GET_MODE (pat) != Pmode)
3384 pat = convert_to_mode (Pmode, pat,
3385 POINTERS_EXTEND_UNSIGNED);
3386 #endif
3387 emit_move_insn (src_reg, pat);
3388 }
3389 pat = get_insns ();
3390 end_sequence ();
3391
3392 if (before_strlen)
3393 emit_insn_after (pat, before_strlen);
3394 else
3395 emit_insn_before (pat, get_insns ());
3396
3397 /* Return the value in the proper mode for this function. */
3398 if (GET_MODE (ops[0].value) == target_mode)
3399 target = ops[0].value;
3400 else if (target != 0)
3401 convert_move (target, ops[0].value, 0);
3402 else
3403 target = convert_to_mode (target_mode, ops[0].value, 0);
3404
3405 return target;
3406 }
3407
3408 /* Expand expression EXP which is a call to the strnlen builtin, returning
3409 the result in TARGET if convenient, or NULL_RTX on failure. */
3410
3411 static rtx
3412 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3413 {
3414 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3415 return NULL_RTX;
3416
3417 tree src = CALL_EXPR_ARG (exp, 0);
3418 tree bound = CALL_EXPR_ARG (exp, 1);
3419
3420 if (!bound)
3421 return NULL_RTX;
3422
3423 check_read_access (exp, src, bound);
3424
3425 location_t loc = UNKNOWN_LOCATION;
3426 if (EXPR_HAS_LOCATION (exp))
3427 loc = EXPR_LOCATION (exp);
3428
3429 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3430 so these conversions aren't necessary. */
3431 c_strlen_data lendata = { };
3432 tree len = c_strlen (src, 0, &lendata, 1);
3433 if (len)
3434 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3435
3436 if (TREE_CODE (bound) == INTEGER_CST)
3437 {
3438 if (!len)
3439 return NULL_RTX;
3440
3441 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3442 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3443 }
3444
3445 if (TREE_CODE (bound) != SSA_NAME)
3446 return NULL_RTX;
3447
3448 wide_int min, max;
3449 enum value_range_kind rng = get_range_info (bound, &min, &max);
3450 if (rng != VR_RANGE)
3451 return NULL_RTX;
3452
3453 if (!len || TREE_CODE (len) != INTEGER_CST)
3454 {
3455 bool exact;
3456 lendata.decl = unterminated_array (src, &len, &exact);
3457 if (!lendata.decl)
3458 return NULL_RTX;
3459 }
3460
3461 if (lendata.decl)
3462 return NULL_RTX;
3463
3464 if (wi::gtu_p (min, wi::to_wide (len)))
3465 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3466
3467 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3468 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3469 }
3470
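/* A minimal sketch of the constant-bound path above (example added
   for illustration, not from the original sources):

       size_t n = __builtin_strnlen ("hello", 3);

   c_strlen computes the length 5, the bound is the INTEGER_CST 3,
   and the call expands to the constant MIN (5, 3) == 3 with no
   library call.  */
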
3471 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3472 bits from the bytes at DATA + OFFSET and return them reinterpreted as
3473 a target constant. */
3474
3475 static rtx
3476 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3477 scalar_int_mode mode)
3478 {
3479 /* The REPresentation pointed to by DATA need not be a nul-terminated
3480 string but the caller guarantees it's large enough for MODE. */
3481 const char *rep = (const char *) data;
3482
3483 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3484 }
3485
3486 /* LEN specifies the length of the block for a memcpy/memset operation.
3487 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3488 In some cases we can make a very likely guess about the maximum size,
3489 which we then store in PROBABLE_MAX_SIZE. */
3490
3491 static void
3492 determine_block_size (tree len, rtx len_rtx,
3493 unsigned HOST_WIDE_INT *min_size,
3494 unsigned HOST_WIDE_INT *max_size,
3495 unsigned HOST_WIDE_INT *probable_max_size)
3496 {
3497 if (CONST_INT_P (len_rtx))
3498 {
3499 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3500 return;
3501 }
3502 else
3503 {
3504 wide_int min, max;
3505 enum value_range_kind range_type = VR_UNDEFINED;
3506
3507 /* Determine bounds from the type. */
3508 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3509 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3510 else
3511 *min_size = 0;
3512 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3513 *probable_max_size = *max_size
3514 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3515 else
3516 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3517
3518 if (TREE_CODE (len) == SSA_NAME)
3519 range_type = get_range_info (len, &min, &max);
3520 if (range_type == VR_RANGE)
3521 {
3522 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3523 *min_size = min.to_uhwi ();
3524 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3525 *probable_max_size = *max_size = max.to_uhwi ();
3526 }
3527 else if (range_type == VR_ANTI_RANGE)
3528 {
3529 /* Code like
3530
3531 int n;
3532 if (n < 100)
3533 memcpy (a, b, n)
3534
3535 produces an anti-range allowing negative values of N. We can
3536 still use that information to guess that N is not negative.
3537 */
3538 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3539 *probable_max_size = min.to_uhwi () - 1;
3540 }
3541 }
3542 gcc_checking_assert (*max_size <=
3543 (unsigned HOST_WIDE_INT)
3544 GET_MODE_MASK (GET_MODE (len_rtx)));
3545 }
3546
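/* For illustration (hypothetical examples): for

       memcpy (d, s, 128);

   LEN_RTX is a CONST_INT and MIN_SIZE, MAX_SIZE and PROBABLE_MAX_SIZE
   are all set to 128.  For a length SSA_NAME with recorded value range
   [0, 100], MIN_SIZE becomes 0 and both maximums become 100.  */
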
3547 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3548 accessing an object of SIZE bytes. Return true if a warning was issued. */
3549
3550 static bool
3551 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3552 tree bndrng[2], tree size, const access_data *pad = NULL)
3553 {
3554 if (!bndrng[0] || TREE_NO_WARNING (exp))
3555 return false;
3556
3557 tree maxobjsize = max_object_size ();
3558
3559 bool warned = false;
3560
3561 if (opt == OPT_Wstringop_overread)
3562 {
3563 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3564 {
3565 if (bndrng[0] == bndrng[1])
3566 warned = (func
3567 ? warning_at (loc, opt,
3568 "%K%qD specified bound %E "
3569 "exceeds maximum object size %E",
3570 exp, func, bndrng[0], maxobjsize)
3571 : warning_at (loc, opt,
3572 "%Kspecified bound %E "
3573 "exceeds maximum object size %E",
3574 exp, bndrng[0], maxobjsize));
3575 else
3576 warned = (func
3577 ? warning_at (loc, opt,
3578 "%K%qD specified bound [%E, %E] "
3579 "exceeds maximum object size %E",
3580 exp, func,
3581 bndrng[0], bndrng[1], maxobjsize)
3582 : warning_at (loc, opt,
3583 "%Kspecified bound [%E, %E] "
3584 "exceeds maximum object size %E",
3585 exp, bndrng[0], bndrng[1], maxobjsize));
3586 }
3587 else if (!size || tree_int_cst_le (bndrng[0], size))
3588 return false;
3589 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3590 warned = (func
3591 ? warning_at (loc, opt,
3592 "%K%qD specified bound %E exceeds "
3593 "source size %E",
3594 exp, func, bndrng[0], size)
3595 : warning_at (loc, opt,
3596 "%Kspecified bound %E exceeds "
3597 "source size %E",
3598 exp, bndrng[0], size));
3599 else
3600 warned = (func
3601 ? warning_at (loc, opt,
3602 "%K%qD specified bound [%E, %E] exceeds "
3603 "source size %E",
3604 exp, func, bndrng[0], bndrng[1], size)
3605 : warning_at (loc, opt,
3606 "%Kspecified bound [%E, %E] exceeds "
3607 "source size %E",
3608 exp, bndrng[0], bndrng[1], size));
3609 if (warned)
3610 {
3611 if (pad && pad->src.ref)
3612 {
3613 if (DECL_P (pad->src.ref))
3614 inform (DECL_SOURCE_LOCATION (pad->src.ref),
3615 "source object declared here");
3616 else if (EXPR_HAS_LOCATION (pad->src.ref))
3617 inform (EXPR_LOCATION (pad->src.ref),
3618 "source object allocated here");
3619 }
3620 TREE_NO_WARNING (exp) = true;
3621 }
3622
3623 return warned;
3624 }
3625
3626 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3627 {
3628 if (bndrng[0] == bndrng[1])
3629 warned = (func
3630 ? warning_at (loc, opt,
3631 "%K%qD specified size %E "
3632 "exceeds maximum object size %E",
3633 exp, func, bndrng[0], maxobjsize)
3634 : warning_at (loc, opt,
3635 "%Kspecified size %E "
3636 "exceeds maximum object size %E",
3637 exp, bndrng[0], maxobjsize));
3638 else
3639 warned = (func
3640 ? warning_at (loc, opt,
3641 "%K%qD specified size between %E and %E "
3642 "exceeds maximum object size %E",
3643 exp, func,
3644 bndrng[0], bndrng[1], maxobjsize)
3645 : warning_at (loc, opt,
3646 "%Kspecified size between %E and %E "
3647 "exceeds maximum object size %E",
3648 exp, bndrng[0], bndrng[1], maxobjsize));
3649 }
3650 else if (!size || tree_int_cst_le (bndrng[0], size))
3651 return false;
3652 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3653 warned = (func
3654 ? warning_at (loc, OPT_Wstringop_overflow_,
3655 "%K%qD specified bound %E exceeds "
3656 "destination size %E",
3657 exp, func, bndrng[0], size)
3658 : warning_at (loc, OPT_Wstringop_overflow_,
3659 "%Kspecified bound %E exceeds "
3660 "destination size %E",
3661 exp, bndrng[0], size));
3662 else
3663 warned = (func
3664 ? warning_at (loc, OPT_Wstringop_overflow_,
3665 "%K%qD specified bound [%E, %E] exceeds "
3666 "destination size %E",
3667 exp, func, bndrng[0], bndrng[1], size)
3668 : warning_at (loc, OPT_Wstringop_overflow_,
3669 "%Kspecified bound [%E, %E] exceeds "
3670 "destination size %E",
3671 exp, bndrng[0], bndrng[1], size));
3672
3673 if (warned)
3674 {
3675 if (pad && pad->dst.ref)
3676 {
3677 if (DECL_P (pad->dst.ref))
3678 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
3679 "destination object declared here");
3680 else if (EXPR_HAS_LOCATION (pad->dst.ref))
3681 inform (EXPR_LOCATION (pad->dst.ref),
3682 "destination object allocated here");
3683 }
3684 TREE_NO_WARNING (exp) = true;
3685 }
3686
3687 return warned;
3688 }
3689
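/* A hedged example of the kind of call the function above diagnoses
   (illustration only):

       char a[4];
       n = strnlen (a, 16);

   With -Wstringop-overread the expected diagnostic is of the form
   "specified bound 16 exceeds source size 4", matching the format
   strings used above.  */
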
3690 /* For an expression EXP issue an access warning controlled by option OPT
3691 for an access in the RANGE of sizes to a region SIZE bytes in size.
3692 WRITE is true for a write access, READ for a read access, and neither
3693 for a call that may or may not perform an access but for which the
3694 range is expected to be valid.
3695 Returns true when a warning has been issued. */
3696
3697 static bool
3698 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
3699 tree size, bool write, bool read)
3700 {
3701 bool warned = false;
3702
3703 if (write && read)
3704 {
3705 if (tree_int_cst_equal (range[0], range[1]))
3706 warned = (func
3707 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3708 "%K%qD accessing %E byte in a region "
3709 "of size %E",
3710 "%K%qD accessing %E bytes in a region "
3711 "of size %E",
3712 exp, func, range[0], size)
3713 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3714 "%Kaccessing %E byte in a region "
3715 "of size %E",
3716 "%Kaccessing %E bytes in a region "
3717 "of size %E",
3718 exp, range[0], size));
3719 else if (tree_int_cst_sign_bit (range[1]))
3720 {
3721 /* Avoid printing the upper bound if it's invalid. */
3722 warned = (func
3723 ? warning_at (loc, opt,
3724 "%K%qD accessing %E or more bytes in "
3725 "a region of size %E",
3726 exp, func, range[0], size)
3727 : warning_at (loc, opt,
3728 "%Kaccessing %E or more bytes in "
3729 "a region of size %E",
3730 exp, range[0], size));
3731 }
3732 else
3733 warned = (func
3734 ? warning_at (loc, opt,
3735 "%K%qD accessing between %E and %E bytes "
3736 "in a region of size %E",
3737 exp, func, range[0], range[1],
3738 size)
3739 : warning_at (loc, opt,
3740 "%Kaccessing between %E and %E bytes "
3741 "in a region of size %E",
3742 exp, range[0], range[1],
3743 size));
3744 return warned;
3745 }
3746
3747 if (write)
3748 {
3749 if (tree_int_cst_equal (range[0], range[1]))
3750 warned = (func
3751 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3752 "%K%qD writing %E byte into a region "
3753 "of size %E overflows the destination",
3754 "%K%qD writing %E bytes into a region "
3755 "of size %E overflows the destination",
3756 exp, func, range[0], size)
3757 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3758 "%Kwriting %E byte into a region "
3759 "of size %E overflows the destination",
3760 "%Kwriting %E bytes into a region "
3761 "of size %E overflows the destination",
3762 exp, range[0], size));
3763 else if (tree_int_cst_sign_bit (range[1]))
3764 {
3765 /* Avoid printing the upper bound if it's invalid. */
3766 warned = (func
3767 ? warning_at (loc, opt,
3768 "%K%qD writing %E or more bytes into "
3769 "a region of size %E overflows "
3770 "the destination",
3771 exp, func, range[0], size)
3772 : warning_at (loc, opt,
3773 "%Kwriting %E or more bytes into "
3774 "a region of size %E overflows "
3775 "the destination",
3776 exp, range[0], size));
3777 }
3778 else
3779 warned = (func
3780 ? warning_at (loc, opt,
3781 "%K%qD writing between %E and %E bytes "
3782 "into a region of size %E overflows "
3783 "the destination",
3784 exp, func, range[0], range[1],
3785 size)
3786 : warning_at (loc, opt,
3787 "%Kwriting between %E and %E bytes "
3788 "into a region of size %E overflows "
3789 "the destination",
3790 exp, range[0], range[1],
3791 size));
3792 return warned;
3793 }
3794
3795 if (read)
3796 {
3797 if (tree_int_cst_equal (range[0], range[1]))
3798 warned = (func
3799 ? warning_n (loc, OPT_Wstringop_overread,
3800 tree_to_uhwi (range[0]),
3801 "%K%qD reading %E byte from a region of size %E",
3802 "%K%qD reading %E bytes from a region of size %E", exp, func, range[0], size)
3803 : warning_n (loc, OPT_Wstringop_overread,
3804 tree_to_uhwi (range[0]),
3805 "%Kreading %E byte from a region of size %E",
3806 "%Kreading %E bytes from a region of size %E",
3807 exp, range[0], size));
3808 else if (tree_int_cst_sign_bit (range[1]))
3809 {
3810 /* Avoid printing the upper bound if it's invalid. */
3811 warned = (func
3812 ? warning_at (loc, OPT_Wstringop_overread,
3813 "%K%qD reading %E or more bytes from "
3814 "a region of size %E",
3815 exp, func, range[0], size)
3816 : warning_at (loc, OPT_Wstringop_overread,
3817 "%Kreading %E or more bytes from a region "
3818 "of size %E",
3819 exp, range[0], size));
3820 }
3821 else
3822 warned = (func
3823 ? warning_at (loc, OPT_Wstringop_overread,
3824 "%K%qD reading between %E and %E bytes from "
3825 "a region of size %E",
3826 exp, func, range[0], range[1], size)
3827 : warning_at (loc, opt,
3828 "%K reading between %E and %E bytes from "
3829 "a region of size %E",
3830 exp, range[0], range[1], size));
3831
3832 if (warned)
3833 TREE_NO_WARNING (exp) = true;
3834
3835 return warned;
3836 }
3837
3838 if (tree_int_cst_equal (range[0], range[1]))
3840 warned = (func
3841 ? warning_n (loc, OPT_Wstringop_overread,
3842 tree_to_uhwi (range[0]),
3843 "%K%qD epecting %E byte in a region of size %E",
3844 "%K%qD expecting %E bytes in a region of size %E",
3845 exp, func, range[0], size)
3846 : warning_n (loc, OPT_Wstringop_overread,
3847 tree_to_uhwi (range[0]),
3848 "%Kexpecting %E byte in a region of size %E",
3849 "%Kexpecting %E bytes in a region of size %E",
3850 exp, range[0], size));
3851 else if (tree_int_cst_sign_bit (range[1]))
3852 {
3853 /* Avoid printing the upper bound if it's invalid. */
3854 warned = (func
3855 ? warning_at (loc, OPT_Wstringop_overread,
3856 "%K%qD expecting %E or more bytes in a region "
3857 "of size %E",
3858 exp, func, range[0], size)
3859 : warning_at (loc, OPT_Wstringop_overread,
3860 "%Kexpecting %E or more bytes in a region "
3861 "of size %E",
3862 exp, range[0], size));
3863 }
3864 else
3865 warned = (func
3866 ? warning_at (loc, OPT_Wstringop_overread,
3867 "%K%qD expecting between %E and %E bytes in "
3868 "a region of size %E",
3869 exp, func, range[0], range[1], size)
3870 : warning_at (loc, OPT_Wstringop_overread,
3871 "%Kexpectting between %E and %E bytes in "
3872 "a region of size %E",
3873 exp, range[0], range[1], size));
3874
3875 if (warned)
3876 TREE_NO_WARNING (exp) = true;
3877
3878 return warned;
3879 }
3880
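/* As an example (illustration only), for a write access such as

       char d[4];
       memcpy (d, s, 8);

   the function above is expected to emit a -Wstringop-overflow
   warning of the form "writing 8 bytes into a region of size 4
   overflows the destination".  */
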
3881 /* Issue an inform message describing the target of an access REF.
3882 MODE identifies the access as a read, a write, or both. */
3883
3884 static void
3885 inform_access (const access_ref &ref, access_mode mode)
3886 {
3887 if (!ref.ref)
3888 return;
3889
3890 /* Convert offset range and avoid including a zero range since it
3891 isn't necessarily meaningful. */
3892 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
3893 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
3894 HOST_WIDE_INT minoff;
3895 HOST_WIDE_INT maxoff = diff_max;
3896 if (wi::fits_shwi_p (ref.offrng[0]))
3897 minoff = ref.offrng[0].to_shwi ();
3898 else
3899 minoff = ref.offrng[0] < 0 ? diff_min : diff_max;
3900
3901 if (wi::fits_shwi_p (ref.offrng[1]))
3902 maxoff = ref.offrng[1].to_shwi ();
3903
3904 if (maxoff <= diff_min || maxoff >= diff_max)
3905 /* Avoid mentioning an upper bound that's equal to or in excess
3906 of the maximum of ptrdiff_t. */
3907 maxoff = minoff;
3908
3909 /* Convert size range and always include it since all sizes are
3910 meaningful. */
3911 unsigned long long minsize = 0, maxsize = 0;
3912 if (wi::fits_shwi_p (ref.sizrng[0])
3913 && wi::fits_shwi_p (ref.sizrng[1]))
3914 {
3915 minsize = ref.sizrng[0].to_shwi ();
3916 maxsize = ref.sizrng[1].to_shwi ();
3917 }
3918
3919 char sizestr[80];
3920 location_t loc;
3921 tree allocfn = NULL_TREE;
3922 if (TREE_CODE (ref.ref) == SSA_NAME)
3923 {
3924 gimple *stmt = SSA_NAME_DEF_STMT (ref.ref);
3925 gcc_assert (is_gimple_call (stmt));
3926 loc = gimple_location (stmt);
3927 allocfn = gimple_call_fndecl (stmt);
3928 if (!allocfn)
3929 /* Handle calls through pointers to functions. */
3930 allocfn = gimple_call_fn (stmt);
3931
3932 /* SIZRNG doesn't necessarily have the same range as the allocation
3933 size determined by gimple_call_alloc_size (). */
3934
3935 if (minsize == maxsize)
3936 sprintf (sizestr, "%llu", minsize);
3937 else
3938 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
3939
3940 }
3941 else if (DECL_P (ref.ref))
3942 loc = DECL_SOURCE_LOCATION (ref.ref);
3943 else if (EXPR_P (ref.ref) && EXPR_HAS_LOCATION (ref.ref))
3944 loc = EXPR_LOCATION (ref.ref);
3945 else
3946 return;
3947
3948 if (mode == access_read_write || mode == access_write_only)
3949 {
3950 if (allocfn == NULL_TREE)
3951 {
3952 if (minoff == maxoff)
3953 {
3954 if (minoff == 0)
3955 inform (loc, "destination object %qE", ref.ref);
3956 else
3957 inform (loc, "at offset %wi into destination object %qE",
3958 minoff, ref.ref);
3959 }
3960 else
3961 inform (loc, "at offset [%wi, %wi] into destination object %qE",
3962 minoff, maxoff, ref.ref);
3963 return;
3964 }
3965
3966 if (minoff == maxoff)
3967 {
3968 if (minoff == 0)
3969 inform (loc, "destination object of size %s allocated by %qE",
3970 sizestr, allocfn);
3971 else
3972 inform (loc,
3973 "at offset %wi into destination object of size %s "
3974 "allocated by %qE", minoff, sizestr, allocfn);
3975 }
3976 else
3977 inform (loc,
3978 "at offset [%wi, %wi] into destination object of size %s "
3979 "allocated by %qE",
3980 minoff, maxoff, sizestr, allocfn);
3981
3982 return;
3983 }
3984
3985 if (DECL_P (ref.ref))
3986 {
3987 if (minoff == maxoff)
3988 {
3989 if (minoff == 0)
3990 inform (loc, "source object %qD", ref.ref);
3991 else
3992 inform (loc, "at offset %wi into source object %qD",
3993 minoff, ref.ref);
3994 }
3995 else
3996 inform (loc, "at offset [%wi, %wi] into source object %qD",
3997 minoff, maxoff, ref.ref);
3998 return;
3999 }
4000
4001 if (minoff == maxoff)
4002 {
4003 if (minoff == 0)
4004 inform (loc, "source object of size %s allocated by %qE",
4005 sizestr, allocfn);
4006 else
4007 inform (loc,
4008 "at offset %wi into source object of size %s "
4009 "allocated by %qE", minoff, sizestr, allocfn);
4010 }
4011 else
4012 inform (loc,
4013 "at offset [%wi, %wi] into source object of size %s "
4014 "allocated by %qE",
4015 minoff, maxoff, sizestr, allocfn);
4016 }
4017
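/* For instance (illustration only), after a -Wstringop-overflow
   warning for

       char a[8];
       memset (a + 6, 0, 4);

   the note emitted here is expected to read "at offset 6 into
   destination object 'a'", or the "allocated by" form when the
   object comes from an allocation call rather than a DECL.  */
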
4018 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4019 by BNDRNG if nonnull and valid. */
4020
4021 static void
4022 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4023 {
4024 if (bound)
4025 get_size_range (bound, range);
4026
4027 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4028 return;
4029
4030 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4031 {
4032 offset_int r[] =
4033 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4034 if (r[0] < bndrng[0])
4035 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4036 if (bndrng[1] < r[1])
4037 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4038 }
4039 else
4040 {
4041 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4042 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4043 }
4044 }
4045
4046 /* Try to verify that the sizes and lengths of the arguments to a string
4047 manipulation function given by EXP are within valid bounds and that
4048 the operation does not lead to buffer overflow or read past the end.
4049 Arguments other than EXP may be null. When non-null, the arguments
4050 have the following meaning:
4051 DST is the destination of a copy call or NULL otherwise.
4052 SRC is the source of a copy call or NULL otherwise.
4053 DSTWRITE is the number of bytes written into the destination obtained
4054 from the user-supplied size argument to the function (such as in
4055 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4056 MAXREAD is the user-supplied bound on the length of the source sequence
4057 (such as in strncat(d, s, N)). It specifies the upper limit on the number
4058 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4059 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4060 expression EXP is a string function call (as opposed to a memory call
4061 like memcpy). As an exception, SRCSTR can also be an integer denoting
4062 the precomputed size of the source string or object (for functions like
4063 memcpy).
4064 DSTSIZE is the size of the destination object.
4065
4066 When DSTWRITE is null, MAXREAD is checked to verify that it doesn't exceed
4067 SIZE_MAX.
4068
4069 MODE identifies whether the access is a read, a write, or both, and
4070 is neither for simple size checks in calls to functions that neither
4071 read from nor write to the region.
4072
4073 When nonnull, PAD points to a more detailed description of the access.
4074
4075 If the call is successfully verified as safe return true, otherwise
4076 return false. */
4077
4078 bool
4079 check_access (tree exp, tree dstwrite,
4080 tree maxread, tree srcstr, tree dstsize,
4081 access_mode mode, const access_data *pad /* = NULL */)
4082 {
4083 /* The size of the largest object is half the address space, or
4084 PTRDIFF_MAX. (This is way too permissive.) */
4085 tree maxobjsize = max_object_size ();
4086
4087 /* Either the approximate/minimum length of the source string for
4088 string functions or the size of the source object for raw memory
4089 functions. */
4090 tree slen = NULL_TREE;
4091
4092 /* The range of the access in bytes; first set to the write access
4093 for functions that write and then read for those that also (or
4094 just) read. */
4095 tree range[2] = { NULL_TREE, NULL_TREE };
4096
4097 /* Set to true when the exact number of bytes written by a string
4098 function like strcpy is not known and the only thing that is
4099 known is that it must be at least one (for the terminating nul). */
4100 bool at_least_one = false;
4101 if (srcstr)
4102 {
4103 /* SRCSTR is normally a pointer to string but as a special case
4104 it can be an integer denoting the length of a string. */
4105 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4106 {
4107 if (!check_nul_terminated_array (exp, srcstr, maxread))
4108 return false;
4109 /* Try to determine the range of lengths the source string
4110 refers to. If it can be determined and is less than
4111 the upper bound given by MAXREAD add one to it for
4112 the terminating nul. Otherwise, set it to one for
4113 the same reason, or to MAXREAD as appropriate. */
4114 c_strlen_data lendata = { };
4115 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4116 range[0] = lendata.minlen;
4117 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4118 if (range[0]
4119 && TREE_CODE (range[0]) == INTEGER_CST
4120 && TREE_CODE (range[1]) == INTEGER_CST
4121 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4122 {
4123 if (maxread && tree_int_cst_le (maxread, range[0]))
4124 range[0] = range[1] = maxread;
4125 else
4126 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4127 range[0], size_one_node);
4128
4129 if (maxread && tree_int_cst_le (maxread, range[1]))
4130 range[1] = maxread;
4131 else if (!integer_all_onesp (range[1]))
4132 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4133 range[1], size_one_node);
4134
4135 slen = range[0];
4136 }
4137 else
4138 {
4139 at_least_one = true;
4140 slen = size_one_node;
4141 }
4142 }
4143 else
4144 slen = srcstr;
4145 }
4146
4147 if (!dstwrite && !maxread)
4148 {
4149 /* When the only available piece of data is the object size
4150 there is nothing to do. */
4151 if (!slen)
4152 return true;
4153
4154 /* Otherwise, when the length of the source sequence is known
4155 (as with strlen), set DSTWRITE to it. */
4156 if (!range[0])
4157 dstwrite = slen;
4158 }
4159
4160 if (!dstsize)
4161 dstsize = maxobjsize;
4162
4163 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4164 if valid. */
4165 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4166
4167 tree func = get_callee_fndecl (exp);
4168 /* Read vs write access by built-ins can be determined from the const
4169 qualifiers on the pointer argument. In the absence of attribute
4170 access, non-const qualified pointer arguments to user-defined
4171 functions are assumed to both read and write the objects. */
4172 const bool builtin = func ? fndecl_built_in_p (func) : false;
4173
4174 /* First check the number of bytes to be written against the maximum
4175 object size. */
4176 if (range[0]
4177 && TREE_CODE (range[0]) == INTEGER_CST
4178 && tree_int_cst_lt (maxobjsize, range[0]))
4179 {
4180 location_t loc = tree_nonartificial_location (exp);
4181 loc = expansion_point_location_if_in_system_header (loc);
4182
4183 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4184 NULL_TREE, pad);
4185 return false;
4186 }
4187
4188 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4189 constant, and in range of unsigned HOST_WIDE_INT. */
4190 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4191
4192 /* Next check the number of bytes to be written against the destination
4193 object size. */
4194 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4195 {
4196 if (range[0]
4197 && TREE_CODE (range[0]) == INTEGER_CST
4198 && ((tree_fits_uhwi_p (dstsize)
4199 && tree_int_cst_lt (dstsize, range[0]))
4200 || (dstwrite
4201 && tree_fits_uhwi_p (dstwrite)
4202 && tree_int_cst_lt (dstwrite, range[0]))))
4203 {
4204 if (TREE_NO_WARNING (exp)
4205 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4206 return false;
4207
4208 location_t loc = tree_nonartificial_location (exp);
4209 loc = expansion_point_location_if_in_system_header (loc);
4210
4211 bool warned = false;
4212 if (dstwrite == slen && at_least_one)
4213 {
4214 /* This is a call to strcpy with a destination of 0 size
4215 and a source of unknown length. The call will write
4216 at least one byte past the end of the destination. */
4217 warned = (func
4218 ? warning_at (loc, OPT_Wstringop_overflow_,
4219 "%K%qD writing %E or more bytes into "
4220 "a region of size %E overflows "
4221 "the destination",
4222 exp, func, range[0], dstsize)
4223 : warning_at (loc, OPT_Wstringop_overflow_,
4224 "%Kwriting %E or more bytes into "
4225 "a region of size %E overflows "
4226 "the destination",
4227 exp, range[0], dstsize));
4228 }
4229 else
4230 {
4231 const bool read
4232 = mode == access_read_only || mode == access_read_write;
4233 const bool write
4234 = mode == access_write_only || mode == access_read_write;
4235 warned = warn_for_access (loc, func, exp,
4236 OPT_Wstringop_overflow_,
4237 range, dstsize,
4238 write, read && !builtin);
4239 }
4240
4241 if (warned)
4242 {
4243 TREE_NO_WARNING (exp) = true;
4244 if (pad)
4245 inform_access (pad->dst, pad->mode);
4246 }
4247
4248 /* Return error when an overflow has been detected. */
4249 return false;
4250 }
4251 }
4252
4253 /* Check the maximum length of the source sequence against the size
4254 of the destination object if known, or against the maximum size
4255 of an object. */
4256 if (maxread)
4257 {
4258 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4259 PAD is nonnull and BNDRNG is valid. */
4260 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4261
4262 location_t loc = tree_nonartificial_location (exp);
4263 loc = expansion_point_location_if_in_system_header (loc);
4264
4265 tree size = dstsize;
4266 if (pad && pad->mode == access_read_only)
4267 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4268
4269 if (range[0] && maxread && tree_fits_uhwi_p (size))
4270 {
4271 if (tree_int_cst_lt (maxobjsize, range[0]))
4272 {
4273 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4274 range, size, pad);
4275 return false;
4276 }
4277
4278 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4279 {
4280 int opt = (dstwrite || mode != access_read_only
4281 ? OPT_Wstringop_overflow_
4282 : OPT_Wstringop_overread);
4283 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4284 return false;
4285 }
4286 }
4287
4288 maybe_warn_nonstring_arg (func, exp);
4289 }
4290
4291 /* Check for reading past the end of SRC. */
4292 bool overread = (slen
4293 && slen == srcstr
4294 && dstwrite
4295 && range[0]
4296 && TREE_CODE (slen) == INTEGER_CST
4297 && tree_int_cst_lt (slen, range[0]));
4298 /* If none is determined, try to get a better answer based on the details
4299 in PAD. */
4300 if (!overread
4301 && pad
4302 && pad->src.sizrng[1] >= 0
4303 && pad->src.offrng[0] >= 0
4304 && (pad->src.offrng[1] < 0
4305 || pad->src.offrng[0] <= pad->src.offrng[1]))
4306 {
4307 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4308 PAD is nonnull and BNDRNG is valid. */
4309 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4310 /* Set OVERREAD for reads starting just past the end of an object. */
4311 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4312 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4313 slen = size_zero_node;
4314 }
4315
4316 if (overread)
4317 {
4318 if (TREE_NO_WARNING (exp)
4319 || (srcstr && TREE_NO_WARNING (srcstr))
4320 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4321 return false;
4322
4323 location_t loc = tree_nonartificial_location (exp);
4324 loc = expansion_point_location_if_in_system_header (loc);
4325
4326 const bool read
4327 = mode == access_read_only || mode == access_read_write;
4328 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4329 slen, false, read))
4330 {
4331 TREE_NO_WARNING (exp) = true;
4332 if (pad)
4333 inform_access (pad->src, access_read_only);
4334 }
4335 return false;
4336 }
4337
4338 return true;
4339 }
4340
4341 /* A convenience wrapper for check_access above to check access
4342 by a read-only function like puts. */
4343
4344 static bool
4345 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4346 int ost /* = 1 */)
4347 {
4348 if (!warn_stringop_overread)
4349 return true;
4350
4351 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4352 compute_objsize (src, ost, &data.src);
4353 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4354 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4355 &data);
4356 }
4357
4358 /* If STMT is a call to an allocation function, returns the constant
4359 maximum size of the object allocated by the call represented as
4360 sizetype. If nonnull, sets RNG1[] to the range of the size.
4361 When nonnull, uses RVALS for range information, otherwise calls
4362 get_range_info to get it.
4363 Returns null when STMT is not a call to a valid allocation function. */
4364
4365 tree
4366 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4367 range_query * /* = NULL */)
4368 {
4369 if (!stmt)
4370 return NULL_TREE;
4371
4372 tree allocfntype;
4373 if (tree fndecl = gimple_call_fndecl (stmt))
4374 allocfntype = TREE_TYPE (fndecl);
4375 else
4376 allocfntype = gimple_call_fntype (stmt);
4377
4378 if (!allocfntype)
4379 return NULL_TREE;
4380
4381 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4382 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4383 if (!at)
4384 {
4385 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4386 return NULL_TREE;
4387
4388 argidx1 = 0;
4389 }
4390
4391 unsigned nargs = gimple_call_num_args (stmt);
4392
4393 if (argidx1 == UINT_MAX)
4394 {
4395 tree atval = TREE_VALUE (at);
4396 if (!atval)
4397 return NULL_TREE;
4398
4399 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4400 if (nargs <= argidx1)
4401 return NULL_TREE;
4402
4403 atval = TREE_CHAIN (atval);
4404 if (atval)
4405 {
4406 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4407 if (nargs <= argidx2)
4408 return NULL_TREE;
4409 }
4410 }
4411
4412 tree size = gimple_call_arg (stmt, argidx1);
4413
4414 wide_int rng1_buf[2];
4415 /* If RNG1 is not set, use the buffer. */
4416 if (!rng1)
4417 rng1 = rng1_buf;
4418
4419 /* Use maximum precision to avoid overflow below. */
4420 const int prec = ADDR_MAX_PRECISION;
4421
4422 {
4423 tree r[2];
4424 /* Determine the largest valid range size, including zero. */
4425 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4426 return NULL_TREE;
4427 rng1[0] = wi::to_wide (r[0], prec);
4428 rng1[1] = wi::to_wide (r[1], prec);
4429 }
4430
4431 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4432 return fold_convert (sizetype, size);
4433
4434 /* To handle ranges do the math in wide_int and return the product
4435 of the upper bounds as a constant. Ignore anti-ranges. */
4436 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4437 wide_int rng2[2];
4438 {
4439 tree r[2];
4440 /* As above, use the full non-negative range on failure. */
4441 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4442 return NULL_TREE;
4443 rng2[0] = wi::to_wide (r[0], prec);
4444 rng2[1] = wi::to_wide (r[1], prec);
4445 }
4446
4447 /* Compute products of both bounds for the caller but return the lesser
4448 of SIZE_MAX and the product of the upper bounds as a constant. */
4449 rng1[0] = rng1[0] * rng2[0];
4450 rng1[1] = rng1[1] * rng2[1];
4451
4452 const tree size_max = TYPE_MAX_VALUE (sizetype);
4453 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
4454 {
4455 rng1[1] = wi::to_wide (size_max, prec);
4456 return size_max;
4457 }
4458
4459 return wide_int_to_tree (sizetype, rng1[1]);
4460 }
4461
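/* A sketch of what the function above computes, using a hypothetical
   declaration for illustration:

       void *grow (size_t, size_t) __attribute__ ((alloc_size (1, 2)));
       ...
       p = grow (4, 8);

   Both size arguments are constant, so RNG1 is set to [32, 32] and
   the constant 32 is returned in sizetype.  For nonconstant arguments
   the product of the upper bounds, capped at SIZE_MAX, is returned
   instead.  */
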
4462 /* For an access to an object referenced by the function parameter PTR
4463 of pointer type, set RNG[] to the range of sizes of the object
4464 obtained from the attribute access specification for the current function.
4465 Return the function parameter on success and null otherwise. */
4466
4467 tree
4468 gimple_parm_array_size (tree ptr, wide_int rng[2],
4469 range_query * /* = NULL */)
4470 {
4471 /* For a function argument try to determine the byte size of the array
4472 from the current function declaration (e.g., attribute access or
4473 related). */
4474 tree var = SSA_NAME_VAR (ptr);
4475 if (TREE_CODE (var) != PARM_DECL)
4476 return NULL_TREE;
4477
4478 const unsigned prec = TYPE_PRECISION (sizetype);
4479
4480 rdwr_map rdwr_idx;
4481 attr_access *access = get_parm_access (rdwr_idx, var);
4482 if (!access)
4483 return NULL_TREE;
4484
4485 if (access->sizarg != UINT_MAX)
4486 {
4487 /* TODO: Try to extract the range from the argument based on
4488 those of subsequent assertions or based on known calls to
4489 the current function. */
4490 return NULL_TREE;
4491 }
4492
4493 if (!access->minsize)
4494 return NULL_TREE;
4495
4496 /* Only consider ordinary array bound at level 2 (or above if it's
4497 ever added). */
4498 if (warn_array_parameter < 2 && !access->static_p)
4499 return NULL_TREE;
4500
4501 rng[0] = wi::zero (prec);
4502 rng[1] = wi::uhwi (access->minsize, prec);
4503 /* Multiply the array bound encoded in the attribute by the size
4504 of what the pointer argument to which it decays points to. */
4505 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
4506 tree size = TYPE_SIZE_UNIT (eltype);
4507 if (!size || TREE_CODE (size) != INTEGER_CST)
4508 return NULL_TREE;
4509
4510 rng[1] *= wi::to_wide (size, prec);
4511 return var;
4512 }
4513
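/* For illustration, with a hypothetical declaration such as

       void f (int a[static 8]);

   the array bound recorded in the parameter's attribute access
   information yields RNG equal to [0, 8 * sizeof (int)] and the
   PARM_DECL for A is returned, provided the bound is "static" or
   -Warray-parameter is at level 2.  */
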
4514 /* Wrapper around the wide_int overload of get_range that accepts
4515 offset_int instead. For middle end expressions returns the same
4516 result. For a subset of nonconstant expressions emitted by the front
4517 end determines a more precise range than would be possible otherwise. */
4518
4519 static bool
4520 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
4521 {
4522 offset_int add = 0;
4523 if (TREE_CODE (x) == PLUS_EXPR)
4524 {
4525 /* Handle constant offsets in pointer addition expressions seen
4526 in the front end IL. */
4527 tree op = TREE_OPERAND (x, 1);
4528 if (TREE_CODE (op) == INTEGER_CST)
4529 {
4530 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
4531 add = wi::to_offset (op);
4532 x = TREE_OPERAND (x, 0);
4533 }
4534 }
4535
4536 if (TREE_CODE (x) == NOP_EXPR)
4537 /* Also handle conversions to sizetype seen in the front end IL. */
4538 x = TREE_OPERAND (x, 0);
4539
4540 tree type = TREE_TYPE (x);
4541
4542 if (TREE_CODE (x) != INTEGER_CST
4543 && TREE_CODE (x) != SSA_NAME)
4544 {
4545 if (TYPE_UNSIGNED (type)
4546 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
4547 type = signed_type_for (type);
4548
4549 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
4550 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
4551 return x;
4552 }
4553
4554 wide_int wr[2];
4555 if (!get_range (x, stmt, wr, rvals))
4556 return false;
4557
4558 signop sgn = SIGNED;
4559 /* Only convert signed integers or unsigned sizetype to a signed
4560 offset and avoid converting large positive values in narrower
4561 types to negative offsets. */
4562 if (TYPE_UNSIGNED (type)
4563 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
4564 sgn = UNSIGNED;
4565
4566 r[0] = offset_int::from (wr[0], sgn);
4567 r[1] = offset_int::from (wr[1], sgn);
4568 return true;
4569 }
4570
4571 /* Return the argument that the call STMT to a built-in function returns
4572 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
4573 from the argument reflected in the value returned by the built-in if it
4574 can be determined, otherwise to 0 and HWI_M1U respectively. */
4575
4576 static tree
4577 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
4578 range_query *rvals)
4579 {
4580 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4581 || gimple_call_num_args (stmt) < 1)
4582 return NULL_TREE;
4583
4584 tree fn = gimple_call_fndecl (stmt);
4585 switch (DECL_FUNCTION_CODE (fn))
4586 {
4587 case BUILT_IN_MEMCPY:
4588 case BUILT_IN_MEMCPY_CHK:
4589 case BUILT_IN_MEMMOVE:
4590 case BUILT_IN_MEMMOVE_CHK:
4591 case BUILT_IN_MEMSET:
4592 case BUILT_IN_STPCPY:
4593 case BUILT_IN_STPCPY_CHK:
4594 case BUILT_IN_STPNCPY:
4595 case BUILT_IN_STPNCPY_CHK:
4596 case BUILT_IN_STRCAT:
4597 case BUILT_IN_STRCAT_CHK:
4598 case BUILT_IN_STRCPY:
4599 case BUILT_IN_STRCPY_CHK:
4600 case BUILT_IN_STRNCAT:
4601 case BUILT_IN_STRNCAT_CHK:
4602 case BUILT_IN_STRNCPY:
4603 case BUILT_IN_STRNCPY_CHK:
4604 offrng[0] = offrng[1] = 0;
4605 return gimple_call_arg (stmt, 0);
4606
4607 case BUILT_IN_MEMPCPY:
4608 case BUILT_IN_MEMPCPY_CHK:
4609 {
4610 tree off = gimple_call_arg (stmt, 2);
4611 if (!get_offset_range (off, stmt, offrng, rvals))
4612 {
4613 offrng[0] = 0;
4614 offrng[1] = HOST_WIDE_INT_M1U;
4615 }
4616 return gimple_call_arg (stmt, 0);
4617 }
4618
4619 case BUILT_IN_MEMCHR:
4620 {
4621 tree off = gimple_call_arg (stmt, 2);
4622 if (get_offset_range (off, stmt, offrng, rvals))
4623 offrng[0] = 0;
4624 else
4625 {
4626 offrng[0] = 0;
4627 offrng[1] = HOST_WIDE_INT_M1U;
4628 }
4629 return gimple_call_arg (stmt, 0);
4630 }
4631
4632 case BUILT_IN_STRCHR:
4633 case BUILT_IN_STRRCHR:
4634 case BUILT_IN_STRSTR:
4635 {
4636 offrng[0] = 0;
4637 offrng[1] = HOST_WIDE_INT_M1U;
4638 }
4639 return gimple_call_arg (stmt, 0);
4640
4641 default:
4642 break;
4643 }
4644
4645 return NULL_TREE;
4646 }
4647
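/* As an example of the mapping above (illustration only): for

       p = __builtin_memchr (s, c, n);

   the first argument S is returned and OFFRNG is set to [0, upper
   bound of N], whereas for strchr, strrchr, and strstr the offset
   into the returned argument is unknown and OFFRNG is set to
   [0, HOST_WIDE_INT_M1U].  */
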
4648 /* Helper to compute the size of the object referenced by the PTR
4649 expression which must have pointer type, using Object Size type
4650 OSTYPE (only the least significant 2 bits are used).
4651 On success, sets PREF->REF to the DECL of the referenced object
4652 if it's unique, otherwise to null, PREF->OFFRNG to the range of
4653 offsets into it, and PREF->SIZRNG to the range of sizes of
4654 the object(s).
4655 VISITED is used to avoid visiting the same PHI operand multiple
4656 times, and, when nonnull, RVALS to determine range information.
4657 Returns true on success, false when a meaningful size (or range)
4658 cannot be determined.
4659
4660 The function is intended for diagnostics and should not be used
4661 to influence code generation or optimization. */
4662
4663 static bool
4664 compute_objsize (tree ptr, int ostype, access_ref *pref, bitmap *visited,
4665 range_query *rvals)
4666 {
4667 STRIP_NOPS (ptr);
4668
4669 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
4670 if (addr)
4671 ptr = TREE_OPERAND (ptr, 0);
4672
4673 if (DECL_P (ptr))
4674 {
4675 pref->ref = ptr;
4676
4677 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
4678 {
4679 /* Set the maximum size if the reference is to the pointer
4680 itself (as opposed to what it points to). */
4681 pref->set_max_size_range ();
4682 return true;
4683 }
4684
4685 if (tree size = decl_init_size (ptr, false))
4686 if (TREE_CODE (size) == INTEGER_CST)
4687 {
4688 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4689 return true;
4690 }
4691
4692 pref->set_max_size_range ();
4693 return true;
4694 }
4695
4696 const tree_code code = TREE_CODE (ptr);
4697
4698 if (code == BIT_FIELD_REF)
4699 {
4700 tree ref = TREE_OPERAND (ptr, 0);
4701 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4702 return false;
4703
4704 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
4705 pref->add_offset (off / BITS_PER_UNIT);
4706 return true;
4707 }
4708
4709 if (code == COMPONENT_REF)
4710 {
4711 tree ref = TREE_OPERAND (ptr, 0);
4712 tree field = TREE_OPERAND (ptr, 1);
4713
4714 if (ostype == 0)
4715 {
4716 /* In OSTYPE zero (for raw memory functions like memcpy), use
4717 the maximum size instead if the identity of the enclosing
4718 object cannot be determined. */
4719 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4720 return false;
4721
4722 /* Otherwise, use the size of the enclosing object and add
4723 the offset of the member to the offset computed so far. */
4724 tree offset = byte_position (field);
4725 if (TREE_CODE (offset) == INTEGER_CST)
4726 pref->add_offset (wi::to_offset (offset));
4727 else
4728 pref->add_max_offset ();
4729 return true;
4730 }
4731
4732 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
4733 {
4734 /* Set maximum size if the reference is to the pointer member
4735 itself (as opposed to what it points to). */
4736 pref->set_max_size_range ();
4737 return true;
4738 }
4739
4740 pref->ref = field;
4741
4742 /* SAM is set for array members that might need special treatment. */
4743 special_array_member sam;
4744 tree size = component_ref_size (ptr, &sam);
4745 if (sam == special_array_member::int_0)
4746 pref->sizrng[0] = pref->sizrng[1] = 0;
4747 else if (!pref->trail1special && sam == special_array_member::trail_1)
4748 pref->sizrng[0] = pref->sizrng[1] = 1;
4749 else if (size && TREE_CODE (size) == INTEGER_CST)
4750 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
4751 else
4752 {
4753 /* When the size of the member is unknown it's either a flexible
4754 array member or a trailing special array member (either zero
4755 length or one-element). Set the size to the maximum minus
4756 the constant size of the type. */
4757 pref->sizrng[0] = 0;
4758 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4759 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
4760 if (TREE_CODE (recsize) == INTEGER_CST)
4761 pref->sizrng[1] -= wi::to_offset (recsize);
4762 }
4763 return true;
4764 }
4765
4766 if (code == ARRAY_REF || code == MEM_REF)
4767 {
4768 tree ref = TREE_OPERAND (ptr, 0);
4769 tree reftype = TREE_TYPE (ref);
4770 if (code == ARRAY_REF
4771 && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
4772 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
4773 of known bound. */
4774 return false;
4775
4776 if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4777 {
4778 /* Give up for MEM_REFs of vector types; those may be synthesized
4779 from multiple assignments to consecutive data members. See PR
4780 93200.
4781 FIXME: Deal with this more generally, e.g., by marking up such
4782 MEM_REFs at the time they're created. */
4783 reftype = TREE_TYPE (reftype);
4784 if (TREE_CODE (reftype) == VECTOR_TYPE)
4785 return false;
4786 }
4787
4788 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4789 return false;
4790
4791 offset_int orng[2];
4792 tree off = pref->eval (TREE_OPERAND (ptr, 1));
4793 if (!get_offset_range (off, NULL, orng, rvals))
4794 {
4795 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
4796 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
4797 orng[0] = -orng[1] - 1;
4798 }
4799
4800 if (TREE_CODE (ptr) == ARRAY_REF)
4801 {
4802 /* Convert the array index range determined above to a byte
4803 offset. */
4804 tree lowbnd = array_ref_low_bound (ptr);
4805 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4806 {
4807 /* Adjust the index by the low bound of the array domain
4808 (normally zero but 1 in Fortran). */
4809 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4810 orng[0] -= lb;
4811 orng[1] -= lb;
4812 }
4813
4814 tree eltype = TREE_TYPE (ptr);
4815 tree tpsize = TYPE_SIZE_UNIT (eltype);
4816 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
4817 {
4818 pref->add_max_offset ();
4819 return true;
4820 }
4821
4822 offset_int sz = wi::to_offset (tpsize);
4823 orng[0] *= sz;
4824 orng[1] *= sz;
4825
4826 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
4827 {
4828 /* Except for the permissive raw memory functions which use
4829 the size of the whole object determined above, use the size
4830 of the referenced array. Because the overall offset is from
4831 the beginning of the complete array object add this overall
4832 offset to the size of array. */
4833 offset_int sizrng[2] =
4834 {
4835 pref->offrng[0] + orng[0] + sz,
4836 pref->offrng[1] + orng[1] + sz
4837 };
4838 if (sizrng[1] < sizrng[0])
4839 std::swap (sizrng[0], sizrng[1]);
4840 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
4841 pref->sizrng[0] = sizrng[0];
4842 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
4843 pref->sizrng[1] = sizrng[1];
4844 }
4845 }
4846
4847 pref->add_offset (orng[0], orng[1]);
4848 return true;
4849 }
4850
4851 if (code == TARGET_MEM_REF)
4852 {
4853 tree ref = TREE_OPERAND (ptr, 0);
4854 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4855 return false;
4856
4857 /* TODO: Handle remaining operands. Until then, add maximum offset. */
4858 pref->ref = ptr;
4859 pref->add_max_offset ();
4860 return true;
4861 }
4862
4863 if (code == INTEGER_CST)
4864 {
4865 /* Pointer constants other than null are most likely the result
4866 of erroneous null pointer addition/subtraction. Set size to
4867 zero. For null pointers, set size to the maximum for now
4868 since those may be the result of jump threading. */
4869 if (integer_zerop (ptr))
4870 pref->set_max_size_range ();
4871 else
4872 pref->sizrng[0] = pref->sizrng[1] = 0;
4873 pref->ref = ptr;
4874
4875 return true;
4876 }
4877
4878 if (code == STRING_CST)
4879 {
4880 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
4881 return true;
4882 }
4883
4884 if (code == POINTER_PLUS_EXPR)
4885 {
4886 tree ref = TREE_OPERAND (ptr, 0);
4887 if (!compute_objsize (ref, ostype, pref, visited, rvals))
4888 return false;
4889
4890 offset_int orng[2];
4891 tree off = pref->eval (TREE_OPERAND (ptr, 1));
4892 if (get_offset_range (off, NULL, orng, rvals))
4893 pref->add_offset (orng[0], orng[1]);
4894 else
4895 pref->add_max_offset ();
4896 return true;
4897 }
4898
4899 if (code == VIEW_CONVERT_EXPR)
4900 {
4901 ptr = TREE_OPERAND (ptr, 0);
4902 return compute_objsize (ptr, ostype, pref, visited, rvals);
4903 }
4904
4905 if (TREE_CODE (ptr) == SSA_NAME)
4906 {
4907 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
4908 if (is_gimple_call (stmt))
4909 {
4910 /* If STMT is a call to an allocation function get the size
4911 from its argument(s). If successful, also set *PREF->REF
4912 to PTR for the caller to include in diagnostics. */
4913 wide_int wr[2];
4914 if (gimple_call_alloc_size (stmt, wr, rvals))
4915 {
4916 pref->ref = ptr;
4917 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4918 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4919 /* Constrain both bounds to a valid size. */
4920 offset_int maxsize = wi::to_offset (max_object_size ());
4921 if (pref->sizrng[0] > maxsize)
4922 pref->sizrng[0] = maxsize;
4923 if (pref->sizrng[1] > maxsize)
4924 pref->sizrng[1] = maxsize;
4925 }
4926 else
4927 {
4928 /* For functions known to return one of their pointer arguments
4929 try to determine what the returned pointer points to, and on
4930 success add OFFRNG which was set to the offset added by
4931 the function (e.g., memchr) to the overall offset. */
4932 offset_int offrng[2];
4933 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
4934 {
4935 if (!compute_objsize (ret, ostype, pref, visited, rvals))
4936 return false;
4937
4938 /* Cap OFFRNG[1] to at most the remaining size of
4939 the object. */
4940 offset_int remrng[2];
4941 remrng[1] = pref->size_remaining (remrng);
4942 if (remrng[1] < offrng[1])
4943 offrng[1] = remrng[1];
4944 pref->add_offset (offrng[0], offrng[1]);
4945 }
4946 else
4947 {
4948 /* For other calls that might return arbitrary pointers
4949 including into the middle of objects set the size
4950 range to maximum, clear PREF->BASE0, and also set
4951 PREF->REF to include in diagnostics. */
4952 pref->set_max_size_range ();
4953 pref->base0 = false;
4954 pref->ref = ptr;
4955 }
4956 }
4957 return true;
4958 }
4959
4960 if (gimple_nop_p (stmt))
4961 {
4962 /* For a function argument try to determine the byte size
4963 of the array from the current function declaration
4964 (e.g., attribute access or related). */
4965 wide_int wr[2];
4966 if (tree ref = gimple_parm_array_size (ptr, wr, rvals))
4967 {
4968 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
4969 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
4970 pref->ref = ref;
4971 return true;
4972 }
4973
4974 pref->set_max_size_range ();
4975 pref->base0 = false;
4976 pref->ref = ptr;
4977 if (tree var = SSA_NAME_VAR (ptr))
4978 if (TREE_CODE (var) == PARM_DECL)
4979 pref->ref = var;
4980
4981 return true;
4982 }
4983
4984 /* TODO: Handle PHI. */
4985
4986 if (!is_gimple_assign (stmt))
4987 {
4988 /* Clear BASE0 since the assigned pointer might point into
4989 the middle of the object, set the maximum size range and,
4990 if the SSA_NAME refers to a function argument, set
4991 PREF->REF to it. */
4992 pref->base0 = false;
4993 pref->set_max_size_range ();
4994 if (tree var = SSA_NAME_VAR (ptr))
4995 if (TREE_CODE (var) == PARM_DECL)
4996 pref->ref = var;
4997 return true;
4998 }
4999
5000 ptr = gimple_assign_rhs1 (stmt);
5001
5002 tree_code code = gimple_assign_rhs_code (stmt);
5003
5004 if (code == POINTER_PLUS_EXPR
5005 && TREE_CODE (TREE_TYPE (ptr)) == POINTER_TYPE)
5006 {
5007 /* Compute the size of the object first. */
5008 if (!compute_objsize (ptr, ostype, pref, visited, rvals))
5009 return false;
5010
5011 offset_int orng[2];
5012 tree off = gimple_assign_rhs2 (stmt);
5013 if (get_offset_range (off, stmt, orng, rvals))
5014 pref->add_offset (orng[0], orng[1]);
5015 else
5016 pref->add_max_offset ();
5017 return true;
5018 }
5019
5020 if (code == ADDR_EXPR)
5021 return compute_objsize (ptr, ostype, pref, visited, rvals);
5022
5023 /* This could be an assignment from a nonlocal pointer. Save PTR
5024 to mention in diagnostics but otherwise treat it as a pointer
5025 to an unknown object. */
5026 pref->ref = ptr;
5027 }
5028
5029 /* Assume all other expressions point into an unknown object
5030 of the maximum valid size. */
5031 pref->base0 = false;
5032 pref->set_max_size_range ();
5033 return true;
5034 }
5035
5036 /* A "public" wrapper around the above. Clients should use this overload
5037 instead. */
5038
5039 tree
5040 compute_objsize (tree ptr, int ostype, access_ref *pref,
5041 range_query *rvals /* = NULL */)
5042 {
5043 bitmap visited = NULL;
5044
5045 bool success
5046 = compute_objsize (ptr, ostype, pref, &visited, rvals);
5047
5048 if (visited)
5049 BITMAP_FREE (visited);
5050
5051 if (!success)
5052 return NULL_TREE;
5053
5054 offset_int maxsize = pref->size_remaining ();
5055 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5056 pref->offrng[0] = 0;
5057 return wide_int_to_tree (sizetype, maxsize);
5058 }
5059
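/* A short usage sketch (illustration only): given char buf[8],
   calling the wrapper above on the expression &buf[2] with OSTYPE 1
   is expected to return the sizetype constant 6 (the bytes remaining
   past the offset), with PREF->REF set to BUF, PREF->OFFRNG to
   [2, 2], and PREF->SIZRNG to [8, 8].  */
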
5060 /* Transitional wrapper around the above. The function should be removed
5061 once callers transition to one of the two above. */
5062
5063 tree
5064 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5065 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5066 {
5067 /* Set the initial offsets to zero and size to negative to indicate
5068 none has been computed yet. */
5069 access_ref ref;
5070 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5071 if (!size || !ref.base0)
5072 return NULL_TREE;
5073
5074 if (pdecl)
5075 *pdecl = ref.ref;
5076
5077 if (poff)
5078 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5079
5080 return size;
5081 }
5082
5083 /* Helper to determine and check the sizes of the source and the destination
5084 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
5085 call expression, DEST is the destination argument, SRC is the source
5086 argument or null, and LEN is the number of bytes. Use Object Size type-0
5087 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5088 (no overflow or invalid sizes), false otherwise. */
5089
5090 static bool
5091 check_memop_access (tree exp, tree dest, tree src, tree size)
5092 {
5093 /* For functions like memset and memcpy that operate on raw memory
5094 try to determine the size of the largest source and destination
5095 object using type-0 Object Size regardless of the object size
5096 type specified by the option. */
5097 access_data data (exp, access_read_write);
5098 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5099 tree dstsize = compute_objsize (dest, 0, &data.dst);
5100
5101 return check_access (exp, size, /*maxread=*/NULL_TREE,
5102 srcsize, dstsize, data.mode, &data);
5103 }
5104
5105 /* Validate memchr arguments without performing any expansion.
5106 Return NULL_RTX. */
5107
5108 static rtx
5109 expand_builtin_memchr (tree exp, rtx)
5110 {
5111 if (!validate_arglist (exp,
5112 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5113 return NULL_RTX;
5114
5115 tree arg1 = CALL_EXPR_ARG (exp, 0);
5116 tree len = CALL_EXPR_ARG (exp, 2);
5117
5118 check_read_access (exp, arg1, len, 0);
5119
5120 return NULL_RTX;
5121 }
5122
5123 /* Expand a call EXP to the memcpy builtin.
5124 Return NULL_RTX if we failed; the caller should emit a normal call,
5125 otherwise try to get the result in TARGET, if convenient (and in
5126 mode MODE if that's convenient). */
5127
5128 static rtx
5129 expand_builtin_memcpy (tree exp, rtx target)
5130 {
5131 if (!validate_arglist (exp,
5132 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5133 return NULL_RTX;
5134
5135 tree dest = CALL_EXPR_ARG (exp, 0);
5136 tree src = CALL_EXPR_ARG (exp, 1);
5137 tree len = CALL_EXPR_ARG (exp, 2);
5138
5139 check_memop_access (exp, dest, src, len);
5140
5141 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5142 /*retmode=*/ RETURN_BEGIN, false);
5143 }
5144
5145 /* Expand a call EXP to the memmove built-in after checking it for validity.
5146 Return NULL_RTX if we failed; the caller should emit a normal call. */
5147
5148 static rtx
5149 expand_builtin_memmove (tree exp, rtx target)
5150 {
5151 if (!validate_arglist (exp,
5152 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5153 return NULL_RTX;
5154
5155 tree dest = CALL_EXPR_ARG (exp, 0);
5156 tree src = CALL_EXPR_ARG (exp, 1);
5157 tree len = CALL_EXPR_ARG (exp, 2);
5158
5159 check_memop_access (exp, dest, src, len);
5160
5161 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5162 /*retmode=*/ RETURN_BEGIN, true);
5163 }
5164
5165 /* Expand a call EXP to the mempcpy builtin.
5166 Return NULL_RTX if we failed; the caller should emit a normal call,
5167 otherwise try to get the result in TARGET, if convenient (and in
5168 mode MODE if that's convenient). */
5169
5170 static rtx
5171 expand_builtin_mempcpy (tree exp, rtx target)
5172 {
5173 if (!validate_arglist (exp,
5174 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5175 return NULL_RTX;
5176
5177 tree dest = CALL_EXPR_ARG (exp, 0);
5178 tree src = CALL_EXPR_ARG (exp, 1);
5179 tree len = CALL_EXPR_ARG (exp, 2);
5180
5181 /* Policy does not generally allow using compute_objsize (which
5182 is used internally by check_memop_access) to change code generation
5183 or drive optimization decisions.
5184
5185 In this instance it is safe because the code we generate has
5186 the same semantics regardless of the return value of
5187 check_memop_access. Exactly the same amount of data is copied
5188 and the return value is exactly the same in both cases.
5189
5190 Furthermore, check_memop_access always uses mode 0 for the call to
5191 compute_objsize, so the imprecise nature of compute_objsize is
5192 avoided. */
5193
5194 /* Avoid expanding mempcpy into memcpy when the call is determined
5195 to overflow the buffer. This also prevents the same overflow
5196 from being diagnosed again when expanding memcpy. */
5197 if (!check_memop_access (exp, dest, src, len))
5198 return NULL_RTX;
5199
5200 return expand_builtin_mempcpy_args (dest, src, len,
5201 target, exp, /*retmode=*/ RETURN_END);
5202 }
5203
5204 /* Helper function to do the actual work for expand of memory copy family
5205 functions (memcpy, mempcpy, stpcpy). Expansion should copy LEN bytes
5206 of memory from SRC to DEST and assign to TARGET if convenient. Return
5207 value is based on RETMODE argument. */
5208
5209 static rtx
5210 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
5211 rtx target, tree exp, memop_ret retmode,
5212 bool might_overlap)
5213 {
5214 unsigned int src_align = get_pointer_alignment (src);
5215 unsigned int dest_align = get_pointer_alignment (dest);
5216 rtx dest_mem, src_mem, dest_addr, len_rtx;
5217 HOST_WIDE_INT expected_size = -1;
5218 unsigned int expected_align = 0;
5219 unsigned HOST_WIDE_INT min_size;
5220 unsigned HOST_WIDE_INT max_size;
5221 unsigned HOST_WIDE_INT probable_max_size;
5222
5223 bool is_move_done;
5224
5225 /* If DEST is not a pointer type, call the normal function. */
5226 if (dest_align == 0)
5227 return NULL_RTX;
5228
5229 /* If either SRC is not a pointer type, don't do this
5230 operation in-line. */
5231 if (src_align == 0)
5232 return NULL_RTX;
5233
5234 if (currently_expanding_gimple_stmt)
5235 stringop_block_profile (currently_expanding_gimple_stmt,
5236 &expected_align, &expected_size);
5237
5238 if (expected_align < dest_align)
5239 expected_align = dest_align;
5240 dest_mem = get_memory_rtx (dest, len);
5241 set_mem_align (dest_mem, dest_align);
5242 len_rtx = expand_normal (len);
5243 determine_block_size (len, len_rtx, &min_size, &max_size,
5244 &probable_max_size);
5245
5246 /* Try to get the byte representation of the constant SRC points to,
5247 with its byte size in NBYTES. */
5248 unsigned HOST_WIDE_INT nbytes;
5249 const char *rep = getbyterep (src, &nbytes);
5250
5251 /* If the function's constant bound LEN_RTX is less than or equal
5252 to the byte size of the representation of the constant argument,
5253 and if block move would be done by pieces, we can avoid loading
5254 the bytes from memory and only store the computed constant.
5255 This works in the overlap (memmove) case as well because
5256 store_by_pieces just generates a series of stores of constants
5257 from the representation returned by getbyterep(). */
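/* Illustrative sketch (hypothetical names): for

     char buf[4];
     memcpy (buf, "abc", 4);

   the representation of "abc" including its nul is 4 bytes, no smaller
   than the constant bound, so the expansion below emits the bytes as
   constant stores rather than loading them from the literal.  */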
5258 if (rep
5259 && CONST_INT_P (len_rtx)
5260 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
5261 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
5262 CONST_CAST (char *, rep),
5263 dest_align, false))
5264 {
5265 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
5266 builtin_memcpy_read_str,
5267 CONST_CAST (char *, rep),
5268 dest_align, false, retmode);
5269 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5270 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5271 return dest_mem;
5272 }
5273
5274 src_mem = get_memory_rtx (src, len);
5275 set_mem_align (src_mem, src_align);
5276
5277 /* Copy word part most expediently. */
5278 enum block_op_methods method = BLOCK_OP_NORMAL;
5279 if (CALL_EXPR_TAILCALL (exp)
5280 && (retmode == RETURN_BEGIN || target == const0_rtx))
5281 method = BLOCK_OP_TAILCALL;
5282 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
5283 && retmode == RETURN_END
5284 && !might_overlap
5285 && target != const0_rtx);
5286 if (use_mempcpy_call)
5287 method = BLOCK_OP_NO_LIBCALL_RET;
5288 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
5289 expected_align, expected_size,
5290 min_size, max_size, probable_max_size,
5291 use_mempcpy_call, &is_move_done,
5292 might_overlap);
5293
5294 /* Bail out when a mempcpy call would be expanded as a libcall and the
5295 target provides a fast implementation of the mempcpy
5296 routine. */
5297 if (!is_move_done)
5298 return NULL_RTX;
5299
5300 if (dest_addr == pc_rtx)
5301 return NULL_RTX;
5302
5303 if (dest_addr == 0)
5304 {
5305 dest_addr = force_operand (XEXP (dest_mem, 0), target);
5306 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5307 }
5308
5309 if (retmode != RETURN_BEGIN && target != const0_rtx)
5310 {
5311 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
5312 /* stpcpy returns a pointer to the last byte, i.e. the terminating nul. */
5313 if (retmode == RETURN_END_MINUS_ONE)
5314 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
5315 }
5316
5317 return dest_addr;
5318 }
5319
5320 static rtx
5321 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
5322 rtx target, tree orig_exp, memop_ret retmode)
5323 {
5324 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
5325 retmode, false);
5326 }
5327
5328 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
5329 we failed; the caller should emit a normal call, otherwise try to
5330 get the result in TARGET, if convenient.
5331 Return value is based on RETMODE argument. */
5332
5333 static rtx
5334 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
5335 {
5336 class expand_operand ops[3];
5337 rtx dest_mem;
5338 rtx src_mem;
5339
5340 if (!targetm.have_movstr ())
5341 return NULL_RTX;
5342
5343 dest_mem = get_memory_rtx (dest, NULL);
5344 src_mem = get_memory_rtx (src, NULL);
5345 if (retmode == RETURN_BEGIN)
5346 {
5347 target = force_reg (Pmode, XEXP (dest_mem, 0));
5348 dest_mem = replace_equiv_address (dest_mem, target);
5349 }
5350
5351 create_output_operand (&ops[0],
5352 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
5353 create_fixed_operand (&ops[1], dest_mem);
5354 create_fixed_operand (&ops[2], src_mem);
5355 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
5356 return NULL_RTX;
5357
5358 if (retmode != RETURN_BEGIN && target != const0_rtx)
5359 {
5360 target = ops[0].value;
5361 /* movstr is supposed to set end to the address of the NUL
5362 terminator. If the caller requested a mempcpy-like return value,
5363 adjust it. */
5364 if (retmode == RETURN_END)
5365 {
5366 rtx tem = plus_constant (GET_MODE (target),
5367 gen_lowpart (GET_MODE (target), target), 1);
5368 emit_move_insn (target, force_operand (tem, NULL_RTX));
5369 }
5370 }
5371 return target;
5372 }
5373
5374 /* Do some very basic size validation of a call to the strcat builtin
5375 given by EXP. Return NULL_RTX to have the built-in expand to a call
5376 to the library function. */
5377
5378 static rtx
5379 expand_builtin_strcat (tree exp)
5380 {
5381 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
5382 || !warn_stringop_overflow)
5383 return NULL_RTX;
5384
5385 tree dest = CALL_EXPR_ARG (exp, 0);
5386 tree src = CALL_EXPR_ARG (exp, 1);
5387
5388 /* There is no way here to determine the length of the string in
5389 the destination to which the SRC string is being appended so
5390 just diagnose cases when the source string is longer than
5391 the destination object. */
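/* Illustrative sketch (hypothetical names): a call like

     char d[4];
     strcat (d, "too long");

   is diagnosed because the source, together with its terminating nul,
   cannot fit in the destination object no matter what D already holds.  */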
5392 access_data data (exp, access_read_write, NULL_TREE, true,
5393 NULL_TREE, true);
5394 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5395 compute_objsize (src, ost, &data.src);
5396 tree destsize = compute_objsize (dest, ost, &data.dst);
5397
5398 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
5399 src, destsize, data.mode, &data);
5400
5401 return NULL_RTX;
5402 }
5403
5404 /* Expand expression EXP, which is a call to the strcpy builtin. Return
5405 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5406 try to get the result in TARGET, if convenient (and in mode MODE if that's
5407 convenient). */
5408
5409 static rtx
5410 expand_builtin_strcpy (tree exp, rtx target)
5411 {
5412 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5413 return NULL_RTX;
5414
5415 tree dest = CALL_EXPR_ARG (exp, 0);
5416 tree src = CALL_EXPR_ARG (exp, 1);
5417
5418 if (warn_stringop_overflow)
5419 {
5420 access_data data (exp, access_read_write, NULL_TREE, true,
5421 NULL_TREE, true);
5422 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5423 compute_objsize (src, ost, &data.src);
5424 tree dstsize = compute_objsize (dest, ost, &data.dst);
5425 check_access (exp, /*dstwrite=*/ NULL_TREE,
5426 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
5427 dstsize, data.mode, &data);
5428 }
5429
5430 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
5431 {
5432 /* Check to see if the argument was declared attribute nonstring
5433 and if so, issue a warning since at this point it's not known
5434 to be nul-terminated. */
5435 tree fndecl = get_callee_fndecl (exp);
5436 maybe_warn_nonstring_arg (fndecl, exp);
5437 return ret;
5438 }
5439
5440 return NULL_RTX;
5441 }
5442
5443 /* Helper function to do the actual work for expand_builtin_strcpy. The
5444 arguments to the builtin_strcpy call DEST and SRC are broken out
5445 so that this can also be called without constructing an actual CALL_EXPR.
5446 The other arguments and return value are the same as for
5447 expand_builtin_strcpy. */
5448
5449 static rtx
5450 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
5451 {
5452 /* Detect strcpy calls with unterminated arrays. */
5453 tree size;
5454 bool exact;
5455 if (tree nonstr = unterminated_array (src, &size, &exact))
5456 {
5457 /* NONSTR refers to the non-nul terminated constant array. */
5458 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
5459 size, exact);
5460 return NULL_RTX;
5461 }
5462
5463 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
5464 }
5465
5466 /* Expand a call EXP to the stpcpy builtin.
5467 Return NULL_RTX if we failed; the caller should emit a normal call,
5468 otherwise try to get the result in TARGET, if convenient (and in
5469 mode MODE if that's convenient). */
5470
5471 static rtx
5472 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
5473 {
5474 tree dst, src;
5475 location_t loc = EXPR_LOCATION (exp);
5476
5477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5478 return NULL_RTX;
5479
5480 dst = CALL_EXPR_ARG (exp, 0);
5481 src = CALL_EXPR_ARG (exp, 1);
5482
5483 if (warn_stringop_overflow)
5484 {
5485 access_data data (exp, access_read_write);
5486 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
5487 &data.dst);
5488 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
5489 src, destsize, data.mode, &data);
5490 }
5491
5492 /* If return value is ignored, transform stpcpy into strcpy. */
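/* Illustrative sketch: a statement such as stpcpy (d, s); whose result
   is unused reaches this point with TARGET == const0_rtx and is expanded
   as if it had been written strcpy (d, s).  */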
5493 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
5494 {
5495 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
5496 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
5497 return expand_expr (result, target, mode, EXPAND_NORMAL);
5498 }
5499 else
5500 {
5501 tree len, lenp1;
5502 rtx ret;
5503
5504 /* Ensure we get an actual string whose length can be evaluated at
5505 compile-time, not an expression containing a string. This is
5506 because the latter will potentially produce pessimized code
5507 when used to produce the return value. */
5508 c_strlen_data lendata = { };
5509 if (!c_getstr (src)
5510 || !(len = c_strlen (src, 0, &lendata, 1)))
5511 return expand_movstr (dst, src, target,
5512 /*retmode=*/ RETURN_END_MINUS_ONE);
5513
5514 if (lendata.decl)
5515 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
5516
5517 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
5518 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
5519 target, exp,
5520 /*retmode=*/ RETURN_END_MINUS_ONE);
5521
5522 if (ret)
5523 return ret;
5524
5525 if (TREE_CODE (len) == INTEGER_CST)
5526 {
5527 rtx len_rtx = expand_normal (len);
5528
5529 if (CONST_INT_P (len_rtx))
5530 {
5531 ret = expand_builtin_strcpy_args (exp, dst, src, target);
5532
5533 if (ret)
5534 {
5535 if (! target)
5536 {
5537 if (mode != VOIDmode)
5538 target = gen_reg_rtx (mode);
5539 else
5540 target = gen_reg_rtx (GET_MODE (ret));
5541 }
5542 if (GET_MODE (target) != GET_MODE (ret))
5543 ret = gen_lowpart (GET_MODE (target), ret);
5544
5545 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
5546 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
5547 gcc_assert (ret);
5548
5549 return target;
5550 }
5551 }
5552 }
5553
5554 return expand_movstr (dst, src, target,
5555 /*retmode=*/ RETURN_END_MINUS_ONE);
5556 }
5557 }
5558
5559 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
5560 arguments while being careful to avoid duplicate warnings (which could
5561 be issued if the expander were to expand the call, resulting in it
5562 being emitted in expand_call()). */
5563
5564 static rtx
5565 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
5566 {
5567 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
5568 {
5569 /* The call has been successfully expanded. Check for nonstring
5570 arguments and issue warnings as appropriate. */
5571 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
5572 return ret;
5573 }
5574
5575 return NULL_RTX;
5576 }
5577
5578 /* Check a call EXP to the stpncpy built-in for validity.
5579 Return NULL_RTX on both success and failure. */
5580
5581 static rtx
5582 expand_builtin_stpncpy (tree exp, rtx)
5583 {
5584 if (!validate_arglist (exp,
5585 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5586 || !warn_stringop_overflow)
5587 return NULL_RTX;
5588
5589 /* The source and destination of the call. */
5590 tree dest = CALL_EXPR_ARG (exp, 0);
5591 tree src = CALL_EXPR_ARG (exp, 1);
5592
5593 /* The exact number of bytes to write (not the maximum). */
5594 tree len = CALL_EXPR_ARG (exp, 2);
5595 access_data data (exp, access_read_write);
5596 /* The size of the destination object. */
5597 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5598 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
5599 return NULL_RTX;
5600 }
5601
5602 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
5603 bytes from constant string DATA + OFFSET and return it as target
5604 constant. */
5605
5606 rtx
5607 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
5608 scalar_int_mode mode)
5609 {
5610 const char *str = (const char *) data;
5611
5612 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
5613 return const0_rtx;
5614
5615 return c_readstr (str + offset, mode);
5616 }
5617
5618 /* Helper to check the sizes of sequences and the destination of calls
5619 to __builtin_strncat and __builtin___strncat_chk. Returns true on
5620 success (no overflow or invalid sizes), false otherwise. */
5621
5622 static bool
5623 check_strncat_sizes (tree exp, tree objsize)
5624 {
5625 tree dest = CALL_EXPR_ARG (exp, 0);
5626 tree src = CALL_EXPR_ARG (exp, 1);
5627 tree maxread = CALL_EXPR_ARG (exp, 2);
5628
5629 /* Try to determine the range of lengths that the source expression
5630 refers to. */
5631 c_strlen_data lendata = { };
5632 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5633
5634 /* Try to verify that the destination is big enough for the shortest
5635 string. */
5636
5637 access_data data (exp, access_read_write, maxread, true);
5638 if (!objsize && warn_stringop_overflow)
5639 {
5640 /* If it hasn't been provided by __strncat_chk, try to determine
5641 the size of the destination object into which the source is
5642 being copied. */
5643 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5644 }
5645
5646 /* Add one for the terminating nul. */
5647 tree srclen = (lendata.minlen
5648 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
5649 size_one_node)
5650 : NULL_TREE);
5651
5652 /* The strncat function copies at most MAXREAD bytes and always appends
5653 the terminating nul so the specified upper bound should never be equal
5654 to (or greater than) the size of the destination. */
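/* Illustrative sketch of the diagnosed case (hypothetical names):

     char d[8];
     strncat (d, s, sizeof d);

   The bound equals the destination size, so if the full bound were
   copied there would be no room left for the terminating nul.  */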
5655 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
5656 && tree_int_cst_equal (objsize, maxread))
5657 {
5658 location_t loc = tree_nonartificial_location (exp);
5659 loc = expansion_point_location_if_in_system_header (loc);
5660
5661 warning_at (loc, OPT_Wstringop_overflow_,
5662 "%K%qD specified bound %E equals destination size",
5663 exp, get_callee_fndecl (exp), maxread);
5664
5665 return false;
5666 }
5667
5668 if (!srclen
5669 || (maxread && tree_fits_uhwi_p (maxread)
5670 && tree_fits_uhwi_p (srclen)
5671 && tree_int_cst_lt (maxread, srclen)))
5672 srclen = maxread;
5673
5674 /* The number of bytes to write is LEN but check_access will also
5675 check SRCLEN if LEN's value isn't known. */
5676 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5677 objsize, data.mode, &data);
5678 }
5679
5680 /* Similar to expand_builtin_strcat, do some very basic size validation
5681 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
5682 the built-in expand to a call to the library function. */
5683
5684 static rtx
5685 expand_builtin_strncat (tree exp, rtx)
5686 {
5687 if (!validate_arglist (exp,
5688 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5689 || !warn_stringop_overflow)
5690 return NULL_RTX;
5691
5692 tree dest = CALL_EXPR_ARG (exp, 0);
5693 tree src = CALL_EXPR_ARG (exp, 1);
5694 /* The upper bound on the number of bytes to write. */
5695 tree maxread = CALL_EXPR_ARG (exp, 2);
5696
5697 /* Detect unterminated source (only). */
5698 if (!check_nul_terminated_array (exp, src, maxread))
5699 return NULL_RTX;
5700
5701 /* The length of the source sequence. */
5702 tree slen = c_strlen (src, 1);
5703
5704 /* Try to determine the range of lengths that the source expression
5705 refers to. Since the lengths are only used for warning and not
5706 for code generation disable strict mode below. */
5707 tree maxlen = slen;
5708 if (!maxlen)
5709 {
5710 c_strlen_data lendata = { };
5711 get_range_strlen (src, &lendata, /* eltsize = */ 1);
5712 maxlen = lendata.maxbound;
5713 }
5714
5715 access_data data (exp, access_read_write);
5716 /* Try to verify that the destination is big enough for the shortest
5717 string. First try to determine the size of the destination object
5718 into which the source is being copied. */
5719 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
5720
5721 /* Add one for the terminating nul. */
5722 tree srclen = (maxlen
5723 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
5724 size_one_node)
5725 : NULL_TREE);
5726
5727 /* The strncat function copies at most MAXREAD bytes and always appends
5728 the terminating nul so the specified upper bound should never be equal
5729 to (or greater than) the size of the destination. */
5730 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
5731 && tree_int_cst_equal (destsize, maxread))
5732 {
5733 location_t loc = tree_nonartificial_location (exp);
5734 loc = expansion_point_location_if_in_system_header (loc);
5735
5736 warning_at (loc, OPT_Wstringop_overflow_,
5737 "%K%qD specified bound %E equals destination size",
5738 exp, get_callee_fndecl (exp), maxread);
5739
5740 return NULL_RTX;
5741 }
5742
5743 if (!srclen
5744 || (maxread && tree_fits_uhwi_p (maxread)
5745 && tree_fits_uhwi_p (srclen)
5746 && tree_int_cst_lt (maxread, srclen)))
5747 srclen = maxread;
5748
5749 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
5750 destsize, data.mode, &data);
5751 return NULL_RTX;
5752 }
5753
5754 /* Expand expression EXP, which is a call to the strncpy builtin. Return
5755 NULL_RTX if we failed; the caller should emit a normal call. */
5756
5757 static rtx
5758 expand_builtin_strncpy (tree exp, rtx target)
5759 {
5760 location_t loc = EXPR_LOCATION (exp);
5761
5762 if (!validate_arglist (exp,
5763 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5764 return NULL_RTX;
5765 tree dest = CALL_EXPR_ARG (exp, 0);
5766 tree src = CALL_EXPR_ARG (exp, 1);
5767 /* The number of bytes to write (not the maximum). */
5768 tree len = CALL_EXPR_ARG (exp, 2);
5769
5770 /* The length of the source sequence. */
5771 tree slen = c_strlen (src, 1);
5772
5773 if (warn_stringop_overflow)
5774 {
5775 access_data data (exp, access_read_write, len, true, len, true);
5776 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
5777 compute_objsize (src, ost, &data.src);
5778 tree dstsize = compute_objsize (dest, ost, &data.dst);
5779 /* The number of bytes to write is LEN but check_access will also
5780 check SLEN if LEN's value isn't known. */
5781 check_access (exp, /*dstwrite=*/len,
5782 /*maxread=*/len, src, dstsize, data.mode, &data);
5783 }
5784
5785 /* We must be passed a constant len and src parameter. */
5786 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
5787 return NULL_RTX;
5788
5789 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
5790
5791 /* We're required to pad with trailing zeros if the requested
5792 len is greater than strlen(s2)+1. In that case try to
5793 use store_by_pieces; if it fails, punt. */
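/* Illustrative sketch (hypothetical names): for

     char d[8];
     strncpy (d, "ab", sizeof d);

   the requested length 8 exceeds strlen ("ab") + 1, so the expansion
   must store the two characters followed by six nul bytes, which
   store_by_pieces can emit as constant stores.  */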
5794 if (tree_int_cst_lt (slen, len))
5795 {
5796 unsigned int dest_align = get_pointer_alignment (dest);
5797 const char *p = c_getstr (src);
5798 rtx dest_mem;
5799
5800 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
5801 || !can_store_by_pieces (tree_to_uhwi (len),
5802 builtin_strncpy_read_str,
5803 CONST_CAST (char *, p),
5804 dest_align, false))
5805 return NULL_RTX;
5806
5807 dest_mem = get_memory_rtx (dest, len);
5808 store_by_pieces (dest_mem, tree_to_uhwi (len),
5809 builtin_strncpy_read_str,
5810 CONST_CAST (char *, p), dest_align, false,
5811 RETURN_BEGIN);
5812 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5813 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5814 return dest_mem;
5815 }
5816
5817 return NULL_RTX;
5818 }
5819
5820 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
5821 bytes from constant string DATA + OFFSET and return it as target
5822 constant. */
5823
5824 rtx
5825 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5826 scalar_int_mode mode)
5827 {
5828 const char *c = (const char *) data;
5829 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5830
5831 memset (p, *c, GET_MODE_SIZE (mode));
5832
5833 return c_readstr (p, mode);
5834 }
5835
5836 /* Callback routine for store_by_pieces. Return the RTL of a register
5837 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
5838 char value given in the RTL register data. For example, if mode is
5839 4 bytes wide, return the RTL for 0x01010101*data. */
5840
5841 static rtx
5842 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
5843 scalar_int_mode mode)
5844 {
5845 rtx target, coeff;
5846 size_t size;
5847 char *p;
5848
5849 size = GET_MODE_SIZE (mode);
5850 if (size == 1)
5851 return (rtx) data;
5852
5853 p = XALLOCAVEC (char, size);
5854 memset (p, 1, size);
5855 coeff = c_readstr (p, mode);
5856
5857 target = convert_to_mode (mode, (rtx) data, 1);
5858 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
5859 return force_reg (mode, target);
5860 }
5861
5862 /* Expand expression EXP, which is a call to the memset builtin. Return
5863 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5864 try to get the result in TARGET, if convenient (and in mode MODE if that's
5865 convenient). */
5866
5867 static rtx
5868 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
5869 {
5870 if (!validate_arglist (exp,
5871 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5872 return NULL_RTX;
5873
5874 tree dest = CALL_EXPR_ARG (exp, 0);
5875 tree val = CALL_EXPR_ARG (exp, 1);
5876 tree len = CALL_EXPR_ARG (exp, 2);
5877
5878 check_memop_access (exp, dest, NULL_TREE, len);
5879
5880 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5881 }
5882
5883 /* Helper function to do the actual work for expand_builtin_memset. The
5884 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
5885 so that this can also be called without constructing an actual CALL_EXPR.
5886 The other arguments and return value are the same as for
5887 expand_builtin_memset. */
5888
5889 static rtx
5890 expand_builtin_memset_args (tree dest, tree val, tree len,
5891 rtx target, machine_mode mode, tree orig_exp)
5892 {
5893 tree fndecl, fn;
5894 enum built_in_function fcode;
5895 machine_mode val_mode;
5896 char c;
5897 unsigned int dest_align;
5898 rtx dest_mem, dest_addr, len_rtx;
5899 HOST_WIDE_INT expected_size = -1;
5900 unsigned int expected_align = 0;
5901 unsigned HOST_WIDE_INT min_size;
5902 unsigned HOST_WIDE_INT max_size;
5903 unsigned HOST_WIDE_INT probable_max_size;
5904
5905 dest_align = get_pointer_alignment (dest);
5906
5907 /* If DEST is not a pointer type, don't do this operation in-line. */
5908 if (dest_align == 0)
5909 return NULL_RTX;
5910
5911 if (currently_expanding_gimple_stmt)
5912 stringop_block_profile (currently_expanding_gimple_stmt,
5913 &expected_align, &expected_size);
5914
5915 if (expected_align < dest_align)
5916 expected_align = dest_align;
5917
5918 /* If the LEN parameter is zero, return DEST. */
5919 if (integer_zerop (len))
5920 {
5921 /* Evaluate and ignore VAL in case it has side-effects. */
5922 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5923 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5924 }
5925
5926 /* Stabilize the arguments in case we fail. */
5927 dest = builtin_save_expr (dest);
5928 val = builtin_save_expr (val);
5929 len = builtin_save_expr (len);
5930
5931 len_rtx = expand_normal (len);
5932 determine_block_size (len, len_rtx, &min_size, &max_size,
5933 &probable_max_size);
5934 dest_mem = get_memory_rtx (dest, len);
5935 val_mode = TYPE_MODE (unsigned_char_type_node);
5936
5937 if (TREE_CODE (val) != INTEGER_CST)
5938 {
5939 rtx val_rtx;
5940
5941 val_rtx = expand_normal (val);
5942 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
5943
5944 /* Assume that we can memset by pieces if we can store
5945 the coefficients by pieces (in the required modes).
5946 We can't pass builtin_memset_gen_str as that emits RTL. */
5947 c = 1;
5948 if (tree_fits_uhwi_p (len)
5949 && can_store_by_pieces (tree_to_uhwi (len),
5950 builtin_memset_read_str, &c, dest_align,
5951 true))
5952 {
5953 val_rtx = force_reg (val_mode, val_rtx);
5954 store_by_pieces (dest_mem, tree_to_uhwi (len),
5955 builtin_memset_gen_str, val_rtx, dest_align,
5956 true, RETURN_BEGIN);
5957 }
5958 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5959 dest_align, expected_align,
5960 expected_size, min_size, max_size,
5961 probable_max_size))
5962 goto do_libcall;
5963
5964 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5965 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5966 return dest_mem;
5967 }
5968
5969 if (target_char_cast (val, &c))
5970 goto do_libcall;
5971
5972 if (c)
5973 {
5974 if (tree_fits_uhwi_p (len)
5975 && can_store_by_pieces (tree_to_uhwi (len),
5976 builtin_memset_read_str, &c, dest_align,
5977 true))
5978 store_by_pieces (dest_mem, tree_to_uhwi (len),
5979 builtin_memset_read_str, &c, dest_align, true,
5980 RETURN_BEGIN);
5981 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5982 gen_int_mode (c, val_mode),
5983 dest_align, expected_align,
5984 expected_size, min_size, max_size,
5985 probable_max_size))
5986 goto do_libcall;
5987
5988 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5989 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5990 return dest_mem;
5991 }
5992
5993 set_mem_align (dest_mem, dest_align);
5994 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5995 CALL_EXPR_TAILCALL (orig_exp)
5996 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5997 expected_align, expected_size,
5998 min_size, max_size,
5999 probable_max_size);
6000
6001 if (dest_addr == 0)
6002 {
6003 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6004 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6005 }
6006
6007 return dest_addr;
6008
6009 do_libcall:
6010 fndecl = get_callee_fndecl (orig_exp);
6011 fcode = DECL_FUNCTION_CODE (fndecl);
6012 if (fcode == BUILT_IN_MEMSET)
6013 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6014 dest, val, len);
6015 else if (fcode == BUILT_IN_BZERO)
6016 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6017 dest, len);
6018 else
6019 gcc_unreachable ();
6020 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6021 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
6022 return expand_call (fn, target, target == const0_rtx);
6023 }
6024
6025 /* Expand expression EXP, which is a call to the bzero builtin. Return
6026 NULL_RTX if we failed; the caller should emit a normal call. */
6027
6028 static rtx
6029 expand_builtin_bzero (tree exp)
6030 {
6031 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6032 return NULL_RTX;
6033
6034 tree dest = CALL_EXPR_ARG (exp, 0);
6035 tree size = CALL_EXPR_ARG (exp, 1);
6036
6037 check_memop_access (exp, dest, NULL_TREE, size);
6038
6039 /* New argument list transforming bzero(ptr x, int y) to
6040 memset(ptr x, int 0, size_t y). This is done this way
6041 so that if it isn't expanded inline, we fall back to
6042 calling bzero instead of memset. */
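/* Illustrative sketch: a call bzero (p, n) is expanded below as if it
   had been written memset (p, 0, (size_t) n), while any library call
   emitted on failure still names bzero.  */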
6043
6044 location_t loc = EXPR_LOCATION (exp);
6045
6046 return expand_builtin_memset_args (dest, integer_zero_node,
6047 fold_convert_loc (loc,
6048 size_type_node, size),
6049 const0_rtx, VOIDmode, exp);
6050 }
6051
6052 /* Try to expand cmpstr operation ICODE with the given operands.
6053 Return the result rtx on success, otherwise return null. */
6054
6055 static rtx
6056 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
6057 HOST_WIDE_INT align)
6058 {
6059 machine_mode insn_mode = insn_data[icode].operand[0].mode;
6060
6061 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
6062 target = NULL_RTX;
6063
6064 class expand_operand ops[4];
6065 create_output_operand (&ops[0], target, insn_mode);
6066 create_fixed_operand (&ops[1], arg1_rtx);
6067 create_fixed_operand (&ops[2], arg2_rtx);
6068 create_integer_operand (&ops[3], align);
6069 if (maybe_expand_insn (icode, 4, ops))
6070 return ops[0].value;
6071 return NULL_RTX;
6072 }
6073
6074 /* Expand expression EXP, which is a call to the memcmp built-in function.
6075 Return NULL_RTX if we failed and the caller should emit a normal call,
6076 otherwise try to get the result in TARGET, if convenient.
6077 RESULT_EQ is true if we can relax the returned value to be either zero
6078 or nonzero, without caring about the sign. */
6079
6080 static rtx
6081 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
6082 {
6083 if (!validate_arglist (exp,
6084 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6085 return NULL_RTX;
6086
6087 tree arg1 = CALL_EXPR_ARG (exp, 0);
6088 tree arg2 = CALL_EXPR_ARG (exp, 1);
6089 tree len = CALL_EXPR_ARG (exp, 2);
6090
6091 /* Diagnose calls where the specified length exceeds the size of either
6092 object. */
6093 if (!check_read_access (exp, arg1, len, 0)
6094 || !check_read_access (exp, arg2, len, 0))
6095 return NULL_RTX;
6096
6097 /* Due to the performance benefit, always inline the calls first
6098 when result_eq is false. */
6099 rtx result = NULL_RTX;
6100 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
6101 if (!result_eq && fcode != BUILT_IN_BCMP)
6102 {
6103 result = inline_expand_builtin_bytecmp (exp, target);
6104 if (result)
6105 return result;
6106 }
6107
6108 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6109 location_t loc = EXPR_LOCATION (exp);
6110
6111 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6112 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6113
6114 /* If we don't have POINTER_TYPE, call the function. */
6115 if (arg1_align == 0 || arg2_align == 0)
6116 return NULL_RTX;
6117
6118 rtx arg1_rtx = get_memory_rtx (arg1, len);
6119 rtx arg2_rtx = get_memory_rtx (arg2, len);
6120 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
6121
6122 /* Set MEM_SIZE as appropriate. */
6123 if (CONST_INT_P (len_rtx))
6124 {
6125 set_mem_size (arg1_rtx, INTVAL (len_rtx));
6126 set_mem_size (arg2_rtx, INTVAL (len_rtx));
6127 }
6128
6129 by_pieces_constfn constfn = NULL;
6130
6131 /* Try to get the byte representation of the constant ARG2 (or, only
6132 when the function's result is used for equality to zero, ARG1)
6133 points to, with its byte size in NBYTES. */
6134 unsigned HOST_WIDE_INT nbytes;
6135 const char *rep = getbyterep (arg2, &nbytes);
6136 if (result_eq && rep == NULL)
6137 {
6138 /* For equality to zero the arguments are interchangeable. */
6139 rep = getbyterep (arg1, &nbytes);
6140 if (rep != NULL)
6141 std::swap (arg1_rtx, arg2_rtx);
6142 }
6143
6144 /* If the function's constant bound LEN_RTX is less than or equal
6145 to the byte size of the representation of the constant argument,
6146 and if block move would be done by pieces, we can avoid loading
6147 the bytes from memory and only store the computed constant result. */
6148 if (rep
6149 && CONST_INT_P (len_rtx)
6150 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
6151 constfn = builtin_memcpy_read_str;
6152
6153 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
6154 TREE_TYPE (len), target,
6155 result_eq, constfn,
6156 CONST_CAST (char *, rep));
6157
6158 if (result)
6159 {
6160 /* Return the value in the proper mode for this function. */
6161 if (GET_MODE (result) == mode)
6162 return result;
6163
6164 if (target != 0)
6165 {
6166 convert_move (target, result, 0);
6167 return target;
6168 }
6169
6170 return convert_to_mode (mode, result, 0);
6171 }
6172
6173 return NULL_RTX;
6174 }
6175
6176 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
6177 if we failed; the caller should emit a normal call, otherwise try to get
6178 the result in TARGET, if convenient. */
6179
6180 static rtx
6181 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
6182 {
6183 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6184 return NULL_RTX;
6185
6186 tree arg1 = CALL_EXPR_ARG (exp, 0);
6187 tree arg2 = CALL_EXPR_ARG (exp, 1);
6188
6189 if (!check_read_access (exp, arg1)
6190 || !check_read_access (exp, arg2))
6191 return NULL_RTX;
6192
6193 /* Due to the performance benefit, always inline the calls first. */
6194 rtx result = NULL_RTX;
6195 result = inline_expand_builtin_bytecmp (exp, target);
6196 if (result)
6197 return result;
6198
6199 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
6200 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6201 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
6202 return NULL_RTX;
6203
6204 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6205 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6206
6207 /* If we don't have POINTER_TYPE, call the function. */
6208 if (arg1_align == 0 || arg2_align == 0)
6209 return NULL_RTX;
6210
6211 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
6212 arg1 = builtin_save_expr (arg1);
6213 arg2 = builtin_save_expr (arg2);
6214
6215 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
6216 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
6217
6218 /* Try to call cmpstrsi. */
6219 if (cmpstr_icode != CODE_FOR_nothing)
6220 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
6221 MIN (arg1_align, arg2_align));
6222
6223 /* Try to determine at least one length and call cmpstrnsi. */
6224 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6225 {
6226 tree len;
6227 rtx arg3_rtx;
6228
6229 tree len1 = c_strlen (arg1, 1);
6230 tree len2 = c_strlen (arg2, 1);
6231
6232 if (len1)
6233 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
6234 if (len2)
6235 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
6236
6237 /* If we don't have a constant length for the first, use the length
6238 of the second, if we know it. We don't require a constant for
6239 this case; some cost analysis could be done if both are available
6240 but neither is constant. For now, assume they're equally cheap,
6241 unless one has side effects. If both strings have constant lengths,
6242 use the smaller. */
6243
6244 if (!len1)
6245 len = len2;
6246 else if (!len2)
6247 len = len1;
6248 else if (TREE_SIDE_EFFECTS (len1))
6249 len = len2;
6250 else if (TREE_SIDE_EFFECTS (len2))
6251 len = len1;
6252 else if (TREE_CODE (len1) != INTEGER_CST)
6253 len = len2;
6254 else if (TREE_CODE (len2) != INTEGER_CST)
6255 len = len1;
6256 else if (tree_int_cst_lt (len1, len2))
6257 len = len1;
6258 else
6259 len = len2;
6260
6261 /* If both arguments have side effects, we cannot optimize. */
6262 if (len && !TREE_SIDE_EFFECTS (len))
6263 {
6264 arg3_rtx = expand_normal (len);
6265 result = expand_cmpstrn_or_cmpmem
6266 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
6267 arg3_rtx, MIN (arg1_align, arg2_align));
6268 }
6269 }
6270
6271 tree fndecl = get_callee_fndecl (exp);
6272 if (result)
6273 {
6274 /* Check to see if the argument was declared attribute nonstring
6275 and if so, issue a warning since at this point it's not known
6276 to be nul-terminated. */
6277 maybe_warn_nonstring_arg (fndecl, exp);
6278
6279 /* Return the value in the proper mode for this function. */
6280 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6281 if (GET_MODE (result) == mode)
6282 return result;
6283 if (target == 0)
6284 return convert_to_mode (mode, result, 0);
6285 convert_move (target, result, 0);
6286 return target;
6287 }
6288
6289 /* Expand the library call ourselves using a stabilized argument
6290 list to avoid re-evaluating the function's arguments twice. */
6291 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
6292 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6293 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6294 return expand_call (fn, target, target == const0_rtx);
6295 }
6296
6297 /* Expand expression EXP, which is a call to the strncmp builtin. Return
6298 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6299 try to get the result in TARGET, if convenient. */
6300
6301 static rtx
6302 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
6303 ATTRIBUTE_UNUSED machine_mode mode)
6304 {
6305 if (!validate_arglist (exp,
6306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6307 return NULL_RTX;
6308
6309 tree arg1 = CALL_EXPR_ARG (exp, 0);
6310 tree arg2 = CALL_EXPR_ARG (exp, 1);
6311 tree arg3 = CALL_EXPR_ARG (exp, 2);
6312
6313 if (!check_nul_terminated_array (exp, arg1, arg3)
6314 || !check_nul_terminated_array (exp, arg2, arg3))
6315 return NULL_RTX;
6316
6317 location_t loc = tree_nonartificial_location (exp);
6318 loc = expansion_point_location_if_in_system_header (loc);
6319
6320 tree len1 = c_strlen (arg1, 1);
6321 tree len2 = c_strlen (arg2, 1);
6322
6323 if (!len1 || !len2)
6324 {
6325 /* Check to see if the argument was declared attribute nonstring
6326 and if so, issue a warning since at this point it's not known
6327 to be nul-terminated. */
6328 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
6329 && !len1 && !len2)
6330 {
6331 /* A strncmp read is constrained not just by the bound but
6332 also by the length of the shorter string. Specifying
6333 a bound that's larger than the size of either array makes
6334 no sense and is likely a bug. When the length of neither
6335 of the two strings is known but the sizes of both of
6336 the arrays they are stored in are, issue a warning if
6337 the bound is larger than the size of the larger
6338 of the two arrays. */
6339
6340 access_ref ref1 (arg3, true);
6341 access_ref ref2 (arg3, true);
6342
6343 tree bndrng[2] = { NULL_TREE, NULL_TREE };
6344 get_size_range (arg3, bndrng, ref1.bndrng);
6345
6346 tree size1 = compute_objsize (arg1, 1, &ref1);
6347 tree size2 = compute_objsize (arg2, 1, &ref2);
6348 tree func = get_callee_fndecl (exp);
6349
6350 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
6351 {
6352 offset_int rem1 = ref1.size_remaining ();
6353 offset_int rem2 = ref2.size_remaining ();
6354 if (rem1 == 0 || rem2 == 0)
6355 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
6356 bndrng, integer_zero_node);
6357 else
6358 {
6359 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
6360 if (maxrem < wi::to_offset (bndrng[0]))
6361 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
6362 func, bndrng,
6363 wide_int_to_tree (sizetype, maxrem));
6364 }
6365 }
6366 else if (bndrng[0]
6367 && !integer_zerop (bndrng[0])
6368 && ((size1 && integer_zerop (size1))
6369 || (size2 && integer_zerop (size2))))
6370 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
6371 bndrng, integer_zero_node);
6372 }
6373 }
6374
6375 /* Due to the performance benefit, always inline the calls first. */
6376 rtx result = NULL_RTX;
6377 result = inline_expand_builtin_bytecmp (exp, target);
6378 if (result)
6379 return result;
6380
6381 /* If c_strlen can determine an expression for one of the string
6382 lengths, and it doesn't have side effects, then emit cmpstrnsi
6383 using length MIN(strlen(string)+1, arg3). */
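/* Illustrative sketch: in strncmp (s, "abc", n) the second length is
   known, so the comparison can be emitted with length MIN (3 + 1, n);
   bytes past the terminating nul of the shorter string cannot change
   the result.  */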
6384 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
6385 if (cmpstrn_icode == CODE_FOR_nothing)
6386 return NULL_RTX;
6387
6388 tree len;
6389
6390 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6391 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6392
6393 if (len1)
6394 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
6395 if (len2)
6396 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
6397
6398 tree len3 = fold_convert_loc (loc, sizetype, arg3);
6399
6400 /* If we don't have a constant length for the first, use the length
6401 of the second, if we know it. If neither string is constant length,
6402 use the given length argument. We don't require a constant for
6403 this case; some cost analysis could be done if both are available
6404 but neither is constant. For now, assume they're equally cheap,
6405 unless one has side effects. If both strings have constant lengths,
6406 use the smaller. */
6407
6408 if (!len1 && !len2)
6409 len = len3;
6410 else if (!len1)
6411 len = len2;
6412 else if (!len2)
6413 len = len1;
6414 else if (TREE_SIDE_EFFECTS (len1))
6415 len = len2;
6416 else if (TREE_SIDE_EFFECTS (len2))
6417 len = len1;
6418 else if (TREE_CODE (len1) != INTEGER_CST)
6419 len = len2;
6420 else if (TREE_CODE (len2) != INTEGER_CST)
6421 len = len1;
6422 else if (tree_int_cst_lt (len1, len2))
6423 len = len1;
6424 else
6425 len = len2;
6426
6427 /* If we are not using the given length, we must incorporate it here.
6428 The actual new length parameter will be MIN(len,arg3) in this case. */
6429 if (len != len3)
6430 {
6431 len = fold_convert_loc (loc, sizetype, len);
6432 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
6433 }
6434 rtx arg1_rtx = get_memory_rtx (arg1, len);
6435 rtx arg2_rtx = get_memory_rtx (arg2, len);
6436 rtx arg3_rtx = expand_normal (len);
6437 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
6438 arg2_rtx, TREE_TYPE (len), arg3_rtx,
6439 MIN (arg1_align, arg2_align));
6440
6441 tree fndecl = get_callee_fndecl (exp);
6442 if (result)
6443 {
6444 /* Return the value in the proper mode for this function. */
6445 mode = TYPE_MODE (TREE_TYPE (exp));
6446 if (GET_MODE (result) == mode)
6447 return result;
6448 if (target == 0)
6449 return convert_to_mode (mode, result, 0);
6450 convert_move (target, result, 0);
6451 return target;
6452 }
6453
6454 /* Expand the library call ourselves using a stabilized argument
6455 list to avoid re-evaluating the function's arguments twice. */
6456 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
6457 if (TREE_NO_WARNING (exp))
6458 TREE_NO_WARNING (call) = true;
6459 gcc_assert (TREE_CODE (call) == CALL_EXPR);
6460 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
6461 return expand_call (call, target, target == const0_rtx);
6462 }
6463
6464 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
6465 if that's convenient. */
6466
6467 rtx
6468 expand_builtin_saveregs (void)
6469 {
6470 rtx val;
6471 rtx_insn *seq;
6472
6473 /* Don't do __builtin_saveregs more than once in a function.
6474 Save the result of the first call and reuse it. */
6475 if (saveregs_value != 0)
6476 return saveregs_value;
6477
6478 /* When this function is called, it means that registers must be
6479 saved on entry to this function. So we migrate the call to the
6480 first insn of this function. */
6481
6482 start_sequence ();
6483
6484 /* Do whatever the machine needs done in this case. */
6485 val = targetm.calls.expand_builtin_saveregs ();
6486
6487 seq = get_insns ();
6488 end_sequence ();
6489
6490 saveregs_value = val;
6491
6492 /* Put the insns after the NOTE that starts the function. If this
6493 is inside a start_sequence, make the outer-level insn chain current, so
6494 the code is placed at the start of the function. */
6495 push_topmost_sequence ();
6496 emit_insn_after (seq, entry_of_function ());
6497 pop_topmost_sequence ();
6498
6499 return val;
6500 }
6501
6502 /* Expand a call to __builtin_next_arg. */
6503
6504 static rtx
6505 expand_builtin_next_arg (void)
6506 {
6507 /* Checking arguments is already done in fold_builtin_next_arg
6508 that must be called before this function. */
6509 return expand_binop (ptr_mode, add_optab,
6510 crtl->args.internal_arg_pointer,
6511 crtl->args.arg_offset_rtx,
6512 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6513 }
6514
6515 /* Make it easier for the backends by protecting the valist argument
6516 from multiple evaluations. */
6517
6518 static tree
6519 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
6520 {
6521 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
6522
6523 /* The current way of determining the type of valist is completely
6524 bogus. We should have the information on the va builtin instead. */
6525 if (!vatype)
6526 vatype = targetm.fn_abi_va_list (cfun->decl);
6527
6528 if (TREE_CODE (vatype) == ARRAY_TYPE)
6529 {
6530 if (TREE_SIDE_EFFECTS (valist))
6531 valist = save_expr (valist);
6532
6533 /* For this case, the backends will be expecting a pointer to
6534 vatype, but it's possible we've actually been given an array
6535 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
6536 So fix it. */
6537 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
6538 {
6539 tree p1 = build_pointer_type (TREE_TYPE (vatype));
6540 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
6541 }
6542 }
6543 else
6544 {
6545 tree pt = build_pointer_type (vatype);
6546
6547 if (! needs_lvalue)
6548 {
6549 if (! TREE_SIDE_EFFECTS (valist))
6550 return valist;
6551
6552 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
6553 TREE_SIDE_EFFECTS (valist) = 1;
6554 }
6555
6556 if (TREE_SIDE_EFFECTS (valist))
6557 valist = save_expr (valist);
6558 valist = fold_build2_loc (loc, MEM_REF,
6559 vatype, valist, build_int_cst (pt, 0));
6560 }
6561
6562 return valist;
6563 }
6564
6565 /* The "standard" definition of va_list is void*. */
6566
6567 tree
6568 std_build_builtin_va_list (void)
6569 {
6570 return ptr_type_node;
6571 }
6572
6573 /* The "standard" abi va_list is va_list_type_node. */
6574
6575 tree
6576 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
6577 {
6578 return va_list_type_node;
6579 }
6580
6581 /* The "standard" type of va_list is va_list_type_node. */
6582
6583 tree
6584 std_canonical_va_list_type (tree type)
6585 {
6586 tree wtype, htype;
6587
6588 wtype = va_list_type_node;
6589 htype = type;
6590
6591 if (TREE_CODE (wtype) == ARRAY_TYPE)
6592 {
6593 /* If va_list is an array type, the argument may have decayed
6594 to a pointer type, e.g. by being passed to another function.
6595 In that case, unwrap both types so that we can compare the
6596 underlying records. */
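/* Illustrative sketch: on a target that declares va_list as a
   one-element array of some record type, a va_list parameter decays to
   a pointer to that record, so both WTYPE and HTYPE are unwrapped to
   the underlying record before comparing.  */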
6597 if (TREE_CODE (htype) == ARRAY_TYPE
6598 || POINTER_TYPE_P (htype))
6599 {
6600 wtype = TREE_TYPE (wtype);
6601 htype = TREE_TYPE (htype);
6602 }
6603 }
6604 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
6605 return va_list_type_node;
6606
6607 return NULL_TREE;
6608 }
6609
6610 /* The "standard" implementation of va_start: just assign `nextarg' to
6611 the variable. */
6612
6613 void
6614 std_expand_builtin_va_start (tree valist, rtx nextarg)
6615 {
6616 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
6617 convert_move (va_r, nextarg, 0);
6618 }
6619
6620 /* Expand EXP, a call to __builtin_va_start. */
6621
6622 static rtx
6623 expand_builtin_va_start (tree exp)
6624 {
6625 rtx nextarg;
6626 tree valist;
6627 location_t loc = EXPR_LOCATION (exp);
6628
6629 if (call_expr_nargs (exp) < 2)
6630 {
6631 error_at (loc, "too few arguments to function %<va_start%>");
6632 return const0_rtx;
6633 }
6634
6635 if (fold_builtin_next_arg (exp, true))
6636 return const0_rtx;
6637
6638 nextarg = expand_builtin_next_arg ();
6639 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
6640
6641 if (targetm.expand_builtin_va_start)
6642 targetm.expand_builtin_va_start (valist, nextarg);
6643 else
6644 std_expand_builtin_va_start (valist, nextarg);
6645
6646 return const0_rtx;
6647 }
6648
6649 /* Expand EXP, a call to __builtin_va_end. */
6650
6651 static rtx
6652 expand_builtin_va_end (tree exp)
6653 {
6654 tree valist = CALL_EXPR_ARG (exp, 0);
6655
6656 /* Evaluate for side effects, if needed. I hate macros that don't
6657 do that. */
6658 if (TREE_SIDE_EFFECTS (valist))
6659 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
6660
6661 return const0_rtx;
6662 }
6663
6664 /* Expand EXP, a call to __builtin_va_copy. We do this as a
6665 builtin rather than just as an assignment in stdarg.h because of the
6666 nastiness of array-type va_list types. */
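/* Illustrative note: where va_list is an array type, the DST argument
   passed to va_copy decays to a pointer, so a plain assignment would
   copy only that pointer; the expansion below copies the underlying
   object with a block move instead.  */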
6667
6668 static rtx
6669 expand_builtin_va_copy (tree exp)
6670 {
6671 tree dst, src, t;
6672 location_t loc = EXPR_LOCATION (exp);
6673
6674 dst = CALL_EXPR_ARG (exp, 0);
6675 src = CALL_EXPR_ARG (exp, 1);
6676
6677 dst = stabilize_va_list_loc (loc, dst, 1);
6678 src = stabilize_va_list_loc (loc, src, 0);
6679
6680 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
6681
6682 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
6683 {
6684 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
6685 TREE_SIDE_EFFECTS (t) = 1;
6686 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6687 }
6688 else
6689 {
6690 rtx dstb, srcb, size;
6691
6692 /* Evaluate to pointers. */
6693 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
6694 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
6695 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
6696 NULL_RTX, VOIDmode, EXPAND_NORMAL);
6697
6698 dstb = convert_memory_address (Pmode, dstb);
6699 srcb = convert_memory_address (Pmode, srcb);
6700
6701 /* "Dereference" to BLKmode memories. */
6702 dstb = gen_rtx_MEM (BLKmode, dstb);
6703 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
6704 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6705 srcb = gen_rtx_MEM (BLKmode, srcb);
6706 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
6707 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
6708
6709 /* Copy. */
6710 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
6711 }
6712
6713 return const0_rtx;
6714 }
6715
6716 /* Expand a call to one of the builtin functions __builtin_frame_address or
6717 __builtin_return_address. */
6718
6719 static rtx
6720 expand_builtin_frame_address (tree fndecl, tree exp)
6721 {
6722 /* The argument must be a nonnegative integer constant.
6723 It counts the number of frames to scan up the stack.
6724 The value is either the frame pointer value or the return
6725 address saved in that frame. */
6726 if (call_expr_nargs (exp) == 0)
6727 /* Warning about missing arg was already issued. */
6728 return const0_rtx;
6729 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
6730 {
6731 error ("invalid argument to %qD", fndecl);
6732 return const0_rtx;
6733 }
6734 else
6735 {
6736 /* Number of frames to scan up the stack. */
6737 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
6738
6739 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
6740
6741 /* Some ports cannot access arbitrary stack frames. */
6742 if (tem == NULL)
6743 {
6744 warning (0, "unsupported argument to %qD", fndecl);
6745 return const0_rtx;
6746 }
6747
6748 if (count)
6749 {
6750 /* Warn since no effort is made to ensure that any frame
6751 beyond the current one exists or can be safely reached. */
6752 warning (OPT_Wframe_address, "calling %qD with "
6753 "a nonzero argument is unsafe", fndecl);
6754 }
6755
6756 /* For __builtin_frame_address, return what we've got. */
6757 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6758 return tem;
6759
6760 if (!REG_P (tem)
6761 && ! CONSTANT_P (tem))
6762 tem = copy_addr_to_reg (tem);
6763 return tem;
6764 }
6765 }
6766
6767 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
6768 failed and the caller should emit a normal call. */
6769
6770 static rtx
6771 expand_builtin_alloca (tree exp)
6772 {
6773 rtx op0;
6774 rtx result;
6775 unsigned int align;
6776 tree fndecl = get_callee_fndecl (exp);
6777 HOST_WIDE_INT max_size;
6778 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6779 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
6780 bool valid_arglist
6781 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6782 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
6783 VOID_TYPE)
6784 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
6785 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
6786 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
6787
6788 if (!valid_arglist)
6789 return NULL_RTX;
6790
6791 if ((alloca_for_var
6792 && warn_vla_limit >= HOST_WIDE_INT_MAX
6793 && warn_alloc_size_limit < warn_vla_limit)
6794 || (!alloca_for_var
6795 && warn_alloca_limit >= HOST_WIDE_INT_MAX
6796 && warn_alloc_size_limit < warn_alloca_limit
6797 ))
6798 {
6799 /* -Walloca-larger-than and -Wvla-larger-than settings of
6800 less than HOST_WIDE_INT_MAX override the more general
6801 -Walloc-size-larger-than so unless either of the former
6802 options is smaller than the last one (which would imply
6803 that the call was already checked), check the alloca
6804 arguments for overflow. */
6805 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
6806 int idx[] = { 0, -1 };
6807 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
6808 }
6809
6810 /* Compute the argument. */
6811 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
6812
6813 /* Compute the alignment. */
6814 align = (fcode == BUILT_IN_ALLOCA
6815 ? BIGGEST_ALIGNMENT
6816 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
6817
6818 /* Compute the maximum size. */
6819 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
6820 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
6821 : -1);
6822
6823 /* Allocate the desired space. If the allocation stems from the declaration
6824 of a variable-sized object, it cannot accumulate. */
6825 result
6826 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
6827 result = convert_memory_address (ptr_mode, result);
6828
6829 /* Dynamic allocations for variables are recorded during gimplification. */
6830 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
6831 record_dynamic_alloc (exp);
6832
6833 return result;
6834 }
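/* Illustrative calls, not part of GCC itself: the variants handled above
   differ only in their trailing arguments, e.g.

     void *p0 = __builtin_alloca (n);                   // BIGGEST_ALIGNMENT
     void *p1 = __builtin_alloca_with_align (n, 128);   // alignment given in bits

   while the _with_align_and_max form adds a constant maximum size as a
   third argument.  The alignment and maximum must be integer constants,
   which is why they are read with TREE_INT_CST_LOW above.  */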
6835
6836 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
6837 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
6838 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
6839 handle_builtin_stack_restore function. */
6840
6841 static rtx
6842 expand_asan_emit_allocas_unpoison (tree exp)
6843 {
6844 tree arg0 = CALL_EXPR_ARG (exp, 0);
6845 tree arg1 = CALL_EXPR_ARG (exp, 1);
6846 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6847 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
6848 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
6849 stack_pointer_rtx, NULL_RTX, 0,
6850 OPTAB_LIB_WIDEN);
6851 off = convert_modes (ptr_mode, Pmode, off, 0);
6852 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
6853 OPTAB_LIB_WIDEN);
6854 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
6855 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
6856 top, ptr_mode, bot, ptr_mode);
6857 return ret;
6858 }
6859
6860 /* Expand a call to the bswap builtin in EXP.
6861 Return NULL_RTX if a normal call should be emitted rather than expanding the
6862 function in-line. If convenient, the result should be placed in TARGET.
6863 SUBTARGET may be used as the target for computing one of EXP's operands. */
6864
6865 static rtx
6866 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
6867 rtx subtarget)
6868 {
6869 tree arg;
6870 rtx op0;
6871
6872 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6873 return NULL_RTX;
6874
6875 arg = CALL_EXPR_ARG (exp, 0);
6876 op0 = expand_expr (arg,
6877 subtarget && GET_MODE (subtarget) == target_mode
6878 ? subtarget : NULL_RTX,
6879 target_mode, EXPAND_NORMAL);
6880 if (GET_MODE (op0) != target_mode)
6881 op0 = convert_to_mode (target_mode, op0, 1);
6882
6883 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
6884
6885 gcc_assert (target);
6886
6887 return convert_to_mode (target_mode, target, 1);
6888 }
6889
6890 /* Expand a call to a unary builtin in EXP.
6891 Return NULL_RTX if a normal call should be emitted rather than expanding the
6892 function in-line. If convenient, the result should be placed in TARGET.
6893 SUBTARGET may be used as the target for computing one of EXP's operands. */
6894
6895 static rtx
6896 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
6897 rtx subtarget, optab op_optab)
6898 {
6899 rtx op0;
6900
6901 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
6902 return NULL_RTX;
6903
6904 /* Compute the argument. */
6905 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
6906 (subtarget
6907 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
6908 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
6909 VOIDmode, EXPAND_NORMAL);
6910 /* Compute op, into TARGET if possible.
6911 Set TARGET to wherever the result comes back. */
6912 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6913 op_optab, op0, target, op_optab != clrsb_optab);
6914 gcc_assert (target);
6915
6916 return convert_to_mode (target_mode, target, 0);
6917 }
6918
6919 /* Expand a call to __builtin_expect. We just return our argument
6920 as the builtin_expect semantics should have already been handled by
6921 the tree branch prediction pass. */
6922
6923 static rtx
6924 expand_builtin_expect (tree exp, rtx target)
6925 {
6926 tree arg;
6927
6928 if (call_expr_nargs (exp) < 2)
6929 return const0_rtx;
6930 arg = CALL_EXPR_ARG (exp, 0);
6931
6932 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6933 /* When guessing was done, the hints should be already stripped away. */
6934 gcc_assert (!flag_guess_branch_prob
6935 || optimize == 0 || seen_error ());
6936 return target;
6937 }
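/* Illustrative source pattern, not part of GCC itself: by the time
   expansion runs the hint below has already been consumed by branch
   prediction, so only the tested expression is emitted, e.g.

     if (__builtin_expect (p == NULL, 0))   // "p is rarely NULL" hint
       handle_rare_error ();

   expands here to a plain comparison of P against NULL.  */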
6938
6939 /* Expand a call to __builtin_expect_with_probability. We just return our
6940 argument as the builtin_expect semantics should have already been handled
6941 by the tree branch prediction pass. */
6942
6943 static rtx
6944 expand_builtin_expect_with_probability (tree exp, rtx target)
6945 {
6946 tree arg;
6947
6948 if (call_expr_nargs (exp) < 3)
6949 return const0_rtx;
6950 arg = CALL_EXPR_ARG (exp, 0);
6951
6952 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6953 /* When guessing was done, the hints should be already stripped away. */
6954 gcc_assert (!flag_guess_branch_prob
6955 || optimize == 0 || seen_error ());
6956 return target;
6957 }
6958
6959
6960 /* Expand a call to __builtin_assume_aligned. We just return our first
6961 argument as the builtin_assume_aligned semantics should have already been
6962 handled by CCP. */
6963
6964 static rtx
6965 expand_builtin_assume_aligned (tree exp, rtx target)
6966 {
6967 if (call_expr_nargs (exp) < 2)
6968 return const0_rtx;
6969 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6970 EXPAND_NORMAL);
6971 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6972 && (call_expr_nargs (exp) < 3
6973 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6974 return target;
6975 }
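/* Illustrative source pattern, not part of GCC itself: the alignment
   promise has already been recorded by CCP, so only the pointer argument
   is expanded, e.g.

     double *q = __builtin_assume_aligned (p, 32);      // 32-byte aligned
     double *r = __builtin_assume_aligned (p, 32, 8);   // aligned to 32 with offset 8

   Both forms simply evaluate to their first argument here.  */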
6976
6977 void
6978 expand_builtin_trap (void)
6979 {
6980 if (targetm.have_trap ())
6981 {
6982 rtx_insn *insn = emit_insn (targetm.gen_trap ());
6983 /* For trap insns when not accumulating outgoing args force
6984 REG_ARGS_SIZE note to prevent crossjumping of calls with
6985 different args sizes. */
6986 if (!ACCUMULATE_OUTGOING_ARGS)
6987 add_args_size_note (insn, stack_pointer_delta);
6988 }
6989 else
6990 {
6991 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6992 tree call_expr = build_call_expr (fn, 0);
6993 expand_call (call_expr, NULL_RTX, false);
6994 }
6995
6996 emit_barrier ();
6997 }
6998
6999 /* Expand a call to __builtin_unreachable. We do nothing except emit
7000 a barrier saying that control flow will not pass here.
7001
7002 It is the responsibility of the program being compiled to ensure
7003 that control flow never reaches __builtin_unreachable. */
7004 static void
7005 expand_builtin_unreachable (void)
7006 {
7007 emit_barrier ();
7008 }
7009
7010 /* Expand EXP, a call to fabs, fabsf or fabsl.
7011 Return NULL_RTX if a normal call should be emitted rather than expanding
7012 the function inline. If convenient, the result should be placed
7013 in TARGET. SUBTARGET may be used as the target for computing
7014 the operand. */
7015
7016 static rtx
7017 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
7018 {
7019 machine_mode mode;
7020 tree arg;
7021 rtx op0;
7022
7023 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7024 return NULL_RTX;
7025
7026 arg = CALL_EXPR_ARG (exp, 0);
7027 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7028 mode = TYPE_MODE (TREE_TYPE (arg));
7029 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7030 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
7031 }
7032
7033 /* Expand EXP, a call to copysign, copysignf, or copysignl.
7034 Return NULL if a normal call should be emitted rather than expanding the
7035 function inline. If convenient, the result should be placed in TARGET.
7036 SUBTARGET may be used as the target for computing the operand. */
7037
7038 static rtx
7039 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
7040 {
7041 rtx op0, op1;
7042 tree arg;
7043
7044 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7045 return NULL_RTX;
7046
7047 arg = CALL_EXPR_ARG (exp, 0);
7048 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7049
7050 arg = CALL_EXPR_ARG (exp, 1);
7051 op1 = expand_normal (arg);
7052
7053 return expand_copysign (op0, op1, target);
7054 }
7055
7056 /* Expand a call to __builtin___clear_cache. */
7057
7058 static rtx
7059 expand_builtin___clear_cache (tree exp)
7060 {
7061 if (!targetm.code_for_clear_cache)
7062 {
7063 #ifdef CLEAR_INSN_CACHE
7064 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7065 does something. Just do the default expansion to a call to
7066 __clear_cache(). */
7067 return NULL_RTX;
7068 #else
7069 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
7070 does nothing. There is no need to call it. Do nothing. */
7071 return const0_rtx;
7072 #endif /* CLEAR_INSN_CACHE */
7073 }
7074
7075 /* We have a "clear_cache" insn, and it will handle everything. */
7076 tree begin, end;
7077 rtx begin_rtx, end_rtx;
7078
7079 /* We must not expand to a library call. If we did, any
7080 fallback library function in libgcc that might contain a call to
7081 __builtin___clear_cache() would recurse infinitely. */
7082 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7083 {
7084 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
7085 return const0_rtx;
7086 }
7087
7088 if (targetm.have_clear_cache ())
7089 {
7090 class expand_operand ops[2];
7091
7092 begin = CALL_EXPR_ARG (exp, 0);
7093 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
7094
7095 end = CALL_EXPR_ARG (exp, 1);
7096 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
7097
7098 create_address_operand (&ops[0], begin_rtx);
7099 create_address_operand (&ops[1], end_rtx);
7100 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
7101 return const0_rtx;
7102 }
7103 return const0_rtx;
7104 }
7105
7106 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
7107
7108 static rtx
7109 round_trampoline_addr (rtx tramp)
7110 {
7111 rtx temp, addend, mask;
7112
7113 /* If we don't need too much alignment, we'll have been guaranteed
7114 proper alignment by get_trampoline_type. */
7115 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
7116 return tramp;
7117
7118 /* Round address up to desired boundary. */
7119 temp = gen_reg_rtx (Pmode);
7120 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
7121 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
7122
7123 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
7124 temp, 0, OPTAB_LIB_WIDEN);
7125 tramp = expand_simple_binop (Pmode, AND, temp, mask,
7126 temp, 0, OPTAB_LIB_WIDEN);
7127
7128 return tramp;
7129 }
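/* For example, with a TRAMPOLINE_ALIGNMENT of 128 bits the code above
   computes the usual round-up idiom

     tramp = (tramp + 15) & -16;

   i.e. add the byte alignment minus one, then mask with the negated byte
   alignment.  */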
7130
7131 static rtx
7132 expand_builtin_init_trampoline (tree exp, bool onstack)
7133 {
7134 tree t_tramp, t_func, t_chain;
7135 rtx m_tramp, r_tramp, r_chain, tmp;
7136
7137 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
7138 POINTER_TYPE, VOID_TYPE))
7139 return NULL_RTX;
7140
7141 t_tramp = CALL_EXPR_ARG (exp, 0);
7142 t_func = CALL_EXPR_ARG (exp, 1);
7143 t_chain = CALL_EXPR_ARG (exp, 2);
7144
7145 r_tramp = expand_normal (t_tramp);
7146 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
7147 MEM_NOTRAP_P (m_tramp) = 1;
7148
7149 /* If ONSTACK, the TRAMP argument should be the address of a field
7150 within the local function's FRAME decl. Either way, let's see if
7151 we can fill in the MEM_ATTRs for this memory. */
7152 if (TREE_CODE (t_tramp) == ADDR_EXPR)
7153 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
7154
7155 /* Creator of a heap trampoline is responsible for making sure the
7156 address is aligned to at least STACK_BOUNDARY. Normally malloc
7157 will ensure this anyhow. */
7158 tmp = round_trampoline_addr (r_tramp);
7159 if (tmp != r_tramp)
7160 {
7161 m_tramp = change_address (m_tramp, BLKmode, tmp);
7162 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
7163 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
7164 }
7165
7166 /* The FUNC argument should be the address of the nested function.
7167 Extract the actual function decl to pass to the hook. */
7168 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
7169 t_func = TREE_OPERAND (t_func, 0);
7170 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
7171
7172 r_chain = expand_normal (t_chain);
7173
7174 /* Generate insns to initialize the trampoline. */
7175 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
7176
7177 if (onstack)
7178 {
7179 trampolines_created = 1;
7180
7181 if (targetm.calls.custom_function_descriptors != 0)
7182 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
7183 "trampoline generated for nested function %qD", t_func);
7184 }
7185
7186 return const0_rtx;
7187 }
7188
7189 static rtx
7190 expand_builtin_adjust_trampoline (tree exp)
7191 {
7192 rtx tramp;
7193
7194 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7195 return NULL_RTX;
7196
7197 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7198 tramp = round_trampoline_addr (tramp);
7199 if (targetm.calls.trampoline_adjust_address)
7200 tramp = targetm.calls.trampoline_adjust_address (tramp);
7201
7202 return tramp;
7203 }
7204
7205 /* Expand a call to the builtin descriptor initialization routine.
7206 A descriptor is made up of a couple of pointers to the static
7207 chain and the code entry in this order. */
7208
7209 static rtx
7210 expand_builtin_init_descriptor (tree exp)
7211 {
7212 tree t_descr, t_func, t_chain;
7213 rtx m_descr, r_descr, r_func, r_chain;
7214
7215 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
7216 VOID_TYPE))
7217 return NULL_RTX;
7218
7219 t_descr = CALL_EXPR_ARG (exp, 0);
7220 t_func = CALL_EXPR_ARG (exp, 1);
7221 t_chain = CALL_EXPR_ARG (exp, 2);
7222
7223 r_descr = expand_normal (t_descr);
7224 m_descr = gen_rtx_MEM (BLKmode, r_descr);
7225 MEM_NOTRAP_P (m_descr) = 1;
7226 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
7227
7228 r_func = expand_normal (t_func);
7229 r_chain = expand_normal (t_chain);
7230
7231 /* Generate insns to initialize the descriptor. */
7232 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
7233 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
7234 POINTER_SIZE / BITS_PER_UNIT), r_func);
7235
7236 return const0_rtx;
7237 }
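/* Schematically, and not as literal GCC code, the stores above lay the
   descriptor out as

     struct { void *static_chain; void *code_entry; };

   with the chain word at offset 0 and the code entry one pointer size
   later, matching the order described in the comment above.  */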
7238
7239 /* Expand a call to the builtin descriptor adjustment routine. */
7240
7241 static rtx
7242 expand_builtin_adjust_descriptor (tree exp)
7243 {
7244 rtx tramp;
7245
7246 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7247 return NULL_RTX;
7248
7249 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7250
7251 /* Unalign the descriptor to allow runtime identification. */
7252 tramp = plus_constant (ptr_mode, tramp,
7253 targetm.calls.custom_function_descriptors);
7254
7255 return force_operand (tramp, NULL_RTX);
7256 }
7257
7258 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
7259 function. The function first checks whether the back end provides
7260 an insn to implement signbit for the respective mode. If not, it
7261 checks whether the floating point format of the value is such that
7262 the sign bit can be extracted. If that is not the case, error out.
7263 EXP is the expression that is a call to the builtin function; if
7264 convenient, the result should be placed in TARGET. */
7265 static rtx
7266 expand_builtin_signbit (tree exp, rtx target)
7267 {
7268 const struct real_format *fmt;
7269 scalar_float_mode fmode;
7270 scalar_int_mode rmode, imode;
7271 tree arg;
7272 int word, bitpos;
7273 enum insn_code icode;
7274 rtx temp;
7275 location_t loc = EXPR_LOCATION (exp);
7276
7277 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7278 return NULL_RTX;
7279
7280 arg = CALL_EXPR_ARG (exp, 0);
7281 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
7282 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
7283 fmt = REAL_MODE_FORMAT (fmode);
7284
7285 arg = builtin_save_expr (arg);
7286
7287 /* Expand the argument yielding a RTX expression. */
7288 temp = expand_normal (arg);
7289
7290 /* Check if the back end provides an insn that handles signbit for the
7291 argument's mode. */
7292 icode = optab_handler (signbit_optab, fmode);
7293 if (icode != CODE_FOR_nothing)
7294 {
7295 rtx_insn *last = get_last_insn ();
7296 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7297 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
7298 return target;
7299 delete_insns_since (last);
7300 }
7301
7302 /* For floating point formats without a sign bit, implement signbit
7303 as "ARG < 0.0". */
7304 bitpos = fmt->signbit_ro;
7305 if (bitpos < 0)
7306 {
7307 /* But we can't do this if the format supports signed zero. */
7308 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
7309
7310 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
7311 build_real (TREE_TYPE (arg), dconst0));
7312 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7313 }
7314
7315 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
7316 {
7317 imode = int_mode_for_mode (fmode).require ();
7318 temp = gen_lowpart (imode, temp);
7319 }
7320 else
7321 {
7322 imode = word_mode;
7323 /* Handle targets with different FP word orders. */
7324 if (FLOAT_WORDS_BIG_ENDIAN)
7325 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
7326 else
7327 word = bitpos / BITS_PER_WORD;
7328 temp = operand_subword_force (temp, word, fmode);
7329 bitpos = bitpos % BITS_PER_WORD;
7330 }
7331
7332 /* Force the intermediate word_mode (or narrower) result into a
7333 register. This avoids attempting to create paradoxical SUBREGs
7334 of floating point modes below. */
7335 temp = force_reg (imode, temp);
7336
7337 /* If the bitpos is within the "result mode" lowpart, the operation
7338 can be implemented with a single bitwise AND. Otherwise, we need
7339 a right shift and an AND. */
7340
7341 if (bitpos < GET_MODE_BITSIZE (rmode))
7342 {
7343 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
7344
7345 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
7346 temp = gen_lowpart (rmode, temp);
7347 temp = expand_binop (rmode, and_optab, temp,
7348 immed_wide_int_const (mask, rmode),
7349 NULL_RTX, 1, OPTAB_LIB_WIDEN);
7350 }
7351 else
7352 {
7353 /* Perform a logical right shift to place the signbit in the least
7354 significant bit, then truncate the result to the desired mode
7355 and mask just this bit. */
7356 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
7357 temp = gen_lowpart (rmode, temp);
7358 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
7359 NULL_RTX, 1, OPTAB_LIB_WIDEN);
7360 }
7361
7362 return temp;
7363 }
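/* A worked example, assuming a typical 64-bit little-endian target with
   32-bit int: for an IEEE binary64 argument the sign bit is bit 63, which
   does not fit the lowpart of the int result, so the shift-and-mask path
   above effectively computes

     signbit (x)  ==>  (int) ((bits_of_x >> 63) & 1)

   whereas a binary32 argument (sign bit 31) takes the single-AND path and
   is masked with 0x80000000 directly.  */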
7364
7365 /* Expand fork or exec calls. TARGET is the desired target of the
7366 call. EXP is the call. FN is the
7367 identifier of the actual function. IGNORE is nonzero if the
7368 value is to be ignored. */
7369
7370 static rtx
7371 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
7372 {
7373 tree id, decl;
7374 tree call;
7375
7376 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
7377 {
7378 tree path = CALL_EXPR_ARG (exp, 0);
7379 /* Detect unterminated path. */
7380 if (!check_read_access (exp, path))
7381 return NULL_RTX;
7382
7383 /* Also detect unterminated first argument. */
7384 switch (DECL_FUNCTION_CODE (fn))
7385 {
7386 case BUILT_IN_EXECL:
7387 case BUILT_IN_EXECLE:
7388 case BUILT_IN_EXECLP:
7389 if (!check_read_access (exp, path))
7390 return NULL_RTX;
7391 default:
7392 break;
7393 }
7394 }
7395
7396
7397 /* If we are not profiling, just call the function. */
7398 if (!profile_arc_flag)
7399 return NULL_RTX;
7400
7401 /* Otherwise call the wrapper. This should be equivalent for the rest of
7402 the compiler, so the code does not diverge, and the wrapper may run the
7403 code necessary for keeping the profiling sane. */
7404
7405 switch (DECL_FUNCTION_CODE (fn))
7406 {
7407 case BUILT_IN_FORK:
7408 id = get_identifier ("__gcov_fork");
7409 break;
7410
7411 case BUILT_IN_EXECL:
7412 id = get_identifier ("__gcov_execl");
7413 break;
7414
7415 case BUILT_IN_EXECV:
7416 id = get_identifier ("__gcov_execv");
7417 break;
7418
7419 case BUILT_IN_EXECLP:
7420 id = get_identifier ("__gcov_execlp");
7421 break;
7422
7423 case BUILT_IN_EXECLE:
7424 id = get_identifier ("__gcov_execle");
7425 break;
7426
7427 case BUILT_IN_EXECVP:
7428 id = get_identifier ("__gcov_execvp");
7429 break;
7430
7431 case BUILT_IN_EXECVE:
7432 id = get_identifier ("__gcov_execve");
7433 break;
7434
7435 default:
7436 gcc_unreachable ();
7437 }
7438
7439 decl = build_decl (DECL_SOURCE_LOCATION (fn),
7440 FUNCTION_DECL, id, TREE_TYPE (fn));
7441 DECL_EXTERNAL (decl) = 1;
7442 TREE_PUBLIC (decl) = 1;
7443 DECL_ARTIFICIAL (decl) = 1;
7444 TREE_NOTHROW (decl) = 1;
7445 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
7446 DECL_VISIBILITY_SPECIFIED (decl) = 1;
7447 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
7448 return expand_call (call, target, ignore);
7449 }
7450
7451
7452 \f
7453 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
7454 the pointer in these functions is void*, the tree optimizers may remove
7455 casts. The mode computed in expand_builtin isn't reliable either, due
7456 to __sync_bool_compare_and_swap.
7457
7458 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
7459 group of builtins. This gives us log2 of the mode size. */
7460
7461 static inline machine_mode
7462 get_builtin_sync_mode (int fcode_diff)
7463 {
7464 /* The size is not negotiable, so ask not to get BLKmode in return
7465 if the target indicates that a smaller size would be better. */
7466 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
7467 }
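/* E.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 is two past its _1 sibling, so
   FCODE_DIFF is 2 and the mode returned is the 8 << 2 = 32-bit integer
   mode.  */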
7468
7469 /* Expand the memory expression LOC and return the appropriate memory operand
7470 for the builtin_sync operations. */
7471
7472 static rtx
7473 get_builtin_sync_mem (tree loc, machine_mode mode)
7474 {
7475 rtx addr, mem;
7476 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
7477 ? TREE_TYPE (TREE_TYPE (loc))
7478 : TREE_TYPE (loc));
7479 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
7480
7481 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
7482 addr = convert_memory_address (addr_mode, addr);
7483
7484 /* Note that we explicitly do not want any alias information for this
7485 memory, so that we kill all other live memories. Otherwise we don't
7486 satisfy the full barrier semantics of the intrinsic. */
7487 mem = gen_rtx_MEM (mode, addr);
7488
7489 set_mem_addr_space (mem, addr_space);
7490
7491 mem = validize_mem (mem);
7492
7493 /* The alignment needs to be at least according to that of the mode. */
7494 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
7495 get_pointer_alignment (loc)));
7496 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
7497 MEM_VOLATILE_P (mem) = 1;
7498
7499 return mem;
7500 }
7501
7502 /* Make sure an argument is in the right mode.
7503 EXP is the tree argument.
7504 MODE is the mode it should be in. */
7505
7506 static rtx
7507 expand_expr_force_mode (tree exp, machine_mode mode)
7508 {
7509 rtx val;
7510 machine_mode old_mode;
7511
7512 if (TREE_CODE (exp) == SSA_NAME
7513 && TYPE_MODE (TREE_TYPE (exp)) != mode)
7514 {
7515 /* Undo argument promotion if possible, as combine might not
7516 be able to do it later due to MEM_VOLATILE_P uses in the
7517 patterns. */
7518 gimple *g = get_gimple_for_ssa_name (exp);
7519 if (g && gimple_assign_cast_p (g))
7520 {
7521 tree rhs = gimple_assign_rhs1 (g);
7522 tree_code code = gimple_assign_rhs_code (g);
7523 if (CONVERT_EXPR_CODE_P (code)
7524 && TYPE_MODE (TREE_TYPE (rhs)) == mode
7525 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
7526 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
7527 && (TYPE_PRECISION (TREE_TYPE (exp))
7528 > TYPE_PRECISION (TREE_TYPE (rhs))))
7529 exp = rhs;
7530 }
7531 }
7532
7533 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
7534 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
7535 of CONST_INTs, where we know the old_mode only from the call argument. */
7536
7537 old_mode = GET_MODE (val);
7538 if (old_mode == VOIDmode)
7539 old_mode = TYPE_MODE (TREE_TYPE (exp));
7540 val = convert_modes (mode, old_mode, val, 1);
7541 return val;
7542 }
7543
7544
7545 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
7546 EXP is the CALL_EXPR. CODE is the rtx code
7547 that corresponds to the arithmetic or logical operation from the name;
7548 an exception here is that NOT actually means NAND. TARGET is an optional
7549 place for us to store the results; AFTER is true if this is the
7550 xxx_and_fetch form, i.e. the value after the operation is returned. */
7551
7552 static rtx
7553 expand_builtin_sync_operation (machine_mode mode, tree exp,
7554 enum rtx_code code, bool after,
7555 rtx target)
7556 {
7557 rtx val, mem;
7558 location_t loc = EXPR_LOCATION (exp);
7559
7560 if (code == NOT && warn_sync_nand)
7561 {
7562 tree fndecl = get_callee_fndecl (exp);
7563 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7564
7565 static bool warned_f_a_n, warned_n_a_f;
7566
7567 switch (fcode)
7568 {
7569 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7570 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7571 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7572 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7573 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7574 if (warned_f_a_n)
7575 break;
7576
7577 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
7578 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
7579 warned_f_a_n = true;
7580 break;
7581
7582 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7583 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7584 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7585 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7586 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7587 if (warned_n_a_f)
7588 break;
7589
7590 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
7591 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
7592 warned_n_a_f = true;
7593 break;
7594
7595 default:
7596 gcc_unreachable ();
7597 }
7598 }
7599
7600 /* Expand the operands. */
7601 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7602 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7603
7604 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
7605 after);
7606 }
7607
7608 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
7609 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
7610 true if this is the boolean form. TARGET is a place for us to store the
7611 results; this is NOT optional if IS_BOOL is true. */
7612
7613 static rtx
7614 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
7615 bool is_bool, rtx target)
7616 {
7617 rtx old_val, new_val, mem;
7618 rtx *pbool, *poval;
7619
7620 /* Expand the operands. */
7621 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7622 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7623 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7624
7625 pbool = poval = NULL;
7626 if (target != const0_rtx)
7627 {
7628 if (is_bool)
7629 pbool = &target;
7630 else
7631 poval = &target;
7632 }
7633 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
7634 false, MEMMODEL_SYNC_SEQ_CST,
7635 MEMMODEL_SYNC_SEQ_CST))
7636 return NULL_RTX;
7637
7638 return target;
7639 }
7640
7641 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
7642 general form is actually an atomic exchange, and some targets only
7643 support a reduced form with the second argument being a constant 1.
7644 EXP is the CALL_EXPR; TARGET is an optional place for us to store
7645 the results. */
7646
7647 static rtx
7648 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
7649 rtx target)
7650 {
7651 rtx val, mem;
7652
7653 /* Expand the operands. */
7654 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7655 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7656
7657 return expand_sync_lock_test_and_set (target, mem, val);
7658 }
7659
7660 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
7661
7662 static void
7663 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
7664 {
7665 rtx mem;
7666
7667 /* Expand the operands. */
7668 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7669
7670 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
7671 }
7672
7673 /* Given an integer representing an ``enum memmodel'', verify its
7674 correctness and return the memory model enum. */
7675
7676 static enum memmodel
7677 get_memmodel (tree exp)
7678 {
7679 rtx op;
7680 unsigned HOST_WIDE_INT val;
7681 location_t loc
7682 = expansion_point_location_if_in_system_header (input_location);
7683
7684 /* If the parameter is not a constant, it's a run time value so we'll just
7685 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
7686 if (TREE_CODE (exp) != INTEGER_CST)
7687 return MEMMODEL_SEQ_CST;
7688
7689 op = expand_normal (exp);
7690
7691 val = INTVAL (op);
7692 if (targetm.memmodel_check)
7693 val = targetm.memmodel_check (val);
7694 else if (val & ~MEMMODEL_MASK)
7695 {
7696 warning_at (loc, OPT_Winvalid_memory_model,
7697 "unknown architecture specifier in memory model to builtin");
7698 return MEMMODEL_SEQ_CST;
7699 }
7700
7701 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
7702 if (memmodel_base (val) >= MEMMODEL_LAST)
7703 {
7704 warning_at (loc, OPT_Winvalid_memory_model,
7705 "invalid memory model argument to builtin");
7706 return MEMMODEL_SEQ_CST;
7707 }
7708
7709 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
7710 be conservative and promote consume to acquire. */
7711 if (val == MEMMODEL_CONSUME)
7712 val = MEMMODEL_ACQUIRE;
7713
7714 return (enum memmodel) val;
7715 }
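/* Illustrative consequences for user code, not part of GCC itself
   (runtime_model stands for any non-constant int):

     __atomic_load_n (&x, __ATOMIC_CONSUME);   // treated as __ATOMIC_ACQUIRE
     __atomic_load_n (&x, runtime_model);      // non-constant: __ATOMIC_SEQ_CST

   per the consume-to-acquire promotion and the constant check above.  */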
7716
7717 /* Expand the __atomic_exchange intrinsic:
7718 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
7719 EXP is the CALL_EXPR.
7720 TARGET is an optional place for us to store the results. */
7721
7722 static rtx
7723 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
7724 {
7725 rtx val, mem;
7726 enum memmodel model;
7727
7728 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7729
7730 if (!flag_inline_atomics)
7731 return NULL_RTX;
7732
7733 /* Expand the operands. */
7734 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7735 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7736
7737 return expand_atomic_exchange (target, mem, val, model);
7738 }
7739
7740 /* Expand the __atomic_compare_exchange intrinsic:
7741 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
7742 TYPE desired, BOOL weak,
7743 enum memmodel success,
7744 enum memmodel failure)
7745 EXP is the CALL_EXPR.
7746 TARGET is an optional place for us to store the results. */
7747
7748 static rtx
7749 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
7750 rtx target)
7751 {
7752 rtx expect, desired, mem, oldval;
7753 rtx_code_label *label;
7754 enum memmodel success, failure;
7755 tree weak;
7756 bool is_weak;
7757 location_t loc
7758 = expansion_point_location_if_in_system_header (input_location);
7759
7760 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
7761 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
7762
7763 if (failure > success)
7764 {
7765 warning_at (loc, OPT_Winvalid_memory_model,
7766 "failure memory model cannot be stronger than success "
7767 "memory model for %<__atomic_compare_exchange%>");
7768 success = MEMMODEL_SEQ_CST;
7769 }
7770
7771 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7772 {
7773 warning_at (loc, OPT_Winvalid_memory_model,
7774 "invalid failure memory model for "
7775 "%<__atomic_compare_exchange%>");
7776 failure = MEMMODEL_SEQ_CST;
7777 success = MEMMODEL_SEQ_CST;
7778 }
7779
7780
7781 if (!flag_inline_atomics)
7782 return NULL_RTX;
7783
7784 /* Expand the operands. */
7785 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7786
7787 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
7788 expect = convert_memory_address (Pmode, expect);
7789 expect = gen_rtx_MEM (mode, expect);
7790 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
7791
7792 weak = CALL_EXPR_ARG (exp, 3);
7793 is_weak = false;
7794 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
7795 is_weak = true;
7796
7797 if (target == const0_rtx)
7798 target = NULL;
7799
7800 /* Lest the rtl backend create a race condition with an improper store
7801 to memory, always create a new pseudo for OLDVAL. */
7802 oldval = NULL;
7803
7804 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
7805 is_weak, success, failure))
7806 return NULL_RTX;
7807
7808 /* Conditionally store back to EXPECT, lest we create a race condition
7809 with an improper store to memory. */
7810 /* ??? With a rearrangement of atomics at the gimple level, we can handle
7811 the normal case where EXPECT is totally private, i.e. a register. At
7812 which point the store can be unconditional. */
7813 label = gen_label_rtx ();
7814 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
7815 GET_MODE (target), 1, label);
7816 emit_move_insn (expect, oldval);
7817 emit_label (label);
7818
7819 return target;
7820 }
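/* An illustrative caller, not part of GCC itself: the canonical CAS loop

     long expected = __atomic_load_n (p, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   relies on EXPECTED being updated only when the exchange fails, which is
   exactly what the conditional store after the comparison above
   provides.  */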
7821
7822 /* Helper function for expand_ifn_atomic_compare_exchange - expand
7823 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
7824 call. The weak parameter must be dropped to match the expected parameter
7825 list and the expected argument changed from value to pointer to memory
7826 slot. */
7827
7828 static void
7829 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
7830 {
7831 unsigned int z;
7832 vec<tree, va_gc> *vec;
7833
7834 vec_alloc (vec, 5);
7835 vec->quick_push (gimple_call_arg (call, 0));
7836 tree expected = gimple_call_arg (call, 1);
7837 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
7838 TREE_TYPE (expected));
7839 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
7840 if (expd != x)
7841 emit_move_insn (x, expd);
7842 tree v = make_tree (TREE_TYPE (expected), x);
7843 vec->quick_push (build1 (ADDR_EXPR,
7844 build_pointer_type (TREE_TYPE (expected)), v));
7845 vec->quick_push (gimple_call_arg (call, 2));
7846 /* Skip the boolean weak parameter. */
7847 for (z = 4; z < 6; z++)
7848 vec->quick_push (gimple_call_arg (call, z));
7849 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
7850 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
7851 gcc_assert (bytes_log2 < 5);
7852 built_in_function fncode
7853 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
7854 + bytes_log2);
7855 tree fndecl = builtin_decl_explicit (fncode);
7856 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
7857 fndecl);
7858 tree exp = build_call_vec (boolean_type_node, fn, vec);
7859 tree lhs = gimple_call_lhs (call);
7860 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
7861 if (lhs)
7862 {
7863 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7864 if (GET_MODE (boolret) != mode)
7865 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7866 x = force_reg (mode, x);
7867 write_complex_part (target, boolret, true);
7868 write_complex_part (target, x, false);
7869 }
7870 }
7871
7872 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
7873
7874 void
7875 expand_ifn_atomic_compare_exchange (gcall *call)
7876 {
7877 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
7878 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
7879 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
7880 rtx expect, desired, mem, oldval, boolret;
7881 enum memmodel success, failure;
7882 tree lhs;
7883 bool is_weak;
7884 location_t loc
7885 = expansion_point_location_if_in_system_header (gimple_location (call));
7886
7887 success = get_memmodel (gimple_call_arg (call, 4));
7888 failure = get_memmodel (gimple_call_arg (call, 5));
7889
7890 if (failure > success)
7891 {
7892 warning_at (loc, OPT_Winvalid_memory_model,
7893 "failure memory model cannot be stronger than success "
7894 "memory model for %<__atomic_compare_exchange%>");
7895 success = MEMMODEL_SEQ_CST;
7896 }
7897
7898 if (is_mm_release (failure) || is_mm_acq_rel (failure))
7899 {
7900 warning_at (loc, OPT_Winvalid_memory_model,
7901 "invalid failure memory model for "
7902 "%<__atomic_compare_exchange%>");
7903 failure = MEMMODEL_SEQ_CST;
7904 success = MEMMODEL_SEQ_CST;
7905 }
7906
7907 if (!flag_inline_atomics)
7908 {
7909 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7910 return;
7911 }
7912
7913 /* Expand the operands. */
7914 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
7915
7916 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
7917 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
7918
7919 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
7920
7921 boolret = NULL;
7922 oldval = NULL;
7923
7924 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
7925 is_weak, success, failure))
7926 {
7927 expand_ifn_atomic_compare_exchange_into_call (call, mode);
7928 return;
7929 }
7930
7931 lhs = gimple_call_lhs (call);
7932 if (lhs)
7933 {
7934 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7935 if (GET_MODE (boolret) != mode)
7936 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
7937 write_complex_part (target, boolret, true);
7938 write_complex_part (target, oldval, false);
7939 }
7940 }
7941
7942 /* Expand the __atomic_load intrinsic:
7943 TYPE __atomic_load (TYPE *object, enum memmodel)
7944 EXP is the CALL_EXPR.
7945 TARGET is an optional place for us to store the results. */
7946
7947 static rtx
7948 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
7949 {
7950 rtx mem;
7951 enum memmodel model;
7952
7953 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7954 if (is_mm_release (model) || is_mm_acq_rel (model))
7955 {
7956 location_t loc
7957 = expansion_point_location_if_in_system_header (input_location);
7958 warning_at (loc, OPT_Winvalid_memory_model,
7959 "invalid memory model for %<__atomic_load%>");
7960 model = MEMMODEL_SEQ_CST;
7961 }
7962
7963 if (!flag_inline_atomics)
7964 return NULL_RTX;
7965
7966 /* Expand the operand. */
7967 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7968
7969 return expand_atomic_load (target, mem, model);
7970 }
7971
7972
7973 /* Expand the __atomic_store intrinsic:
7974 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7975 EXP is the CALL_EXPR.
7976 TARGET is an optional place for us to store the results. */
7977
7978 static rtx
7979 expand_builtin_atomic_store (machine_mode mode, tree exp)
7980 {
7981 rtx mem, val;
7982 enum memmodel model;
7983
7984 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7985 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7986 || is_mm_release (model)))
7987 {
7988 location_t loc
7989 = expansion_point_location_if_in_system_header (input_location);
7990 warning_at (loc, OPT_Winvalid_memory_model,
7991 "invalid memory model for %<__atomic_store%>");
7992 model = MEMMODEL_SEQ_CST;
7993 }
7994
7995 if (!flag_inline_atomics)
7996 return NULL_RTX;
7997
7998 /* Expand the operands. */
7999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8000 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8001
8002 return expand_atomic_store (mem, val, model, false);
8003 }
8004
8005 /* Expand the __atomic_fetch_XXX intrinsic:
8006 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
8007 EXP is the CALL_EXPR.
8008 TARGET is an optional place for us to store the results.
8009 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
8010 FETCH_AFTER is true if returning the result of the operation.
8011 FETCH_AFTER is false if returning the value before the operation.
8012 IGNORE is true if the result is not used.
8013 EXT_CALL is the correct builtin for an external call if this cannot be
8014 resolved to an instruction sequence. */
8015
8016 static rtx
8017 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
8018 enum rtx_code code, bool fetch_after,
8019 bool ignore, enum built_in_function ext_call)
8020 {
8021 rtx val, mem, ret;
8022 enum memmodel model;
8023 tree fndecl;
8024 tree addr;
8025
8026 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8027
8028 /* Expand the operands. */
8029 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8030 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8031
8032 /* Only try generating instructions if inlining is turned on. */
8033 if (flag_inline_atomics)
8034 {
8035 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
8036 if (ret)
8037 return ret;
8038 }
8039
8040 /* Return if a different routine isn't needed for the library call. */
8041 if (ext_call == BUILT_IN_NONE)
8042 return NULL_RTX;
8043
8044 /* Change the call to the specified function. */
8045 fndecl = get_callee_fndecl (exp);
8046 addr = CALL_EXPR_FN (exp);
8047 STRIP_NOPS (addr);
8048
8049 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
8050 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
8051
8052 /* If we will emit code after the call, the call cannot be a tail call.
8053 If it is emitted as a tail call, a barrier is emitted after it, and
8054 then all trailing code is removed. */
8055 if (!ignore)
8056 CALL_EXPR_TAILCALL (exp) = 0;
8057
8058 /* Expand the call here so we can emit trailing code. */
8059 ret = expand_call (exp, target, ignore);
8060
8061 /* Replace the original function just in case it matters. */
8062 TREE_OPERAND (addr, 0) = fndecl;
8063
8064 /* Then issue the arithmetic correction to return the right result. */
8065 if (!ignore)
8066 {
8067 if (code == NOT)
8068 {
8069 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
8070 OPTAB_LIB_WIDEN);
8071 ret = expand_simple_unop (mode, NOT, ret, target, true);
8072 }
8073 else
8074 ret = expand_simple_binop (mode, code, ret, val, target, true,
8075 OPTAB_LIB_WIDEN);
8076 }
8077 return ret;
8078 }
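/* For example, if __atomic_add_fetch (p, n, model) cannot be expanded
   inline, the code above rewrites it into the EXT_CALL library routine
   and then corrects the returned "old" value:

     tmp = __atomic_fetch_add (p, n, model);   // library call, returns old value
     result = tmp + n;                         // arithmetic correction

   The NAND case instead needs the AND followed by NOT emitted above.  */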
8079
8080 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
8081
8082 void
8083 expand_ifn_atomic_bit_test_and (gcall *call)
8084 {
8085 tree ptr = gimple_call_arg (call, 0);
8086 tree bit = gimple_call_arg (call, 1);
8087 tree flag = gimple_call_arg (call, 2);
8088 tree lhs = gimple_call_lhs (call);
8089 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
8090 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
8091 enum rtx_code code;
8092 optab optab;
8093 class expand_operand ops[5];
8094
8095 gcc_assert (flag_inline_atomics);
8096
8097 if (gimple_call_num_args (call) == 4)
8098 model = get_memmodel (gimple_call_arg (call, 3));
8099
8100 rtx mem = get_builtin_sync_mem (ptr, mode);
8101 rtx val = expand_expr_force_mode (bit, mode);
8102
8103 switch (gimple_call_internal_fn (call))
8104 {
8105 case IFN_ATOMIC_BIT_TEST_AND_SET:
8106 code = IOR;
8107 optab = atomic_bit_test_and_set_optab;
8108 break;
8109 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
8110 code = XOR;
8111 optab = atomic_bit_test_and_complement_optab;
8112 break;
8113 case IFN_ATOMIC_BIT_TEST_AND_RESET:
8114 code = AND;
8115 optab = atomic_bit_test_and_reset_optab;
8116 break;
8117 default:
8118 gcc_unreachable ();
8119 }
8120
8121 if (lhs == NULL_TREE)
8122 {
8123 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8124 val, NULL_RTX, true, OPTAB_DIRECT);
8125 if (code == AND)
8126 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8127 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
8128 return;
8129 }
8130
8131 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8132 enum insn_code icode = direct_optab_handler (optab, mode);
8133 gcc_assert (icode != CODE_FOR_nothing);
8134 create_output_operand (&ops[0], target, mode);
8135 create_fixed_operand (&ops[1], mem);
8136 create_convert_operand_to (&ops[2], val, mode, true);
8137 create_integer_operand (&ops[3], model);
8138 create_integer_operand (&ops[4], integer_onep (flag));
8139 if (maybe_expand_insn (icode, 5, ops))
8140 return;
8141
8142 rtx bitval = val;
8143 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
8144 val, NULL_RTX, true, OPTAB_DIRECT);
8145 rtx maskval = val;
8146 if (code == AND)
8147 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
8148 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
8149 code, model, false);
8150 if (integer_onep (flag))
8151 {
8152 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
8153 NULL_RTX, true, OPTAB_DIRECT);
8154 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
8155 true, OPTAB_DIRECT);
8156 }
8157 else
8158 result = expand_simple_binop (mode, AND, result, maskval, target, true,
8159 OPTAB_DIRECT);
8160 if (result != target)
8161 emit_move_insn (target, result);
8162 }
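/* Illustrative origin of this internal function, not part of GCC itself:
   a source sequence such as

     bool was_set = __atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
                    & (1u << bit);

   is recognized by earlier GIMPLE optimizations and funnelled here as
   IFN_ATOMIC_BIT_TEST_AND_SET, so only the single tested bit has to be
   produced, ideally via the direct optab tried first above.  */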
8163
8164 /* Expand an atomic clear operation.
8165 void _atomic_clear (BOOL *obj, enum memmodel)
8166 EXP is the call expression. */
8167
8168 static rtx
8169 expand_builtin_atomic_clear (tree exp)
8170 {
8171 machine_mode mode;
8172 rtx mem, ret;
8173 enum memmodel model;
8174
8175 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8176 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8177 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8178
8179 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
8180 {
8181 location_t loc
8182 = expansion_point_location_if_in_system_header (input_location);
8183 warning_at (loc, OPT_Winvalid_memory_model,
8184 "invalid memory model for %<__atomic_store%>");
8185 model = MEMMODEL_SEQ_CST;
8186 }
8187
8188 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
8189 Failing that, a store is issued by __atomic_store. The only way this can
8190 fail is if the bool type is larger than a word size. Unlikely, but
8191 handle it anyway for completeness. Assume a single threaded model since
8192 there is no atomic support in this case, and no barriers are required. */
8193 ret = expand_atomic_store (mem, const0_rtx, model, true);
8194 if (!ret)
8195 emit_move_insn (mem, const0_rtx);
8196 return const0_rtx;
8197 }
8198
8199 /* Expand an atomic test_and_set operation.
8200 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
8201 EXP is the call expression. */
8202
8203 static rtx
8204 expand_builtin_atomic_test_and_set (tree exp, rtx target)
8205 {
8206 rtx mem;
8207 enum memmodel model;
8208 machine_mode mode;
8209
8210 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
8211 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8212 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8213
8214 return expand_atomic_test_and_set (target, mem, model);
8215 }
8216
8217
8218 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
8219 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
8220
8221 static tree
8222 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
8223 {
8224 int size;
8225 machine_mode mode;
8226 unsigned int mode_align, type_align;
8227
8228 if (TREE_CODE (arg0) != INTEGER_CST)
8229 return NULL_TREE;
8230
8231 /* We need a corresponding integer mode for the access to be lock-free. */
8232 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
8233 if (!int_mode_for_size (size, 0).exists (&mode))
8234 return boolean_false_node;
8235
8236 mode_align = GET_MODE_ALIGNMENT (mode);
8237
8238 if (TREE_CODE (arg1) == INTEGER_CST)
8239 {
8240 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
8241
8242 /* Either this argument is null, or it's a fake pointer encoding
8243 the alignment of the object. */
8244 val = least_bit_hwi (val);
8245 val *= BITS_PER_UNIT;
8246
8247 if (val == 0 || mode_align < val)
8248 type_align = mode_align;
8249 else
8250 type_align = val;
8251 }
8252 else
8253 {
8254 tree ttype = TREE_TYPE (arg1);
8255
8256 /* This function is usually invoked and folded immediately by the front
8257 end before anything else has a chance to look at it. The pointer
8258 parameter at this point is usually cast to a void *, so check for that
8259 and look past the cast. */
8260 if (CONVERT_EXPR_P (arg1)
8261 && POINTER_TYPE_P (ttype)
8262 && VOID_TYPE_P (TREE_TYPE (ttype))
8263 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
8264 arg1 = TREE_OPERAND (arg1, 0);
8265
8266 ttype = TREE_TYPE (arg1);
8267 gcc_assert (POINTER_TYPE_P (ttype));
8268
8269 /* Get the underlying type of the object. */
8270 ttype = TREE_TYPE (ttype);
8271 type_align = TYPE_ALIGN (ttype);
8272 }
8273
8274 /* If the object has smaller alignment, the lock free routines cannot
8275 be used. */
8276 if (type_align < mode_align)
8277 return boolean_false_node;
8278
8279 /* Check if a compare_and_swap pattern exists for the mode which represents
8280 the required size. The pattern is not allowed to fail, so the existence
8281 of the pattern indicates support is present. Also require that an
8282 atomic load exists for the required size. */
8283 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
8284 return boolean_true_node;
8285 else
8286 return boolean_false_node;
8287 }
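/* Illustrative folding, not part of GCC itself: on a target with a native
   4-byte compare-and-swap and atomic load,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true here, while passing the address of an under-aligned
   object as the second argument can make the same size fold to false.  */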
8288
8289 /* Return true if the parameters to call EXP represent an object which will
8290 always generate lock free instructions. The first argument represents the
8291 size of the object, and the second parameter is a pointer to the object
8292 itself. If NULL is passed for the object, then the result is based on
8293 typical alignment for an object of the specified size. Otherwise return
8294 false. */
8295
8296 static rtx
8297 expand_builtin_atomic_always_lock_free (tree exp)
8298 {
8299 tree size;
8300 tree arg0 = CALL_EXPR_ARG (exp, 0);
8301 tree arg1 = CALL_EXPR_ARG (exp, 1);
8302
8303 if (TREE_CODE (arg0) != INTEGER_CST)
8304 {
8305 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
8306 return const0_rtx;
8307 }
8308
8309 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
8310 if (size == boolean_true_node)
8311 return const1_rtx;
8312 return const0_rtx;
8313 }
8314
8315 /* Return one or zero if it can be determined that object ARG1 of size ARG0
8316 is lock free on this architecture. */
8317
8318 static tree
8319 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
8320 {
8321 if (!flag_inline_atomics)
8322 return NULL_TREE;
8323
8324 /* If it isn't always lock free, don't generate a result. */
8325 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
8326 return boolean_true_node;
8327
8328 return NULL_TREE;
8329 }
8330
8331 /* Return true if the parameters to call EXP represent an object which will
8332 always generate lock free instructions. The first argument represents the
8333 size of the object, and the second parameter is a pointer to the object
8334 itself. If NULL is passed for the object, then the result is based on
8335 typical alignment for an object of the specified size. Otherwise return
8336 NULL. */
8337
8338 static rtx
8339 expand_builtin_atomic_is_lock_free (tree exp)
8340 {
8341 tree size;
8342 tree arg0 = CALL_EXPR_ARG (exp, 0);
8343 tree arg1 = CALL_EXPR_ARG (exp, 1);
8344
8345 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
8346 {
8347 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
8348 return NULL_RTX;
8349 }
8350
8351 if (!flag_inline_atomics)
8352 return NULL_RTX;
8353
8354 /* If the value is known at compile time, return the RTX for it. */
8355 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
8356 if (size == boolean_true_node)
8357 return const1_rtx;
8358
8359 return NULL_RTX;
8360 }
8361
8362 /* Expand the __atomic_thread_fence intrinsic:
8363 void __atomic_thread_fence (enum memmodel)
8364 EXP is the CALL_EXPR. */
8365
8366 static void
8367 expand_builtin_atomic_thread_fence (tree exp)
8368 {
8369 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
8370 expand_mem_thread_fence (model);
8371 }
8372
8373 /* Expand the __atomic_signal_fence intrinsic:
8374 void __atomic_signal_fence (enum memmodel)
8375 EXP is the CALL_EXPR. */
8376
8377 static void
8378 expand_builtin_atomic_signal_fence (tree exp)
8379 {
8380 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
8381 expand_mem_signal_fence (model);
8382 }
8383
8384 /* Expand the __sync_synchronize intrinsic. */
8385
8386 static void
8387 expand_builtin_sync_synchronize (void)
8388 {
8389 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
8390 }
8391
8392 static rtx
8393 expand_builtin_thread_pointer (tree exp, rtx target)
8394 {
8395 enum insn_code icode;
8396 if (!validate_arglist (exp, VOID_TYPE))
8397 return const0_rtx;
8398 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
8399 if (icode != CODE_FOR_nothing)
8400 {
8401 class expand_operand op;
8402 /* If the target is not suitable then create a new target. */
8403 if (target == NULL_RTX
8404 || !REG_P (target)
8405 || GET_MODE (target) != Pmode)
8406 target = gen_reg_rtx (Pmode);
8407 create_output_operand (&op, target, Pmode);
8408 expand_insn (icode, 1, &op);
8409 return target;
8410 }
8411 error ("%<__builtin_thread_pointer%> is not supported on this target");
8412 return const0_rtx;
8413 }
8414
8415 static void
8416 expand_builtin_set_thread_pointer (tree exp)
8417 {
8418 enum insn_code icode;
8419 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8420 return;
8421 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
8422 if (icode != CODE_FOR_nothing)
8423 {
8424 class expand_operand op;
8425 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
8426 Pmode, EXPAND_NORMAL);
8427 create_input_operand (&op, val, Pmode);
8428 expand_insn (icode, 1, &op);
8429 return;
8430 }
8431 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
8432 }
8433
8434 \f
8435 /* Emit code to restore the current value of stack. */
8436
8437 static void
8438 expand_stack_restore (tree var)
8439 {
8440 rtx_insn *prev;
8441 rtx sa = expand_normal (var);
8442
8443 sa = convert_memory_address (Pmode, sa);
8444
8445 prev = get_last_insn ();
8446 emit_stack_restore (SAVE_BLOCK, sa);
8447
8448 record_new_stack_level ();
8449
8450 fixup_args_size_notes (prev, get_last_insn (), 0);
8451 }
8452
8453 /* Emit code to save the current value of stack. */
8454
8455 static rtx
8456 expand_stack_save (void)
8457 {
8458 rtx ret = NULL_RTX;
8459
8460 emit_stack_save (SAVE_BLOCK, &ret);
8461 return ret;
8462 }
8463
8464 /* Emit code to get the openacc gang, worker or vector id or size. */
8465
8466 static rtx
8467 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
8468 {
8469 const char *name;
8470 rtx fallback_retval;
8471 rtx_insn *(*gen_fn) (rtx, rtx);
8472 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
8473 {
8474 case BUILT_IN_GOACC_PARLEVEL_ID:
8475 name = "__builtin_goacc_parlevel_id";
8476 fallback_retval = const0_rtx;
8477 gen_fn = targetm.gen_oacc_dim_pos;
8478 break;
8479 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8480 name = "__builtin_goacc_parlevel_size";
8481 fallback_retval = const1_rtx;
8482 gen_fn = targetm.gen_oacc_dim_size;
8483 break;
8484 default:
8485 gcc_unreachable ();
8486 }
8487
8488 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
8489 {
8490 error ("%qs only supported in OpenACC code", name);
8491 return const0_rtx;
8492 }
8493
8494 tree arg = CALL_EXPR_ARG (exp, 0);
8495 if (TREE_CODE (arg) != INTEGER_CST)
8496 {
8497 error ("non-constant argument 0 to %qs", name);
8498 return const0_rtx;
8499 }
8500
8501 int dim = TREE_INT_CST_LOW (arg);
8502 switch (dim)
8503 {
8504 case GOMP_DIM_GANG:
8505 case GOMP_DIM_WORKER:
8506 case GOMP_DIM_VECTOR:
8507 break;
8508 default:
8509 error ("illegal argument 0 to %qs", name);
8510 return const0_rtx;
8511 }
8512
8513 if (ignore)
8514 return target;
8515
8516 if (target == NULL_RTX)
8517 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8518
8519 if (!targetm.have_oacc_dim_size ())
8520 {
8521 emit_move_insn (target, fallback_retval);
8522 return target;
8523 }
8524
8525 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
8526 emit_insn (gen_fn (reg, GEN_INT (dim)));
8527 if (reg != target)
8528 emit_move_insn (target, reg);
8529
8530 return target;
8531 }
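
/* Usage sketch (an illustrative assumption, not part of the compiler):
   inside code compiled for an OpenACC offload region, the argument selects
   the parallelism level using the GOMP_DIM_* constants from
   gomp-constants.h, e.g.

     int lane  = __builtin_goacc_parlevel_id (GOMP_DIM_VECTOR);
     int lanes = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);

   Outside of OpenACC code, or with a non-constant or out-of-range argument,
   the expander above diagnoses the call instead.  */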
8532
8533 /* Expand a string compare operation using a sequence of char comparisons
8534 to get rid of the calling overhead, with the result going to TARGET if
8535 that's convenient.
8536
8537 VAR_STR is the variable string source;
8538 CONST_STR is the constant string source;
8539 LENGTH is the number of chars to compare;
8540 CONST_STR_N indicates which source string is the constant string;
8541 MODE is the mode of the result.
8542
8543 The call expands to (assuming CONST_STR_N is 2, i.e., arg2 is a constant string):
8544
8545 target = (int) (unsigned char) var_str[0]
8546 - (int) (unsigned char) const_str[0];
8547 if (target != 0)
8548 goto ne_label;
8549 ...
8550 target = (int) (unsigned char) var_str[length - 2]
8551 - (int) (unsigned char) const_str[length - 2];
8552 if (target != 0)
8553 goto ne_label;
8554 target = (int) (unsigned char) var_str[length - 1]
8555 - (int) (unsigned char) const_str[length - 1];
8556 ne_label:
8557 */
8558
8559 static rtx
8560 inline_string_cmp (rtx target, tree var_str, const char *const_str,
8561 unsigned HOST_WIDE_INT length,
8562 int const_str_n, machine_mode mode)
8563 {
8564 HOST_WIDE_INT offset = 0;
8565 rtx var_rtx_array
8566 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
8567 rtx var_rtx = NULL_RTX;
8568 rtx const_rtx = NULL_RTX;
8569 rtx result = target ? target : gen_reg_rtx (mode);
8570 rtx_code_label *ne_label = gen_label_rtx ();
8571 tree unit_type_node = unsigned_char_type_node;
8572 scalar_int_mode unit_mode
8573 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
8574
8575 start_sequence ();
8576
8577 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
8578 {
8579 var_rtx
8580 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
8581 const_rtx = c_readstr (const_str + offset, unit_mode);
8582 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
8583 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
8584
8585 op0 = convert_modes (mode, unit_mode, op0, 1);
8586 op1 = convert_modes (mode, unit_mode, op1, 1);
8587 result = expand_simple_binop (mode, MINUS, op0, op1,
8588 result, 1, OPTAB_WIDEN);
8589 if (i < length - 1)
8590 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
8591 mode, true, ne_label);
8592 offset += GET_MODE_SIZE (unit_mode);
8593 }
8594
8595 emit_label (ne_label);
8596 rtx_insn *insns = get_insns ();
8597 end_sequence ();
8598 emit_insn (insns);
8599
8600 return result;
8601 }
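
/* As a concrete illustration (a usage sketch only): with the default
   parameters, a call such as

     strcmp (s, "hi")

   where S is not a known constant is expanded by inline_string_cmp into
   code roughly equivalent to

     result = (int) (unsigned char) s[0] - (int) (unsigned char) 'h';
     if (result != 0)
       goto ne_label;
     result = (int) (unsigned char) s[1] - (int) (unsigned char) 'i';
     if (result != 0)
       goto ne_label;
     result = (int) (unsigned char) s[2] - (int) (unsigned char) '\0';
   ne_label:
     ...

   i.e. the pattern documented above with LENGTH = 3 and CONST_STR_N = 2.  */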
8602
8603 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
8604 to TARGET if that's convenient.
8605 If the call cannot be inlined, return NULL_RTX. */
8606
8607 static rtx
8608 inline_expand_builtin_bytecmp (tree exp, rtx target)
8609 {
8610 tree fndecl = get_callee_fndecl (exp);
8611 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8612 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
8613
8614 /* Do NOT apply this inlining expansion when optimizing for size or when
8615 the optimization level is below 2. */
8616 if (optimize < 2 || optimize_insn_for_size_p ())
8617 return NULL_RTX;
8618
8619 gcc_checking_assert (fcode == BUILT_IN_STRCMP
8620 || fcode == BUILT_IN_STRNCMP
8621 || fcode == BUILT_IN_MEMCMP);
8622
8623 /* On a target where the type of the call (int) has the same or narrower
8624 precision than unsigned char, give up on the inlining expansion. */
8625 if (TYPE_PRECISION (unsigned_char_type_node)
8626 >= TYPE_PRECISION (TREE_TYPE (exp)))
8627 return NULL_RTX;
8628
8629 tree arg1 = CALL_EXPR_ARG (exp, 0);
8630 tree arg2 = CALL_EXPR_ARG (exp, 1);
8631 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
8632
8633 unsigned HOST_WIDE_INT len1 = 0;
8634 unsigned HOST_WIDE_INT len2 = 0;
8635 unsigned HOST_WIDE_INT len3 = 0;
8636
8637 /* Get the object representation of the initializers of ARG1 and ARG2
8638 as strings, provided they refer to constant objects, with their byte
8639 sizes in LEN1 and LEN2, respectively. */
8640 const char *bytes1 = getbyterep (arg1, &len1);
8641 const char *bytes2 = getbyterep (arg2, &len2);
8642
8643 /* Fail if neither argument refers to an initialized constant. */
8644 if (!bytes1 && !bytes2)
8645 return NULL_RTX;
8646
8647 if (is_ncmp)
8648 {
8649 /* Fail if the memcmp/strncmp bound is not a constant. */
8650 if (!tree_fits_uhwi_p (len3_tree))
8651 return NULL_RTX;
8652
8653 len3 = tree_to_uhwi (len3_tree);
8654
8655 if (fcode == BUILT_IN_MEMCMP)
8656 {
8657 /* Fail if the memcmp bound is greater than the size of either
8658 of the two constant objects. */
8659 if ((bytes1 && len1 < len3)
8660 || (bytes2 && len2 < len3))
8661 return NULL_RTX;
8662 }
8663 }
8664
8665 if (fcode != BUILT_IN_MEMCMP)
8666 {
8667 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
8668 and LEN2 to the length of the nul-terminated string stored
8669 in each. */
8670 if (bytes1 != NULL)
8671 len1 = strnlen (bytes1, len1) + 1;
8672 if (bytes2 != NULL)
8673 len2 = strnlen (bytes2, len2) + 1;
8674 }
8675
8676 /* See inline_string_cmp. */
8677 int const_str_n;
8678 if (!len1)
8679 const_str_n = 2;
8680 else if (!len2)
8681 const_str_n = 1;
8682 else if (len2 > len1)
8683 const_str_n = 1;
8684 else
8685 const_str_n = 2;
8686
8687 /* For strncmp only, compute the new bound as the smallest of
8688 the lengths of the two strings (plus 1) and the bound provided
8689 to the function. */
8690 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
8691 if (is_ncmp && len3 < bound)
8692 bound = len3;
8693
8694 /* If the bound of the comparison is larger than the threshold,
8695 do nothing. */
8696 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
8697 return NULL_RTX;
8698
8699 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8700
8701 /* Now, expand the call inline. */
8702 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
8703 (const_str_n == 1) ? bytes1 : bytes2, bound,
8704 const_str_n, mode);
8705 }
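
/* For example (an illustrative sketch, not an exhaustive description): at
   -O2 a call such as

     memcmp (p, "abcd", 4)

   can be expanded inline because the second argument refers to a constant
   object and the bound is within --param builtin-string-cmp-inline-length,
   whereas

     strncmp (p, q, n)

   is left as a library call because neither string argument is constant.  */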
8706
8707 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
8708 represents the size of the first argument to that call, or VOIDmode
8709 if the argument is a pointer. IGNORE will be true if the result
8710 isn't used. */
8711 static rtx
8712 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
8713 bool ignore)
8714 {
8715 rtx val, failsafe;
8716 unsigned nargs = call_expr_nargs (exp);
8717
8718 tree arg0 = CALL_EXPR_ARG (exp, 0);
8719
8720 if (mode == VOIDmode)
8721 {
8722 mode = TYPE_MODE (TREE_TYPE (arg0));
8723 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
8724 }
8725
8726 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
8727
8728 /* An optional second argument can be used as a failsafe value on
8729 some machines. If it isn't present, then the failsafe value is
8730 assumed to be 0. */
8731 if (nargs > 1)
8732 {
8733 tree arg1 = CALL_EXPR_ARG (exp, 1);
8734 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
8735 }
8736 else
8737 failsafe = const0_rtx;
8738
8739 /* If the result isn't used, the behavior is undefined. It would be
8740 nice to emit a warning here, but path splitting means this might
8741 happen with legitimate code. So simply drop the builtin
8742 expansion in that case; we've handled any side-effects above. */
8743 if (ignore)
8744 return const0_rtx;
8745
8746 /* If we don't have a suitable target, create one to hold the result. */
8747 if (target == NULL || GET_MODE (target) != mode)
8748 target = gen_reg_rtx (mode);
8749
8750 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
8751 val = convert_modes (mode, VOIDmode, val, false);
8752
8753 return targetm.speculation_safe_value (mode, target, val, failsafe);
8754 }
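
/* Usage sketch (illustrative only; the function and variable names are
   placeholders): the builtin is intended for Spectre-style mitigations,
   where a bounds-checked load should not use an attacker-controlled index
   under misspeculation:

     int load_elem (int *array, unsigned idx, unsigned len)
     {
       if (idx < len)
         return array[__builtin_speculation_safe_value (idx, 0)];
       return 0;
     }

   The optional second argument is the failsafe value handled above; without
   it the failsafe defaults to 0.  */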
8755
8756 /* Expand an expression EXP that calls a built-in function,
8757 with result going to TARGET if that's convenient
8758 (and in mode MODE if that's convenient).
8759 SUBTARGET may be used as the target for computing one of EXP's operands.
8760 IGNORE is nonzero if the value is to be ignored. */
8761
8762 rtx
8763 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
8764 int ignore)
8765 {
8766 tree fndecl = get_callee_fndecl (exp);
8767 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
8768 int flags;
8769
8770 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8771 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
8772
8773 /* When ASan is enabled, we don't want to expand some memory/string
8774 builtins and rely on libsanitizer's hooks. This allows us to avoid
8775 redundant checks and be sure that a possible overflow will be detected
8776 by ASan. */
8777
8778 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8779 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
8780 return expand_call (exp, target, ignore);
8781
8782 /* When not optimizing, generate calls to library functions for a certain
8783 set of builtins. */
8784 if (!optimize
8785 && !called_as_built_in (fndecl)
8786 && fcode != BUILT_IN_FORK
8787 && fcode != BUILT_IN_EXECL
8788 && fcode != BUILT_IN_EXECV
8789 && fcode != BUILT_IN_EXECLP
8790 && fcode != BUILT_IN_EXECLE
8791 && fcode != BUILT_IN_EXECVP
8792 && fcode != BUILT_IN_EXECVE
8793 && !ALLOCA_FUNCTION_CODE_P (fcode)
8794 && fcode != BUILT_IN_FREE)
8795 return expand_call (exp, target, ignore);
8796
8797 /* The built-in function expanders test for target == const0_rtx
8798 to determine whether the function's result will be ignored. */
8799 if (ignore)
8800 target = const0_rtx;
8801
8802 /* If the result of a pure or const built-in function is ignored, and
8803 none of its arguments are volatile, we can avoid expanding the
8804 built-in call and just evaluate the arguments for side-effects. */
8805 if (target == const0_rtx
8806 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
8807 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8808 {
8809 bool volatilep = false;
8810 tree arg;
8811 call_expr_arg_iterator iter;
8812
8813 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8814 if (TREE_THIS_VOLATILE (arg))
8815 {
8816 volatilep = true;
8817 break;
8818 }
8819
8820 if (! volatilep)
8821 {
8822 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8823 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8824 return const0_rtx;
8825 }
8826 }
8827
8828 switch (fcode)
8829 {
8830 CASE_FLT_FN (BUILT_IN_FABS):
8831 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8832 case BUILT_IN_FABSD32:
8833 case BUILT_IN_FABSD64:
8834 case BUILT_IN_FABSD128:
8835 target = expand_builtin_fabs (exp, target, subtarget);
8836 if (target)
8837 return target;
8838 break;
8839
8840 CASE_FLT_FN (BUILT_IN_COPYSIGN):
8841 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
8842 target = expand_builtin_copysign (exp, target, subtarget);
8843 if (target)
8844 return target;
8845 break;
8846
8847 /* Just do a normal library call if we were unable to fold
8848 the values. */
8849 CASE_FLT_FN (BUILT_IN_CABS):
8850 break;
8851
8852 CASE_FLT_FN (BUILT_IN_FMA):
8853 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
8854 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
8855 if (target)
8856 return target;
8857 break;
8858
8859 CASE_FLT_FN (BUILT_IN_ILOGB):
8860 if (! flag_unsafe_math_optimizations)
8861 break;
8862 gcc_fallthrough ();
8863 CASE_FLT_FN (BUILT_IN_ISINF):
8864 CASE_FLT_FN (BUILT_IN_FINITE):
8865 case BUILT_IN_ISFINITE:
8866 case BUILT_IN_ISNORMAL:
8867 target = expand_builtin_interclass_mathfn (exp, target);
8868 if (target)
8869 return target;
8870 break;
8871
8872 CASE_FLT_FN (BUILT_IN_ICEIL):
8873 CASE_FLT_FN (BUILT_IN_LCEIL):
8874 CASE_FLT_FN (BUILT_IN_LLCEIL):
8875 CASE_FLT_FN (BUILT_IN_LFLOOR):
8876 CASE_FLT_FN (BUILT_IN_IFLOOR):
8877 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8878 target = expand_builtin_int_roundingfn (exp, target);
8879 if (target)
8880 return target;
8881 break;
8882
8883 CASE_FLT_FN (BUILT_IN_IRINT):
8884 CASE_FLT_FN (BUILT_IN_LRINT):
8885 CASE_FLT_FN (BUILT_IN_LLRINT):
8886 CASE_FLT_FN (BUILT_IN_IROUND):
8887 CASE_FLT_FN (BUILT_IN_LROUND):
8888 CASE_FLT_FN (BUILT_IN_LLROUND):
8889 target = expand_builtin_int_roundingfn_2 (exp, target);
8890 if (target)
8891 return target;
8892 break;
8893
8894 CASE_FLT_FN (BUILT_IN_POWI):
8895 target = expand_builtin_powi (exp, target);
8896 if (target)
8897 return target;
8898 break;
8899
8900 CASE_FLT_FN (BUILT_IN_CEXPI):
8901 target = expand_builtin_cexpi (exp, target);
8902 gcc_assert (target);
8903 return target;
8904
8905 CASE_FLT_FN (BUILT_IN_SIN):
8906 CASE_FLT_FN (BUILT_IN_COS):
8907 if (! flag_unsafe_math_optimizations)
8908 break;
8909 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8910 if (target)
8911 return target;
8912 break;
8913
8914 CASE_FLT_FN (BUILT_IN_SINCOS):
8915 if (! flag_unsafe_math_optimizations)
8916 break;
8917 target = expand_builtin_sincos (exp);
8918 if (target)
8919 return target;
8920 break;
8921
8922 case BUILT_IN_APPLY_ARGS:
8923 return expand_builtin_apply_args ();
8924
8925 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8926 FUNCTION with a copy of the parameters described by
8927 ARGUMENTS, and ARGSIZE. It returns a block of memory
8928 allocated on the stack into which are stored all the registers
8929 that might possibly be used for returning the result of a
8930 function. ARGUMENTS is the value returned by
8931 __builtin_apply_args. ARGSIZE is the number of bytes of
8932 arguments that must be copied. ??? How should this value be
8933 computed? We'll also need a safe worst case value for varargs
8934 functions. */
8935 case BUILT_IN_APPLY:
8936 if (!validate_arglist (exp, POINTER_TYPE,
8937 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8938 && !validate_arglist (exp, REFERENCE_TYPE,
8939 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8940 return const0_rtx;
8941 else
8942 {
8943 rtx ops[3];
8944
8945 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8946 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8947 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8948
8949 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8950 }
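
    /* Usage sketch (illustrative only): __builtin_apply_args,
       __builtin_apply and __builtin_return are typically used together to
       forward a call, e.g.

         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (ret);

       where TARGET_FN and the 64-byte ARGSIZE are placeholders chosen by
       the user, as discussed in the comment above.  */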
8951
8952 /* __builtin_return (RESULT) causes the function to return the
8953 value described by RESULT. RESULT is address of the block of
8954 memory returned by __builtin_apply. */
8955 case BUILT_IN_RETURN:
8956 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8957 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8958 return const0_rtx;
8959
8960 case BUILT_IN_SAVEREGS:
8961 return expand_builtin_saveregs ();
8962
8963 case BUILT_IN_VA_ARG_PACK:
8964 /* All valid uses of __builtin_va_arg_pack () are removed during
8965 inlining. */
8966 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8967 return const0_rtx;
8968
8969 case BUILT_IN_VA_ARG_PACK_LEN:
8970 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8971 inlining. */
8972 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
8973 return const0_rtx;
8974
8975 /* Return the address of the first anonymous stack arg. */
8976 case BUILT_IN_NEXT_ARG:
8977 if (fold_builtin_next_arg (exp, false))
8978 return const0_rtx;
8979 return expand_builtin_next_arg ();
8980
8981 case BUILT_IN_CLEAR_CACHE:
8982 target = expand_builtin___clear_cache (exp);
8983 if (target)
8984 return target;
8985 break;
8986
8987 case BUILT_IN_CLASSIFY_TYPE:
8988 return expand_builtin_classify_type (exp);
8989
8990 case BUILT_IN_CONSTANT_P:
8991 return const0_rtx;
8992
8993 case BUILT_IN_FRAME_ADDRESS:
8994 case BUILT_IN_RETURN_ADDRESS:
8995 return expand_builtin_frame_address (fndecl, exp);
8996
8997 /* Returns the address of the area where the structure is returned.
8998 0 otherwise. */
8999 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9000 if (call_expr_nargs (exp) != 0
9001 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9002 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9003 return const0_rtx;
9004 else
9005 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9006
9007 CASE_BUILT_IN_ALLOCA:
9008 target = expand_builtin_alloca (exp);
9009 if (target)
9010 return target;
9011 break;
9012
9013 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9014 return expand_asan_emit_allocas_unpoison (exp);
9015
9016 case BUILT_IN_STACK_SAVE:
9017 return expand_stack_save ();
9018
9019 case BUILT_IN_STACK_RESTORE:
9020 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
9021 return const0_rtx;
9022
9023 case BUILT_IN_BSWAP16:
9024 case BUILT_IN_BSWAP32:
9025 case BUILT_IN_BSWAP64:
9026 case BUILT_IN_BSWAP128:
9027 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
9028 if (target)
9029 return target;
9030 break;
9031
9032 CASE_INT_FN (BUILT_IN_FFS):
9033 target = expand_builtin_unop (target_mode, exp, target,
9034 subtarget, ffs_optab);
9035 if (target)
9036 return target;
9037 break;
9038
9039 CASE_INT_FN (BUILT_IN_CLZ):
9040 target = expand_builtin_unop (target_mode, exp, target,
9041 subtarget, clz_optab);
9042 if (target)
9043 return target;
9044 break;
9045
9046 CASE_INT_FN (BUILT_IN_CTZ):
9047 target = expand_builtin_unop (target_mode, exp, target,
9048 subtarget, ctz_optab);
9049 if (target)
9050 return target;
9051 break;
9052
9053 CASE_INT_FN (BUILT_IN_CLRSB):
9054 target = expand_builtin_unop (target_mode, exp, target,
9055 subtarget, clrsb_optab);
9056 if (target)
9057 return target;
9058 break;
9059
9060 CASE_INT_FN (BUILT_IN_POPCOUNT):
9061 target = expand_builtin_unop (target_mode, exp, target,
9062 subtarget, popcount_optab);
9063 if (target)
9064 return target;
9065 break;
9066
9067 CASE_INT_FN (BUILT_IN_PARITY):
9068 target = expand_builtin_unop (target_mode, exp, target,
9069 subtarget, parity_optab);
9070 if (target)
9071 return target;
9072 break;
9073
9074 case BUILT_IN_STRLEN:
9075 target = expand_builtin_strlen (exp, target, target_mode);
9076 if (target)
9077 return target;
9078 break;
9079
9080 case BUILT_IN_STRNLEN:
9081 target = expand_builtin_strnlen (exp, target, target_mode);
9082 if (target)
9083 return target;
9084 break;
9085
9086 case BUILT_IN_STRCAT:
9087 target = expand_builtin_strcat (exp);
9088 if (target)
9089 return target;
9090 break;
9091
9092 case BUILT_IN_GETTEXT:
9093 case BUILT_IN_PUTS:
9094 case BUILT_IN_PUTS_UNLOCKED:
9095 case BUILT_IN_STRDUP:
9096 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9097 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9098 break;
9099
9100 case BUILT_IN_INDEX:
9101 case BUILT_IN_RINDEX:
9102 case BUILT_IN_STRCHR:
9103 case BUILT_IN_STRRCHR:
9104 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9105 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9106 break;
9107
9108 case BUILT_IN_FPUTS:
9109 case BUILT_IN_FPUTS_UNLOCKED:
9110 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9111 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9112 break;
9113
9114 case BUILT_IN_STRNDUP:
9115 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9116 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
9117 break;
9118
9119 case BUILT_IN_STRCASECMP:
9120 case BUILT_IN_STRPBRK:
9121 case BUILT_IN_STRSPN:
9122 case BUILT_IN_STRCSPN:
9123 case BUILT_IN_STRSTR:
9124 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9125 {
9126 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9127 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
9128 }
9129 break;
9130
9131 case BUILT_IN_STRCPY:
9132 target = expand_builtin_strcpy (exp, target);
9133 if (target)
9134 return target;
9135 break;
9136
9137 case BUILT_IN_STRNCAT:
9138 target = expand_builtin_strncat (exp, target);
9139 if (target)
9140 return target;
9141 break;
9142
9143 case BUILT_IN_STRNCPY:
9144 target = expand_builtin_strncpy (exp, target);
9145 if (target)
9146 return target;
9147 break;
9148
9149 case BUILT_IN_STPCPY:
9150 target = expand_builtin_stpcpy (exp, target, mode);
9151 if (target)
9152 return target;
9153 break;
9154
9155 case BUILT_IN_STPNCPY:
9156 target = expand_builtin_stpncpy (exp, target);
9157 if (target)
9158 return target;
9159 break;
9160
9161 case BUILT_IN_MEMCHR:
9162 target = expand_builtin_memchr (exp, target);
9163 if (target)
9164 return target;
9165 break;
9166
9167 case BUILT_IN_MEMCPY:
9168 target = expand_builtin_memcpy (exp, target);
9169 if (target)
9170 return target;
9171 break;
9172
9173 case BUILT_IN_MEMMOVE:
9174 target = expand_builtin_memmove (exp, target);
9175 if (target)
9176 return target;
9177 break;
9178
9179 case BUILT_IN_MEMPCPY:
9180 target = expand_builtin_mempcpy (exp, target);
9181 if (target)
9182 return target;
9183 break;
9184
9185 case BUILT_IN_MEMSET:
9186 target = expand_builtin_memset (exp, target, mode);
9187 if (target)
9188 return target;
9189 break;
9190
9191 case BUILT_IN_BZERO:
9192 target = expand_builtin_bzero (exp);
9193 if (target)
9194 return target;
9195 break;
9196
9197 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9198 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
9199 when changing it to a strcmp call. */
9200 case BUILT_IN_STRCMP_EQ:
9201 target = expand_builtin_memcmp (exp, target, true);
9202 if (target)
9203 return target;
9204
9205 /* Change this call back to a BUILT_IN_STRCMP. */
9206 TREE_OPERAND (exp, 1)
9207 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
9208
9209 /* Delete the last parameter. */
9210 unsigned int i;
9211 vec<tree, va_gc> *arg_vec;
9212 vec_alloc (arg_vec, 2);
9213 for (i = 0; i < 2; i++)
9214 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
9215 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
9216 /* FALLTHROUGH */
9217
9218 case BUILT_IN_STRCMP:
9219 target = expand_builtin_strcmp (exp, target);
9220 if (target)
9221 return target;
9222 break;
9223
9224 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9225 back to a BUILT_IN_STRNCMP. */
9226 case BUILT_IN_STRNCMP_EQ:
9227 target = expand_builtin_memcmp (exp, target, true);
9228 if (target)
9229 return target;
9230
9231 /* Change it back to a BUILT_IN_STRNCMP. */
9232 TREE_OPERAND (exp, 1)
9233 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
9234 /* FALLTHROUGH */
9235
9236 case BUILT_IN_STRNCMP:
9237 target = expand_builtin_strncmp (exp, target, mode);
9238 if (target)
9239 return target;
9240 break;
9241
9242 case BUILT_IN_BCMP:
9243 case BUILT_IN_MEMCMP:
9244 case BUILT_IN_MEMCMP_EQ:
9245 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
9246 if (target)
9247 return target;
9248 if (fcode == BUILT_IN_MEMCMP_EQ)
9249 {
9250 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
9251 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
9252 }
9253 break;
9254
9255 case BUILT_IN_SETJMP:
9256 /* This should have been lowered to the builtins below. */
9257 gcc_unreachable ();
9258
9259 case BUILT_IN_SETJMP_SETUP:
9260 /* __builtin_setjmp_setup is passed a pointer to an array of five words
9261 and the receiver label. */
9262 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9263 {
9264 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
9265 VOIDmode, EXPAND_NORMAL);
9266 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9267 rtx_insn *label_r = label_rtx (label);
9268
9269 /* This is copied from the handling of non-local gotos. */
9270 expand_builtin_setjmp_setup (buf_addr, label_r);
9271 nonlocal_goto_handler_labels
9272 = gen_rtx_INSN_LIST (VOIDmode, label_r,
9273 nonlocal_goto_handler_labels);
9274 /* ??? Do not let expand_label treat us as such since we would
9275 not want to be both on the list of non-local labels and on
9276 the list of forced labels. */
9277 FORCED_LABEL (label) = 0;
9278 return const0_rtx;
9279 }
9280 break;
9281
9282 case BUILT_IN_SETJMP_RECEIVER:
9283 /* __builtin_setjmp_receiver is passed the receiver label. */
9284 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9285 {
9286 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9287 rtx_insn *label_r = label_rtx (label);
9288
9289 expand_builtin_setjmp_receiver (label_r);
9290 return const0_rtx;
9291 }
9292 break;
9293
9294 /* __builtin_longjmp is passed a pointer to an array of five words.
9295 It's similar to the C library longjmp function but works with
9296 __builtin_setjmp above. */
9297 case BUILT_IN_LONGJMP:
9298 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9299 {
9300 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
9301 VOIDmode, EXPAND_NORMAL);
9302 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
9303
9304 if (value != const1_rtx)
9305 {
9306 error ("%<__builtin_longjmp%> second argument must be 1");
9307 return const0_rtx;
9308 }
9309
9310 expand_builtin_longjmp (buf_addr, value);
9311 return const0_rtx;
9312 }
9313 break;
9314
9315 case BUILT_IN_NONLOCAL_GOTO:
9316 target = expand_builtin_nonlocal_goto (exp);
9317 if (target)
9318 return target;
9319 break;
9320
9321 /* This updates the setjmp buffer that is its argument with the value
9322 of the current stack pointer. */
9323 case BUILT_IN_UPDATE_SETJMP_BUF:
9324 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9325 {
9326 rtx buf_addr
9327 = expand_normal (CALL_EXPR_ARG (exp, 0));
9328
9329 expand_builtin_update_setjmp_buf (buf_addr);
9330 return const0_rtx;
9331 }
9332 break;
9333
9334 case BUILT_IN_TRAP:
9335 expand_builtin_trap ();
9336 return const0_rtx;
9337
9338 case BUILT_IN_UNREACHABLE:
9339 expand_builtin_unreachable ();
9340 return const0_rtx;
9341
9342 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9343 case BUILT_IN_SIGNBITD32:
9344 case BUILT_IN_SIGNBITD64:
9345 case BUILT_IN_SIGNBITD128:
9346 target = expand_builtin_signbit (exp, target);
9347 if (target)
9348 return target;
9349 break;
9350
9351 /* Various hooks for the DWARF 2 __throw routine. */
9352 case BUILT_IN_UNWIND_INIT:
9353 expand_builtin_unwind_init ();
9354 return const0_rtx;
9355 case BUILT_IN_DWARF_CFA:
9356 return virtual_cfa_rtx;
9357 #ifdef DWARF2_UNWIND_INFO
9358 case BUILT_IN_DWARF_SP_COLUMN:
9359 return expand_builtin_dwarf_sp_column ();
9360 case BUILT_IN_INIT_DWARF_REG_SIZES:
9361 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
9362 return const0_rtx;
9363 #endif
9364 case BUILT_IN_FROB_RETURN_ADDR:
9365 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
9366 case BUILT_IN_EXTRACT_RETURN_ADDR:
9367 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
9368 case BUILT_IN_EH_RETURN:
9369 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
9370 CALL_EXPR_ARG (exp, 1));
9371 return const0_rtx;
9372 case BUILT_IN_EH_RETURN_DATA_REGNO:
9373 return expand_builtin_eh_return_data_regno (exp);
9374 case BUILT_IN_EXTEND_POINTER:
9375 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
9376 case BUILT_IN_EH_POINTER:
9377 return expand_builtin_eh_pointer (exp);
9378 case BUILT_IN_EH_FILTER:
9379 return expand_builtin_eh_filter (exp);
9380 case BUILT_IN_EH_COPY_VALUES:
9381 return expand_builtin_eh_copy_values (exp);
9382
9383 case BUILT_IN_VA_START:
9384 return expand_builtin_va_start (exp);
9385 case BUILT_IN_VA_END:
9386 return expand_builtin_va_end (exp);
9387 case BUILT_IN_VA_COPY:
9388 return expand_builtin_va_copy (exp);
9389 case BUILT_IN_EXPECT:
9390 return expand_builtin_expect (exp, target);
9391 case BUILT_IN_EXPECT_WITH_PROBABILITY:
9392 return expand_builtin_expect_with_probability (exp, target);
9393 case BUILT_IN_ASSUME_ALIGNED:
9394 return expand_builtin_assume_aligned (exp, target);
9395 case BUILT_IN_PREFETCH:
9396 expand_builtin_prefetch (exp);
9397 return const0_rtx;
9398
9399 case BUILT_IN_INIT_TRAMPOLINE:
9400 return expand_builtin_init_trampoline (exp, true);
9401 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
9402 return expand_builtin_init_trampoline (exp, false);
9403 case BUILT_IN_ADJUST_TRAMPOLINE:
9404 return expand_builtin_adjust_trampoline (exp);
9405
9406 case BUILT_IN_INIT_DESCRIPTOR:
9407 return expand_builtin_init_descriptor (exp);
9408 case BUILT_IN_ADJUST_DESCRIPTOR:
9409 return expand_builtin_adjust_descriptor (exp);
9410
9411 case BUILT_IN_FORK:
9412 case BUILT_IN_EXECL:
9413 case BUILT_IN_EXECV:
9414 case BUILT_IN_EXECLP:
9415 case BUILT_IN_EXECLE:
9416 case BUILT_IN_EXECVP:
9417 case BUILT_IN_EXECVE:
9418 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
9419 if (target)
9420 return target;
9421 break;
9422
9423 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
9424 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
9425 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
9426 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
9427 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
9428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
9429 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
9430 if (target)
9431 return target;
9432 break;
9433
9434 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
9435 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
9436 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
9437 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
9438 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
9439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
9440 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
9441 if (target)
9442 return target;
9443 break;
9444
9445 case BUILT_IN_SYNC_FETCH_AND_OR_1:
9446 case BUILT_IN_SYNC_FETCH_AND_OR_2:
9447 case BUILT_IN_SYNC_FETCH_AND_OR_4:
9448 case BUILT_IN_SYNC_FETCH_AND_OR_8:
9449 case BUILT_IN_SYNC_FETCH_AND_OR_16:
9450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
9451 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
9452 if (target)
9453 return target;
9454 break;
9455
9456 case BUILT_IN_SYNC_FETCH_AND_AND_1:
9457 case BUILT_IN_SYNC_FETCH_AND_AND_2:
9458 case BUILT_IN_SYNC_FETCH_AND_AND_4:
9459 case BUILT_IN_SYNC_FETCH_AND_AND_8:
9460 case BUILT_IN_SYNC_FETCH_AND_AND_16:
9461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
9462 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
9463 if (target)
9464 return target;
9465 break;
9466
9467 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
9468 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
9469 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
9470 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
9471 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
9472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
9473 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
9474 if (target)
9475 return target;
9476 break;
9477
9478 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
9479 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
9480 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
9481 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
9482 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
9483 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
9484 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
9485 if (target)
9486 return target;
9487 break;
9488
9489 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
9490 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
9491 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
9492 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
9493 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
9494 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
9495 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
9496 if (target)
9497 return target;
9498 break;
9499
9500 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
9501 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
9502 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
9503 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
9504 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
9505 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
9506 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
9507 if (target)
9508 return target;
9509 break;
9510
9511 case BUILT_IN_SYNC_OR_AND_FETCH_1:
9512 case BUILT_IN_SYNC_OR_AND_FETCH_2:
9513 case BUILT_IN_SYNC_OR_AND_FETCH_4:
9514 case BUILT_IN_SYNC_OR_AND_FETCH_8:
9515 case BUILT_IN_SYNC_OR_AND_FETCH_16:
9516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
9517 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
9518 if (target)
9519 return target;
9520 break;
9521
9522 case BUILT_IN_SYNC_AND_AND_FETCH_1:
9523 case BUILT_IN_SYNC_AND_AND_FETCH_2:
9524 case BUILT_IN_SYNC_AND_AND_FETCH_4:
9525 case BUILT_IN_SYNC_AND_AND_FETCH_8:
9526 case BUILT_IN_SYNC_AND_AND_FETCH_16:
9527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
9528 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
9529 if (target)
9530 return target;
9531 break;
9532
9533 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
9534 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
9535 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
9536 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
9537 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
9538 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
9539 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
9540 if (target)
9541 return target;
9542 break;
9543
9544 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
9545 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
9546 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
9547 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
9548 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
9549 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
9550 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
9551 if (target)
9552 return target;
9553 break;
9554
9555 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
9556 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
9557 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
9558 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
9559 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
9560 if (mode == VOIDmode)
9561 mode = TYPE_MODE (boolean_type_node);
9562 if (!target || !register_operand (target, mode))
9563 target = gen_reg_rtx (mode);
9564
9565 mode = get_builtin_sync_mode
9566 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
9567 target = expand_builtin_compare_and_swap (mode, exp, true, target);
9568 if (target)
9569 return target;
9570 break;
9571
9572 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
9573 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
9574 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
9575 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
9576 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
9577 mode = get_builtin_sync_mode
9578 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
9579 target = expand_builtin_compare_and_swap (mode, exp, false, target);
9580 if (target)
9581 return target;
9582 break;
9583
9584 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
9585 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
9586 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
9587 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
9588 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
9589 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
9590 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
9591 if (target)
9592 return target;
9593 break;
9594
9595 case BUILT_IN_SYNC_LOCK_RELEASE_1:
9596 case BUILT_IN_SYNC_LOCK_RELEASE_2:
9597 case BUILT_IN_SYNC_LOCK_RELEASE_4:
9598 case BUILT_IN_SYNC_LOCK_RELEASE_8:
9599 case BUILT_IN_SYNC_LOCK_RELEASE_16:
9600 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
9601 expand_builtin_sync_lock_release (mode, exp);
9602 return const0_rtx;
9603
9604 case BUILT_IN_SYNC_SYNCHRONIZE:
9605 expand_builtin_sync_synchronize ();
9606 return const0_rtx;
9607
9608 case BUILT_IN_ATOMIC_EXCHANGE_1:
9609 case BUILT_IN_ATOMIC_EXCHANGE_2:
9610 case BUILT_IN_ATOMIC_EXCHANGE_4:
9611 case BUILT_IN_ATOMIC_EXCHANGE_8:
9612 case BUILT_IN_ATOMIC_EXCHANGE_16:
9613 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
9614 target = expand_builtin_atomic_exchange (mode, exp, target);
9615 if (target)
9616 return target;
9617 break;
9618
9619 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
9620 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
9621 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
9622 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
9623 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
9624 {
9625 unsigned int nargs, z;
9626 vec<tree, va_gc> *vec;
9627
9628 mode =
9629 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
9630 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
9631 if (target)
9632 return target;
9633
9634 /* If this is turned into an external library call, the weak parameter
9635 must be dropped to match the expected parameter list. */
9636 nargs = call_expr_nargs (exp);
9637 vec_alloc (vec, nargs - 1);
9638 for (z = 0; z < 3; z++)
9639 vec->quick_push (CALL_EXPR_ARG (exp, z));
9640 /* Skip the boolean weak parameter. */
9641 for (z = 4; z < 6; z++)
9642 vec->quick_push (CALL_EXPR_ARG (exp, z));
9643 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
9644 break;
9645 }
9646
9647 case BUILT_IN_ATOMIC_LOAD_1:
9648 case BUILT_IN_ATOMIC_LOAD_2:
9649 case BUILT_IN_ATOMIC_LOAD_4:
9650 case BUILT_IN_ATOMIC_LOAD_8:
9651 case BUILT_IN_ATOMIC_LOAD_16:
9652 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
9653 target = expand_builtin_atomic_load (mode, exp, target);
9654 if (target)
9655 return target;
9656 break;
9657
9658 case BUILT_IN_ATOMIC_STORE_1:
9659 case BUILT_IN_ATOMIC_STORE_2:
9660 case BUILT_IN_ATOMIC_STORE_4:
9661 case BUILT_IN_ATOMIC_STORE_8:
9662 case BUILT_IN_ATOMIC_STORE_16:
9663 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
9664 target = expand_builtin_atomic_store (mode, exp);
9665 if (target)
9666 return const0_rtx;
9667 break;
9668
9669 case BUILT_IN_ATOMIC_ADD_FETCH_1:
9670 case BUILT_IN_ATOMIC_ADD_FETCH_2:
9671 case BUILT_IN_ATOMIC_ADD_FETCH_4:
9672 case BUILT_IN_ATOMIC_ADD_FETCH_8:
9673 case BUILT_IN_ATOMIC_ADD_FETCH_16:
9674 {
9675 enum built_in_function lib;
9676 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
9677 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
9678 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
9679 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
9680 ignore, lib);
9681 if (target)
9682 return target;
9683 break;
9684 }
9685 case BUILT_IN_ATOMIC_SUB_FETCH_1:
9686 case BUILT_IN_ATOMIC_SUB_FETCH_2:
9687 case BUILT_IN_ATOMIC_SUB_FETCH_4:
9688 case BUILT_IN_ATOMIC_SUB_FETCH_8:
9689 case BUILT_IN_ATOMIC_SUB_FETCH_16:
9690 {
9691 enum built_in_function lib;
9692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
9693 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
9694 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
9695 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
9696 ignore, lib);
9697 if (target)
9698 return target;
9699 break;
9700 }
9701 case BUILT_IN_ATOMIC_AND_FETCH_1:
9702 case BUILT_IN_ATOMIC_AND_FETCH_2:
9703 case BUILT_IN_ATOMIC_AND_FETCH_4:
9704 case BUILT_IN_ATOMIC_AND_FETCH_8:
9705 case BUILT_IN_ATOMIC_AND_FETCH_16:
9706 {
9707 enum built_in_function lib;
9708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
9709 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
9710 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
9711 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
9712 ignore, lib);
9713 if (target)
9714 return target;
9715 break;
9716 }
9717 case BUILT_IN_ATOMIC_NAND_FETCH_1:
9718 case BUILT_IN_ATOMIC_NAND_FETCH_2:
9719 case BUILT_IN_ATOMIC_NAND_FETCH_4:
9720 case BUILT_IN_ATOMIC_NAND_FETCH_8:
9721 case BUILT_IN_ATOMIC_NAND_FETCH_16:
9722 {
9723 enum built_in_function lib;
9724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
9725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
9726 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
9727 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
9728 ignore, lib);
9729 if (target)
9730 return target;
9731 break;
9732 }
9733 case BUILT_IN_ATOMIC_XOR_FETCH_1:
9734 case BUILT_IN_ATOMIC_XOR_FETCH_2:
9735 case BUILT_IN_ATOMIC_XOR_FETCH_4:
9736 case BUILT_IN_ATOMIC_XOR_FETCH_8:
9737 case BUILT_IN_ATOMIC_XOR_FETCH_16:
9738 {
9739 enum built_in_function lib;
9740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
9741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
9742 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
9743 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
9744 ignore, lib);
9745 if (target)
9746 return target;
9747 break;
9748 }
9749 case BUILT_IN_ATOMIC_OR_FETCH_1:
9750 case BUILT_IN_ATOMIC_OR_FETCH_2:
9751 case BUILT_IN_ATOMIC_OR_FETCH_4:
9752 case BUILT_IN_ATOMIC_OR_FETCH_8:
9753 case BUILT_IN_ATOMIC_OR_FETCH_16:
9754 {
9755 enum built_in_function lib;
9756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
9757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
9758 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
9759 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
9760 ignore, lib);
9761 if (target)
9762 return target;
9763 break;
9764 }
9765 case BUILT_IN_ATOMIC_FETCH_ADD_1:
9766 case BUILT_IN_ATOMIC_FETCH_ADD_2:
9767 case BUILT_IN_ATOMIC_FETCH_ADD_4:
9768 case BUILT_IN_ATOMIC_FETCH_ADD_8:
9769 case BUILT_IN_ATOMIC_FETCH_ADD_16:
9770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
9771 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
9772 ignore, BUILT_IN_NONE);
9773 if (target)
9774 return target;
9775 break;
9776
9777 case BUILT_IN_ATOMIC_FETCH_SUB_1:
9778 case BUILT_IN_ATOMIC_FETCH_SUB_2:
9779 case BUILT_IN_ATOMIC_FETCH_SUB_4:
9780 case BUILT_IN_ATOMIC_FETCH_SUB_8:
9781 case BUILT_IN_ATOMIC_FETCH_SUB_16:
9782 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
9783 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
9784 ignore, BUILT_IN_NONE);
9785 if (target)
9786 return target;
9787 break;
9788
9789 case BUILT_IN_ATOMIC_FETCH_AND_1:
9790 case BUILT_IN_ATOMIC_FETCH_AND_2:
9791 case BUILT_IN_ATOMIC_FETCH_AND_4:
9792 case BUILT_IN_ATOMIC_FETCH_AND_8:
9793 case BUILT_IN_ATOMIC_FETCH_AND_16:
9794 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
9795 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
9796 ignore, BUILT_IN_NONE);
9797 if (target)
9798 return target;
9799 break;
9800
9801 case BUILT_IN_ATOMIC_FETCH_NAND_1:
9802 case BUILT_IN_ATOMIC_FETCH_NAND_2:
9803 case BUILT_IN_ATOMIC_FETCH_NAND_4:
9804 case BUILT_IN_ATOMIC_FETCH_NAND_8:
9805 case BUILT_IN_ATOMIC_FETCH_NAND_16:
9806 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
9807 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
9808 ignore, BUILT_IN_NONE);
9809 if (target)
9810 return target;
9811 break;
9812
9813 case BUILT_IN_ATOMIC_FETCH_XOR_1:
9814 case BUILT_IN_ATOMIC_FETCH_XOR_2:
9815 case BUILT_IN_ATOMIC_FETCH_XOR_4:
9816 case BUILT_IN_ATOMIC_FETCH_XOR_8:
9817 case BUILT_IN_ATOMIC_FETCH_XOR_16:
9818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
9819 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
9820 ignore, BUILT_IN_NONE);
9821 if (target)
9822 return target;
9823 break;
9824
9825 case BUILT_IN_ATOMIC_FETCH_OR_1:
9826 case BUILT_IN_ATOMIC_FETCH_OR_2:
9827 case BUILT_IN_ATOMIC_FETCH_OR_4:
9828 case BUILT_IN_ATOMIC_FETCH_OR_8:
9829 case BUILT_IN_ATOMIC_FETCH_OR_16:
9830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
9831 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
9832 ignore, BUILT_IN_NONE);
9833 if (target)
9834 return target;
9835 break;
9836
9837 case BUILT_IN_ATOMIC_TEST_AND_SET:
9838 return expand_builtin_atomic_test_and_set (exp, target);
9839
9840 case BUILT_IN_ATOMIC_CLEAR:
9841 return expand_builtin_atomic_clear (exp);
9842
9843 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9844 return expand_builtin_atomic_always_lock_free (exp);
9845
9846 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9847 target = expand_builtin_atomic_is_lock_free (exp);
9848 if (target)
9849 return target;
9850 break;
9851
9852 case BUILT_IN_ATOMIC_THREAD_FENCE:
9853 expand_builtin_atomic_thread_fence (exp);
9854 return const0_rtx;
9855
9856 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
9857 expand_builtin_atomic_signal_fence (exp);
9858 return const0_rtx;
9859
9860 case BUILT_IN_OBJECT_SIZE:
9861 return expand_builtin_object_size (exp);
9862
9863 case BUILT_IN_MEMCPY_CHK:
9864 case BUILT_IN_MEMPCPY_CHK:
9865 case BUILT_IN_MEMMOVE_CHK:
9866 case BUILT_IN_MEMSET_CHK:
9867 target = expand_builtin_memory_chk (exp, target, mode, fcode);
9868 if (target)
9869 return target;
9870 break;
9871
9872 case BUILT_IN_STRCPY_CHK:
9873 case BUILT_IN_STPCPY_CHK:
9874 case BUILT_IN_STRNCPY_CHK:
9875 case BUILT_IN_STPNCPY_CHK:
9876 case BUILT_IN_STRCAT_CHK:
9877 case BUILT_IN_STRNCAT_CHK:
9878 case BUILT_IN_SNPRINTF_CHK:
9879 case BUILT_IN_VSNPRINTF_CHK:
9880 maybe_emit_chk_warning (exp, fcode);
9881 break;
9882
9883 case BUILT_IN_SPRINTF_CHK:
9884 case BUILT_IN_VSPRINTF_CHK:
9885 maybe_emit_sprintf_chk_warning (exp, fcode);
9886 break;
9887
9888 case BUILT_IN_FREE:
9889 if (warn_free_nonheap_object)
9890 maybe_emit_free_warning (exp);
9891 break;
9892
9893 case BUILT_IN_THREAD_POINTER:
9894 return expand_builtin_thread_pointer (exp, target);
9895
9896 case BUILT_IN_SET_THREAD_POINTER:
9897 expand_builtin_set_thread_pointer (exp);
9898 return const0_rtx;
9899
9900 case BUILT_IN_ACC_ON_DEVICE:
9901 /* Do a library call if we failed to expand the builtin when
9902 folding. */
9903 break;
9904
9905 case BUILT_IN_GOACC_PARLEVEL_ID:
9906 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9907 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9908
9909 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9910 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9911
9912 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9913 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9914 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9915 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9916 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9917 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9918 return expand_speculation_safe_value (mode, exp, target, ignore);
9919
9920 default: /* Just do a library call for an unknown builtin. */
9921 break;
9922 }
9923
9924 /* The switch statement above can drop through to cause the function
9925 to be called normally. */
9926 return expand_call (exp, target, ignore);
9927 }
9928
9929 /* Determine whether a tree node represents a call to a built-in
9930 function. If the tree T is a call to a built-in function with
9931 the right number of arguments of the appropriate types, return
9932 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9933 Otherwise the return value is END_BUILTINS. */
9934
9935 enum built_in_function
9936 builtin_mathfn_code (const_tree t)
9937 {
9938 const_tree fndecl, arg, parmlist;
9939 const_tree argtype, parmtype;
9940 const_call_expr_arg_iterator iter;
9941
9942 if (TREE_CODE (t) != CALL_EXPR)
9943 return END_BUILTINS;
9944
9945 fndecl = get_callee_fndecl (t);
9946 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9947 return END_BUILTINS;
9948
9949 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9950 init_const_call_expr_arg_iterator (t, &iter);
9951 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9952 {
9953 /* If a function doesn't take a variable number of arguments,
9954 the last element in the list will have type `void'. */
9955 parmtype = TREE_VALUE (parmlist);
9956 if (VOID_TYPE_P (parmtype))
9957 {
9958 if (more_const_call_expr_args_p (&iter))
9959 return END_BUILTINS;
9960 return DECL_FUNCTION_CODE (fndecl);
9961 }
9962
9963 if (! more_const_call_expr_args_p (&iter))
9964 return END_BUILTINS;
9965
9966 arg = next_const_call_expr_arg (&iter);
9967 argtype = TREE_TYPE (arg);
9968
9969 if (SCALAR_FLOAT_TYPE_P (parmtype))
9970 {
9971 if (! SCALAR_FLOAT_TYPE_P (argtype))
9972 return END_BUILTINS;
9973 }
9974 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9975 {
9976 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9977 return END_BUILTINS;
9978 }
9979 else if (POINTER_TYPE_P (parmtype))
9980 {
9981 if (! POINTER_TYPE_P (argtype))
9982 return END_BUILTINS;
9983 }
9984 else if (INTEGRAL_TYPE_P (parmtype))
9985 {
9986 if (! INTEGRAL_TYPE_P (argtype))
9987 return END_BUILTINS;
9988 }
9989 else
9990 return END_BUILTINS;
9991 }
9992
9993 /* Variable-length argument list. */
9994 return DECL_FUNCTION_CODE (fndecl);
9995 }
9996
9997 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9998 evaluate to a constant. */
9999
10000 static tree
10001 fold_builtin_constant_p (tree arg)
10002 {
10003 /* We return 1 for a numeric type that's known to be a constant
10004 value at compile-time or for an aggregate type that's a
10005 literal constant. */
10006 STRIP_NOPS (arg);
10007
10008 /* If we know this is a constant, return the constant one. */
10009 if (CONSTANT_CLASS_P (arg)
10010 || (TREE_CODE (arg) == CONSTRUCTOR
10011 && TREE_CONSTANT (arg)))
10012 return integer_one_node;
10013 if (TREE_CODE (arg) == ADDR_EXPR)
10014 {
10015 tree op = TREE_OPERAND (arg, 0);
10016 if (TREE_CODE (op) == STRING_CST
10017 || (TREE_CODE (op) == ARRAY_REF
10018 && integer_zerop (TREE_OPERAND (op, 1))
10019 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10020 return integer_one_node;
10021 }
10022
10023 /* If this expression has side effects, show we don't know it to be a
10024 constant. Likewise if it's a pointer or aggregate type, since in
10025 those cases we only want literals, as those are only optimized
10026 when generating RTL, not later.
10027 And finally, if we are compiling an initializer, not code, we
10028 need to return a definite result now; there's not going to be any
10029 more optimization done. */
10030 if (TREE_SIDE_EFFECTS (arg)
10031 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
10032 || POINTER_TYPE_P (TREE_TYPE (arg))
10033 || cfun == 0
10034 || folding_initializer
10035 || force_folding_builtin_constant_p)
10036 return integer_zero_node;
10037
10038 return NULL_TREE;
10039 }
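
/* For illustration (a usage sketch restating the cases handled above):

     __builtin_constant_p (42)      folds to 1 (a constant-class node)
     __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST)
     __builtin_constant_p (ptr)     folds to 0 (pointer-typed variable)
     __builtin_constant_p (i)       NULL_TREE for an int variable, so the
                                    call is kept and later optimizations can
                                    still prove I constant.  */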
10040
10041 /* Create builtin_expect or builtin_expect_with_probability
10042 with PRED and EXPECTED as its arguments and return it as a truthvalue.
10043 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
10044 argument. builtin_expect_with_probability instead uses the third argument
10045 as the PROBABILITY value. */
10046
10047 static tree
10048 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
10049 tree predictor, tree probability)
10050 {
10051 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
10052
10053 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
10054 : BUILT_IN_EXPECT_WITH_PROBABILITY);
10055 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
10056 ret_type = TREE_TYPE (TREE_TYPE (fn));
10057 pred_type = TREE_VALUE (arg_types);
10058 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
10059
10060 pred = fold_convert_loc (loc, pred_type, pred);
10061 expected = fold_convert_loc (loc, expected_type, expected);
10062
10063 if (probability)
10064 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
10065 else
10066 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
10067 predictor);
10068
10069 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
10070 build_int_cst (ret_type, 0));
10071 }
10072
10073 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
10074 NULL_TREE if no simplification is possible. */
10075
10076 tree
10077 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
10078 tree arg3)
10079 {
10080 tree inner, fndecl, inner_arg0;
10081 enum tree_code code;
10082
10083 /* Distribute the expected value over short-circuiting operators.
10084 See through the cast from truthvalue_type_node to long. */
10085 inner_arg0 = arg0;
10086 while (CONVERT_EXPR_P (inner_arg0)
10087 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
10088 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
10089 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
10090
10091 /* If this is a builtin_expect within a builtin_expect keep the
10092 inner one. See through a comparison against a constant. It
10093 might have been added to create a truthvalue.
10094 inner = inner_arg0;
10095
10096 if (COMPARISON_CLASS_P (inner)
10097 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
10098 inner = TREE_OPERAND (inner, 0);
10099
10100 if (TREE_CODE (inner) == CALL_EXPR
10101 && (fndecl = get_callee_fndecl (inner))
10102 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
10103 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
10104 return arg0;
10105
10106 inner = inner_arg0;
10107 code = TREE_CODE (inner);
10108 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
10109 {
10110 tree op0 = TREE_OPERAND (inner, 0);
10111 tree op1 = TREE_OPERAND (inner, 1);
10112 arg1 = save_expr (arg1);
10113
10114 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
10115 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
10116 inner = build2 (code, TREE_TYPE (inner), op0, op1);
10117
10118 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
10119 }
10120
10121 /* If the argument isn't invariant then there's nothing else we can do. */
10122 if (!TREE_CONSTANT (inner_arg0))
10123 return NULL_TREE;
10124
10125 /* If we expect that a comparison against the argument will fold to
10126 a constant return the constant. In practice, this means a true
10127 constant or the address of a non-weak symbol. */
10128 inner = inner_arg0;
10129 STRIP_NOPS (inner);
10130 if (TREE_CODE (inner) == ADDR_EXPR)
10131 {
10132 do
10133 {
10134 inner = TREE_OPERAND (inner, 0);
10135 }
10136 while (TREE_CODE (inner) == COMPONENT_REF
10137 || TREE_CODE (inner) == ARRAY_REF);
10138 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
10139 return NULL_TREE;
10140 }
10141
10142 /* Otherwise, ARG0 already has the proper type for the return value. */
10143 return arg0;
10144 }
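
/* As an illustration of the distribution above (a sketch, assuming the
   usual truthvalue lowering):

     __builtin_expect (a && b, 1)

   is folded into roughly

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each short-circuited condition carries the expected value.  */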
10145
10146 /* Fold a call to __builtin_classify_type with argument ARG. */
10147
10148 static tree
10149 fold_builtin_classify_type (tree arg)
10150 {
10151 if (arg == 0)
10152 return build_int_cst (integer_type_node, no_type_class);
10153
10154 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
10155 }
10156
10157 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
10158 ARG. */
10159
10160 static tree
10161 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
10162 {
10163 if (!validate_arg (arg, POINTER_TYPE))
10164 return NULL_TREE;
10165 else
10166 {
10167 c_strlen_data lendata = { };
10168 tree len = c_strlen (arg, 0, &lendata);
10169
10170 if (len)
10171 return fold_convert_loc (loc, type, len);
10172
10173 if (!lendata.decl)
10174 c_strlen (arg, 1, &lendata);
10175
10176 if (lendata.decl)
10177 {
10178 if (EXPR_HAS_LOCATION (arg))
10179 loc = EXPR_LOCATION (arg);
10180 else if (loc == UNKNOWN_LOCATION)
10181 loc = input_location;
10182 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
10183 }
10184
10185 return NULL_TREE;
10186 }
10187 }
10188
10189 /* Fold a call to __builtin_inf or __builtin_huge_val. */
10190
10191 static tree
10192 fold_builtin_inf (location_t loc, tree type, int warn)
10193 {
10194 REAL_VALUE_TYPE real;
10195
10196 /* __builtin_inff is intended to be usable to define INFINITY on all
10197 targets. If an infinity is not available, INFINITY expands "to a
10198 positive constant of type float that overflows at translation
10199 time", footnote "In this case, using INFINITY will violate the
10200 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
10201 Thus we pedwarn to ensure this constraint violation is
10202 diagnosed. */
10203 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
10204 pedwarn (loc, 0, "target format does not support infinity");
10205
10206 real_inf (&real);
10207 return build_real (type, real);
10208 }
10209
10210 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
10211 NULL_TREE if no simplification can be made. */
10212
10213 static tree
10214 fold_builtin_sincos (location_t loc,
10215 tree arg0, tree arg1, tree arg2)
10216 {
10217 tree type;
10218 tree fndecl, call = NULL_TREE;
10219
10220 if (!validate_arg (arg0, REAL_TYPE)
10221 || !validate_arg (arg1, POINTER_TYPE)
10222 || !validate_arg (arg2, POINTER_TYPE))
10223 return NULL_TREE;
10224
10225 type = TREE_TYPE (arg0);
10226
10227 /* Find the cexpi built-in for TYPE; without it we cannot canonicalize. */
10228 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
10229 if (fn == END_BUILTINS)
10230 return NULL_TREE;
10231
10232 /* Calculate the cexpi result directly when the argument is a constant. */
10233 if (TREE_CODE (arg0) == REAL_CST)
10234 {
10235 tree complex_type = build_complex_type (type);
10236 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
10237 }
10238 if (!call)
10239 {
10240 if (!targetm.libc_has_function (function_c99_math_complex, type)
10241 || !builtin_decl_implicit_p (fn))
10242 return NULL_TREE;
10243 fndecl = builtin_decl_explicit (fn);
10244 call = build_call_expr_loc (loc, fndecl, 1, arg0);
10245 call = builtin_save_expr (call);
10246 }
10247
10248 tree ptype = build_pointer_type (type);
10249 arg1 = fold_convert (ptype, arg1);
10250 arg2 = fold_convert (ptype, arg2);
10251 return build2 (COMPOUND_EXPR, void_type_node,
10252 build2 (MODIFY_EXPR, void_type_node,
10253 build_fold_indirect_ref_loc (loc, arg1),
10254 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
10255 build2 (MODIFY_EXPR, void_type_node,
10256 build_fold_indirect_ref_loc (loc, arg2),
10257 fold_build1_loc (loc, REALPART_EXPR, type, call)));
10258 }
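
/* Editor's illustrative sketch, not GCC source: the fold above rewrites
   sincos (x, &s, &c) into one cexpi (x) call whose imaginary and real
   parts are stored through the two pointers.  The helper below mimics
   that shape with the C99 identity cexp (I*x) == cos (x) + I*sin (x);
   the name demo_sincos_via_cexpi is hypothetical.  */
#if 0
#include <complex.h>

static void
demo_sincos_via_cexpi (double x, double *sinx, double *cosx)
{
  double _Complex t = cexp (I * x);
  *sinx = cimag (t);   /* IMAGPART_EXPR of the cexpi result */
  *cosx = creal (t);   /* REALPART_EXPR of the cexpi result */
}
#endif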
10259
10260 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
10261 Return NULL_TREE if no simplification can be made. */
10262
10263 static tree
10264 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
10265 {
10266 if (!validate_arg (arg1, POINTER_TYPE)
10267 || !validate_arg (arg2, POINTER_TYPE)
10268 || !validate_arg (len, INTEGER_TYPE))
10269 return NULL_TREE;
10270
10271 /* If the LEN parameter is zero, return zero. */
10272 if (integer_zerop (len))
10273 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
10274 arg1, arg2);
10275
10276 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
10277 if (operand_equal_p (arg1, arg2, 0))
10278 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
10279
10280 /* If len parameter is one, return an expression corresponding to
10281 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
10282 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
10283 {
10284 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
10285 tree cst_uchar_ptr_node
10286 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
10287
10288 tree ind1
10289 = fold_convert_loc (loc, integer_type_node,
10290 build1 (INDIRECT_REF, cst_uchar_node,
10291 fold_convert_loc (loc,
10292 cst_uchar_ptr_node,
10293 arg1)));
10294 tree ind2
10295 = fold_convert_loc (loc, integer_type_node,
10296 build1 (INDIRECT_REF, cst_uchar_node,
10297 fold_convert_loc (loc,
10298 cst_uchar_ptr_node,
10299 arg2)));
10300 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
10301 }
10302
10303 return NULL_TREE;
10304 }
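
/* Editor's illustrative sketch, not GCC source: with a constant length of
   one, the fold above reduces memcmp to a single byte subtraction.  The
   helper name is hypothetical.  */
#if 0
static int
memcmp1_folded (const void *a, const void *b)
{
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif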
10305
10306 /* Fold a call to builtin isascii with argument ARG. */
10307
10308 static tree
10309 fold_builtin_isascii (location_t loc, tree arg)
10310 {
10311 if (!validate_arg (arg, INTEGER_TYPE))
10312 return NULL_TREE;
10313 else
10314 {
10315 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
10316 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
10317 build_int_cst (integer_type_node,
10318 ~ (unsigned HOST_WIDE_INT) 0x7f));
10319 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
10320 arg, integer_zero_node);
10321 }
10322 }
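
/* Editor's illustrative sketch, not GCC source: the source-level form the
   isascii fold above produces.  Hypothetical helper name.  */
#if 0
static int
isascii_folded (int c)
{
  return (c & ~0x7f) == 0;
}
#endif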
10323
10324 /* Fold a call to builtin toascii with argument ARG. */
10325
10326 static tree
10327 fold_builtin_toascii (location_t loc, tree arg)
10328 {
10329 if (!validate_arg (arg, INTEGER_TYPE))
10330 return NULL_TREE;
10331
10332 /* Transform toascii(c) -> (c & 0x7f). */
10333 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
10334 build_int_cst (integer_type_node, 0x7f));
10335 }
10336
10337 /* Fold a call to builtin isdigit with argument ARG. */
10338
10339 static tree
10340 fold_builtin_isdigit (location_t loc, tree arg)
10341 {
10342 if (!validate_arg (arg, INTEGER_TYPE))
10343 return NULL_TREE;
10344 else
10345 {
10346 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
10347 /* According to the C standard, isdigit is unaffected by locale.
10348 However, it definitely is affected by the target character set. */
10349 unsigned HOST_WIDE_INT target_digit0
10350 = lang_hooks.to_target_charset ('0');
10351
10352 if (target_digit0 == 0)
10353 return NULL_TREE;
10354
10355 arg = fold_convert_loc (loc, unsigned_type_node, arg);
10356 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
10357 build_int_cst (unsigned_type_node, target_digit0));
10358 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
10359 build_int_cst (unsigned_type_node, 9));
10360 }
10361 }
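
/* Editor's illustrative sketch, not GCC source: the isdigit fold above in
   source form, assuming an execution character set where '0' is the
   target's digit zero.  Hypothetical helper name.  */
#if 0
static int
isdigit_folded (int c)
{
  return (unsigned) c - '0' <= 9;
}
#endif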
10362
10363 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
10364
10365 static tree
10366 fold_builtin_fabs (location_t loc, tree arg, tree type)
10367 {
10368 if (!validate_arg (arg, REAL_TYPE))
10369 return NULL_TREE;
10370
10371 arg = fold_convert_loc (loc, type, arg);
10372 return fold_build1_loc (loc, ABS_EXPR, type, arg);
10373 }
10374
10375 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
10376
10377 static tree
10378 fold_builtin_abs (location_t loc, tree arg, tree type)
10379 {
10380 if (!validate_arg (arg, INTEGER_TYPE))
10381 return NULL_TREE;
10382
10383 arg = fold_convert_loc (loc, type, arg);
10384 return fold_build1_loc (loc, ABS_EXPR, type, arg);
10385 }
10386
10387 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
10388
10389 static tree
10390 fold_builtin_carg (location_t loc, tree arg, tree type)
10391 {
10392 if (validate_arg (arg, COMPLEX_TYPE)
10393 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
10394 {
10395 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
10396
10397 if (atan2_fn)
10398 {
10399 tree new_arg = builtin_save_expr (arg);
10400 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
10401 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
10402 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
10403 }
10404 }
10405
10406 return NULL_TREE;
10407 }
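
/* Editor's illustrative sketch, not GCC source: the carg fold above in
   source form.  Hypothetical helper name.  */
#if 0
#include <complex.h>
#include <math.h>

static double
carg_folded (double _Complex z)
{
  return atan2 (cimag (z), creal (z));
}
#endif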
10408
10409 /* Fold a call to builtin frexp. We can assume the base is 2. */
10410
10411 static tree
10412 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
10413 {
10414 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10415 return NULL_TREE;
10416
10417 STRIP_NOPS (arg0);
10418
10419 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10420 return NULL_TREE;
10421
10422 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10423
10424 /* Proceed if a valid pointer type was passed in. */
10425 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
10426 {
10427 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10428 tree frac, exp;
10429
10430 switch (value->cl)
10431 {
10432 case rvc_zero:
10433 /* For +-0, return (*exp = 0, +-0). */
10434 exp = integer_zero_node;
10435 frac = arg0;
10436 break;
10437 case rvc_nan:
10438 case rvc_inf:
10439 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
10440 return omit_one_operand_loc (loc, rettype, arg0, arg1);
10441 case rvc_normal:
10442 {
10443 /* Since the frexp function always expects base 2, and in
10444 GCC normalized significands are already in the range
10445 [0.5, 1.0), we have exactly what frexp wants. */
10446 REAL_VALUE_TYPE frac_rvt = *value;
10447 SET_REAL_EXP (&frac_rvt, 0);
10448 frac = build_real (rettype, frac_rvt);
10449 exp = build_int_cst (integer_type_node, REAL_EXP (value));
10450 }
10451 break;
10452 default:
10453 gcc_unreachable ();
10454 }
10455
10456 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
10457 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
10458 TREE_SIDE_EFFECTS (arg1) = 1;
10459 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
10460 }
10461
10462 return NULL_TREE;
10463 }
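
/* Editor's illustrative sketch, not GCC source: a worked example of the
   constant folding above.  For the constant 8.0 the rvc_normal case
   produces (*ep = 4, 0.5), since 8.0 == 0.5 * 2**4 with the fraction
   normalized to [0.5, 1).  Hypothetical helper name.  */
#if 0
#include <math.h>

static double
frexp_of_eight (int *ep)
{
  return frexp (8.0, ep);   /* folds to (*ep = 4, 0.5) */
}
#endif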
10464
10465 /* Fold a call to builtin modf. */
10466
10467 static tree
10468 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10469 {
10470 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10471 return NULL_TREE;
10472
10473 STRIP_NOPS (arg0);
10474
10475 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10476 return NULL_TREE;
10477
10478 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10479
10480 /* Proceed if a valid pointer type was passed in. */
10481 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10482 {
10483 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10484 REAL_VALUE_TYPE trunc, frac;
10485
10486 switch (value->cl)
10487 {
10488 case rvc_nan:
10489 case rvc_zero:
10490 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10491 trunc = frac = *value;
10492 break;
10493 case rvc_inf:
10494 /* For +-Inf, return (*arg1 = arg0, +-0). */
10495 frac = dconst0;
10496 frac.sign = value->sign;
10497 trunc = *value;
10498 break;
10499 case rvc_normal:
10500 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10501 real_trunc (&trunc, VOIDmode, value);
10502 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10503 /* If the original number was negative and already
10504 integral, then the fractional part is -0.0. */
10505 if (value->sign && frac.cl == rvc_zero)
10506 frac.sign = value->sign;
10507 break;
10508 }
10509
10510 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10511 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10512 build_real (rettype, trunc));
10513 TREE_SIDE_EFFECTS (arg1) = 1;
10514 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10515 build_real (rettype, frac));
10516 }
10517
10518 return NULL_TREE;
10519 }
10520
10521 /* Given a location LOC, an interclass builtin function decl FNDECL
10522 and its single argument ARG, return a folded expression computing
10523 the same value, or NULL_TREE if we either couldn't or didn't want to
10524 fold (the latter happens if there's an RTL instruction available). */
10525
10526 static tree
10527 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
10528 {
10529 machine_mode mode;
10530
10531 if (!validate_arg (arg, REAL_TYPE))
10532 return NULL_TREE;
10533
10534 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
10535 return NULL_TREE;
10536
10537 mode = TYPE_MODE (TREE_TYPE (arg));
10538
10539 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
10540
10541 /* If there is no optab, try generic code. */
10542 switch (DECL_FUNCTION_CODE (fndecl))
10543 {
10544 tree result;
10545
10546 CASE_FLT_FN (BUILT_IN_ISINF):
10547 {
10548 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10549 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10550 tree type = TREE_TYPE (arg);
10551 REAL_VALUE_TYPE r;
10552 char buf[128];
10553
10554 if (is_ibm_extended)
10555 {
10556 /* NaN and Inf are encoded in the high-order double value
10557 only. The low-order value is not significant. */
10558 type = double_type_node;
10559 mode = DFmode;
10560 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10561 }
10562 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10563 real_from_string (&r, buf);
10564 result = build_call_expr (isgr_fn, 2,
10565 fold_build1_loc (loc, ABS_EXPR, type, arg),
10566 build_real (type, r));
10567 return result;
10568 }
10569 CASE_FLT_FN (BUILT_IN_FINITE):
10570 case BUILT_IN_ISFINITE:
10571 {
10572 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10573 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10574 tree type = TREE_TYPE (arg);
10575 REAL_VALUE_TYPE r;
10576 char buf[128];
10577
10578 if (is_ibm_extended)
10579 {
10580 /* NaN and Inf are encoded in the high-order double value
10581 only. The low-order value is not significant. */
10582 type = double_type_node;
10583 mode = DFmode;
10584 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10585 }
10586 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10587 real_from_string (&r, buf);
10588 result = build_call_expr (isle_fn, 2,
10589 fold_build1_loc (loc, ABS_EXPR, type, arg),
10590 build_real (type, r));
10591 /*result = fold_build2_loc (loc, UNGT_EXPR,
10592 TREE_TYPE (TREE_TYPE (fndecl)),
10593 fold_build1_loc (loc, ABS_EXPR, type, arg),
10594 build_real (type, r));
10595 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10596 TREE_TYPE (TREE_TYPE (fndecl)),
10597 result);*/
10598 return result;
10599 }
10600 case BUILT_IN_ISNORMAL:
10601 {
10602 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10603 islessequal(fabs(x),DBL_MAX). */
10604 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10605 tree type = TREE_TYPE (arg);
10606 tree orig_arg, max_exp, min_exp;
10607 machine_mode orig_mode = mode;
10608 REAL_VALUE_TYPE rmax, rmin;
10609 char buf[128];
10610
10611 orig_arg = arg = builtin_save_expr (arg);
10612 if (is_ibm_extended)
10613 {
10614 /* Use double to test the normal range of IBM extended
10615 precision. Emin for IBM extended precision is
10616 different to emin for IEEE double, being 53 higher
10617 since the low double exponent is at least 53 lower
10618 than the high double exponent. */
10619 type = double_type_node;
10620 mode = DFmode;
10621 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
10622 }
10623 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
10624
10625 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
10626 real_from_string (&rmax, buf);
10627 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
10628 real_from_string (&rmin, buf);
10629 max_exp = build_real (type, rmax);
10630 min_exp = build_real (type, rmin);
10631
10632 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
10633 if (is_ibm_extended)
10634 {
10635 /* Testing the high end of the range is done just using
10636 the high double, using the same test as isfinite().
10637 For the subnormal end of the range we first test the
10638 high double, then if its magnitude is equal to the
10639 limit of 0x1p-969, we test whether the low double is
10640 non-zero and opposite sign to the high double. */
10641 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
10642 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10643 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
10644 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
10645 arg, min_exp);
10646 tree as_complex = build1 (VIEW_CONVERT_EXPR,
10647 complex_double_type_node, orig_arg);
10648 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
10649 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
10650 tree zero = build_real (type, dconst0);
10651 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
10652 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
10653 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
10654 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
10655 fold_build3 (COND_EXPR,
10656 integer_type_node,
10657 hilt, logt, lolt));
10658 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
10659 eq_min, ok_lo);
10660 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
10661 gt_min, eq_min);
10662 }
10663 else
10664 {
10665 tree const isge_fn
10666 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10667 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
10668 }
10669 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
10670 max_exp, min_exp);
10671 return result;
10672 }
10673 default:
10674 break;
10675 }
10676
10677 return NULL_TREE;
10678 }
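
/* Editor's illustrative sketch, not GCC source: what the interclass folds
   above expand to for a plain IEEE double argument (no IBM composite
   format).  Hypothetical helper names.  */
#if 0
#include <math.h>
#include <float.h>

static int
isinf_folded (double x)
{
  return isgreater (fabs (x), DBL_MAX);
}

static int
isfinite_folded (double x)
{
  return islessequal (fabs (x), DBL_MAX);
}
#endif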
10679
10680 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10681 ARG is the argument for the call. */
10682
10683 static tree
10684 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10685 {
10686 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10687
10688 if (!validate_arg (arg, REAL_TYPE))
10689 return NULL_TREE;
10690
10691 switch (builtin_index)
10692 {
10693 case BUILT_IN_ISINF:
10694 if (!HONOR_INFINITIES (arg))
10695 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10696
10697 return NULL_TREE;
10698
10699 case BUILT_IN_ISINF_SIGN:
10700 {
10701 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10702 /* In a boolean context, GCC will fold the inner COND_EXPR to
10703 1. So e.g. "if (isinf_sign(x))" would be folded to just
10704 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10705 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
10706 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10707 tree tmp = NULL_TREE;
10708
10709 arg = builtin_save_expr (arg);
10710
10711 if (signbit_fn && isinf_fn)
10712 {
10713 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10714 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10715
10716 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10717 signbit_call, integer_zero_node);
10718 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10719 isinf_call, integer_zero_node);
10720
10721 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10722 integer_minus_one_node, integer_one_node);
10723 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10724 isinf_call, tmp,
10725 integer_zero_node);
10726 }
10727
10728 return tmp;
10729 }
10730
10731 case BUILT_IN_ISFINITE:
10732 if (!HONOR_NANS (arg)
10733 && !HONOR_INFINITIES (arg))
10734 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10735
10736 return NULL_TREE;
10737
10738 case BUILT_IN_ISNAN:
10739 if (!HONOR_NANS (arg))
10740 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10741
10742 {
10743 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
10744 if (is_ibm_extended)
10745 {
10746 /* NaN and Inf are encoded in the high-order double value
10747 only. The low-order value is not significant. */
10748 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
10749 }
10750 }
10751 arg = builtin_save_expr (arg);
10752 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10753
10754 default:
10755 gcc_unreachable ();
10756 }
10757 }
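
/* Editor's illustrative sketch, not GCC source: the isinf_sign fold above
   in source form.  Hypothetical helper name.  */
#if 0
#include <math.h>

static int
isinf_sign_folded (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
#endif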
10758
10759 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10760 This builtin will generate code to return the appropriate floating
10761 point classification depending on the value of the floating point
10762 number passed in. The possible return values must be supplied as
10763 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10764 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10765 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10766
10767 static tree
10768 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
10769 {
10770 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10771 arg, type, res, tmp;
10772 machine_mode mode;
10773 REAL_VALUE_TYPE r;
10774 char buf[128];
10775
10776 /* Verify the required arguments in the original call. */
10777 if (nargs != 6
10778 || !validate_arg (args[0], INTEGER_TYPE)
10779 || !validate_arg (args[1], INTEGER_TYPE)
10780 || !validate_arg (args[2], INTEGER_TYPE)
10781 || !validate_arg (args[3], INTEGER_TYPE)
10782 || !validate_arg (args[4], INTEGER_TYPE)
10783 || !validate_arg (args[5], REAL_TYPE))
10784 return NULL_TREE;
10785
10786 fp_nan = args[0];
10787 fp_infinite = args[1];
10788 fp_normal = args[2];
10789 fp_subnormal = args[3];
10790 fp_zero = args[4];
10791 arg = args[5];
10792 type = TREE_TYPE (arg);
10793 mode = TYPE_MODE (type);
10794 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10795
10796 /* fpclassify(x) ->
10797 isnan(x) ? FP_NAN :
10798 (fabs(x) == Inf ? FP_INFINITE :
10799 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10800 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10801
10802 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10803 build_real (type, dconst0));
10804 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10805 tmp, fp_zero, fp_subnormal);
10806
10807 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10808 real_from_string (&r, buf);
10809 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10810 arg, build_real (type, r));
10811 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10812
10813 if (HONOR_INFINITIES (mode))
10814 {
10815 real_inf (&r);
10816 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10817 build_real (type, r));
10818 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10819 fp_infinite, res);
10820 }
10821
10822 if (HONOR_NANS (mode))
10823 {
10824 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10825 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10826 }
10827
10828 return res;
10829 }
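
/* Editor's illustrative sketch, not GCC source: the shape of the nested
   COND_EXPRs built above, written for double with the FP_* values a
   caller would normally pass through the library's fpclassify macro.
   Hypothetical helper name.  */
#if 0
#include <math.h>
#include <float.h>

static int
fpclassify_folded (double x)
{
  double ax = fabs (x);
  return isnan (x) ? FP_NAN
	 : ax == (double) INFINITY ? FP_INFINITE
	 : ax >= DBL_MIN ? FP_NORMAL
	 : x == 0.0 ? FP_ZERO : FP_SUBNORMAL;
}
#endif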
10830
10831 /* Fold a call to an unordered comparison function such as
10832 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10833 being called and ARG0 and ARG1 are the arguments for the call.
10834 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10835 the opposite of the desired result. UNORDERED_CODE is used
10836 for modes that can hold NaNs and ORDERED_CODE is used for
10837 the rest. */
10838
10839 static tree
10840 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10841 enum tree_code unordered_code,
10842 enum tree_code ordered_code)
10843 {
10844 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10845 enum tree_code code;
10846 tree type0, type1;
10847 enum tree_code code0, code1;
10848 tree cmp_type = NULL_TREE;
10849
10850 type0 = TREE_TYPE (arg0);
10851 type1 = TREE_TYPE (arg1);
10852
10853 code0 = TREE_CODE (type0);
10854 code1 = TREE_CODE (type1);
10855
10856 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10857 /* Choose the wider of two real types. */
10858 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10859 ? type0 : type1;
10860 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10861 cmp_type = type0;
10862 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10863 cmp_type = type1;
10864
10865 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10866 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10867
10868 if (unordered_code == UNORDERED_EXPR)
10869 {
10870 if (!HONOR_NANS (arg0))
10871 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10872 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10873 }
10874
10875 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
10876 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10877 fold_build2_loc (loc, code, type, arg0, arg1));
10878 }
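
/* Editor's illustrative sketch, not GCC source: the fold above rewrites
   each macro as the logical negation of its "opposite" comparison, e.g.
   isgreater becomes NOT of unordered-or-less-equal.  Hypothetical helper
   name; the short-circuit keeps the x <= y comparison from being reached
   on a NaN operand.  */
#if 0
static int
isgreater_folded (double x, double y)
{
  return !(__builtin_isunordered (x, y) || x <= y);
}
#endif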
10879
10880 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10881 arithmetics if it can never overflow, or into internal functions that
10882 return both result of arithmetics and overflowed boolean flag in
10883 a complex integer result, or some other check for overflow.
10884 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10885 checking part of that. */
10886
10887 static tree
10888 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10889 tree arg0, tree arg1, tree arg2)
10890 {
10891 enum internal_fn ifn = IFN_LAST;
10892 /* The code of the expression corresponding to the built-in. */
10893 enum tree_code opcode = ERROR_MARK;
10894 bool ovf_only = false;
10895
10896 switch (fcode)
10897 {
10898 case BUILT_IN_ADD_OVERFLOW_P:
10899 ovf_only = true;
10900 /* FALLTHRU */
10901 case BUILT_IN_ADD_OVERFLOW:
10902 case BUILT_IN_SADD_OVERFLOW:
10903 case BUILT_IN_SADDL_OVERFLOW:
10904 case BUILT_IN_SADDLL_OVERFLOW:
10905 case BUILT_IN_UADD_OVERFLOW:
10906 case BUILT_IN_UADDL_OVERFLOW:
10907 case BUILT_IN_UADDLL_OVERFLOW:
10908 opcode = PLUS_EXPR;
10909 ifn = IFN_ADD_OVERFLOW;
10910 break;
10911 case BUILT_IN_SUB_OVERFLOW_P:
10912 ovf_only = true;
10913 /* FALLTHRU */
10914 case BUILT_IN_SUB_OVERFLOW:
10915 case BUILT_IN_SSUB_OVERFLOW:
10916 case BUILT_IN_SSUBL_OVERFLOW:
10917 case BUILT_IN_SSUBLL_OVERFLOW:
10918 case BUILT_IN_USUB_OVERFLOW:
10919 case BUILT_IN_USUBL_OVERFLOW:
10920 case BUILT_IN_USUBLL_OVERFLOW:
10921 opcode = MINUS_EXPR;
10922 ifn = IFN_SUB_OVERFLOW;
10923 break;
10924 case BUILT_IN_MUL_OVERFLOW_P:
10925 ovf_only = true;
10926 /* FALLTHRU */
10927 case BUILT_IN_MUL_OVERFLOW:
10928 case BUILT_IN_SMUL_OVERFLOW:
10929 case BUILT_IN_SMULL_OVERFLOW:
10930 case BUILT_IN_SMULLL_OVERFLOW:
10931 case BUILT_IN_UMUL_OVERFLOW:
10932 case BUILT_IN_UMULL_OVERFLOW:
10933 case BUILT_IN_UMULLL_OVERFLOW:
10934 opcode = MULT_EXPR;
10935 ifn = IFN_MUL_OVERFLOW;
10936 break;
10937 default:
10938 gcc_unreachable ();
10939 }
10940
10941 /* For the "generic" overloads, the first two arguments can have different
10942 types and the last argument determines the target type to use to check
10943 for overflow. The arguments of the other overloads all have the same
10944 type. */
10945 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10946
10947 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10948 arguments are constant, attempt to fold the built-in call into a constant
10949 expression indicating whether or not it detected an overflow. */
10950 if (ovf_only
10951 && TREE_CODE (arg0) == INTEGER_CST
10952 && TREE_CODE (arg1) == INTEGER_CST)
10953 /* Perform the computation in the target type and check for overflow. */
10954 return omit_one_operand_loc (loc, boolean_type_node,
10955 arith_overflowed_p (opcode, type, arg0, arg1)
10956 ? boolean_true_node : boolean_false_node,
10957 arg2);
10958
10959 tree intres, ovfres;
10960 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10961 {
10962 intres = fold_binary_loc (loc, opcode, type,
10963 fold_convert_loc (loc, type, arg0),
10964 fold_convert_loc (loc, type, arg1));
10965 if (TREE_OVERFLOW (intres))
10966 intres = drop_tree_overflow (intres);
10967 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10968 ? boolean_true_node : boolean_false_node);
10969 }
10970 else
10971 {
10972 tree ctype = build_complex_type (type);
10973 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10974 arg0, arg1);
10975 tree tgt = save_expr (call);
10976 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10977 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10978 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10979 }
10980
10981 if (ovf_only)
10982 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10983
10984 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10985 tree store
10986 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10987 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
10988 }
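
/* Editor's illustrative sketch, not GCC source: with both operands
   constant the _p form above folds to a constant truth value, while the
   pointer form folds to the IFN_ADD_OVERFLOW pattern yielding both the
   wrapped result and the overflow flag.  Hypothetical helper name.  */
#if 0
#include <limits.h>

static int
add_overflow_demo (void)
{
  int r;
  _Bool ovf_p = __builtin_add_overflow_p (INT_MAX, 1, (int) 0);  /* true */
  _Bool ovf = __builtin_add_overflow (INT_MAX, 1, &r);	   /* true, r == INT_MIN */
  return (ovf_p && ovf) ? r : 0;
}
#endif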
10989
10990 /* Fold a call to __builtin_FILE to a constant string. */
10991
10992 static inline tree
10993 fold_builtin_FILE (location_t loc)
10994 {
10995 if (const char *fname = LOCATION_FILE (loc))
10996 {
10997 /* The documentation says this builtin is equivalent to the preprocessor
10998 __FILE__ macro so it appears appropriate to use the same file prefix
10999 mappings. */
11000 fname = remap_macro_filename (fname);
11001 return build_string_literal (strlen (fname) + 1, fname);
11002 }
11003
11004 return build_string_literal (1, "");
11005 }
11006
11007 /* Fold a call to __builtin_FUNCTION to a constant string. */
11008
11009 static inline tree
11010 fold_builtin_FUNCTION ()
11011 {
11012 const char *name = "";
11013
11014 if (current_function_decl)
11015 name = lang_hooks.decl_printable_name (current_function_decl, 0);
11016
11017 return build_string_literal (strlen (name) + 1, name);
11018 }
11019
11020 /* Fold a call to __builtin_LINE to an integer constant. */
11021
11022 static inline tree
11023 fold_builtin_LINE (location_t loc, tree type)
11024 {
11025 return build_int_cst (type, LOCATION_LINE (loc));
11026 }
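
/* Editor's illustrative sketch, not GCC source: the three folds above
   replace the calls with constants describing the call site, which is how
   a logging helper can capture its caller's location.  log_here and
   LOG_HERE are hypothetical names.  */
#if 0
#include <stdio.h>

static void
log_here (const char *file, int line, const char *func)
{
  fprintf (stderr, "%s:%d: in %s\n", file, line, func);
}

#define LOG_HERE() \
  log_here (__builtin_FILE (), __builtin_LINE (), __builtin_FUNCTION ())
#endif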
11027
11028 /* Fold a call to built-in function FNDECL with 0 arguments.
11029 This function returns NULL_TREE if no simplification was possible. */
11030
11031 static tree
11032 fold_builtin_0 (location_t loc, tree fndecl)
11033 {
11034 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11035 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11036 switch (fcode)
11037 {
11038 case BUILT_IN_FILE:
11039 return fold_builtin_FILE (loc);
11040
11041 case BUILT_IN_FUNCTION:
11042 return fold_builtin_FUNCTION ();
11043
11044 case BUILT_IN_LINE:
11045 return fold_builtin_LINE (loc, type);
11046
11047 CASE_FLT_FN (BUILT_IN_INF):
11048 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
11049 case BUILT_IN_INFD32:
11050 case BUILT_IN_INFD64:
11051 case BUILT_IN_INFD128:
11052 return fold_builtin_inf (loc, type, true);
11053
11054 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
11055 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
11056 return fold_builtin_inf (loc, type, false);
11057
11058 case BUILT_IN_CLASSIFY_TYPE:
11059 return fold_builtin_classify_type (NULL_TREE);
11060
11061 default:
11062 break;
11063 }
11064 return NULL_TREE;
11065 }
11066
11067 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
11068 This function returns NULL_TREE if no simplification was possible. */
11069
11070 static tree
11071 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
11072 {
11073 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11074 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11075
11076 if (TREE_CODE (arg0) == ERROR_MARK)
11077 return NULL_TREE;
11078
11079 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
11080 return ret;
11081
11082 switch (fcode)
11083 {
11084 case BUILT_IN_CONSTANT_P:
11085 {
11086 tree val = fold_builtin_constant_p (arg0);
11087
11088 /* Gimplification will pull the CALL_EXPR for the builtin out of
11089 an if condition. When not optimizing, we'll not CSE it back.
11090 To avoid link-error regressions, return false now. */
11091 if (!val && !optimize)
11092 val = integer_zero_node;
11093
11094 return val;
11095 }
11096
11097 case BUILT_IN_CLASSIFY_TYPE:
11098 return fold_builtin_classify_type (arg0);
11099
11100 case BUILT_IN_STRLEN:
11101 return fold_builtin_strlen (loc, expr, type, arg0);
11102
11103 CASE_FLT_FN (BUILT_IN_FABS):
11104 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11105 case BUILT_IN_FABSD32:
11106 case BUILT_IN_FABSD64:
11107 case BUILT_IN_FABSD128:
11108 return fold_builtin_fabs (loc, arg0, type);
11109
11110 case BUILT_IN_ABS:
11111 case BUILT_IN_LABS:
11112 case BUILT_IN_LLABS:
11113 case BUILT_IN_IMAXABS:
11114 return fold_builtin_abs (loc, arg0, type);
11115
11116 CASE_FLT_FN (BUILT_IN_CONJ):
11117 if (validate_arg (arg0, COMPLEX_TYPE)
11118 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11119 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
11120 break;
11121
11122 CASE_FLT_FN (BUILT_IN_CREAL):
11123 if (validate_arg (arg0, COMPLEX_TYPE)
11124 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11125 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
11126 break;
11127
11128 CASE_FLT_FN (BUILT_IN_CIMAG):
11129 if (validate_arg (arg0, COMPLEX_TYPE)
11130 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
11131 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
11132 break;
11133
11134 CASE_FLT_FN (BUILT_IN_CARG):
11135 return fold_builtin_carg (loc, arg0, type);
11136
11137 case BUILT_IN_ISASCII:
11138 return fold_builtin_isascii (loc, arg0);
11139
11140 case BUILT_IN_TOASCII:
11141 return fold_builtin_toascii (loc, arg0);
11142
11143 case BUILT_IN_ISDIGIT:
11144 return fold_builtin_isdigit (loc, arg0);
11145
11146 CASE_FLT_FN (BUILT_IN_FINITE):
11147 case BUILT_IN_FINITED32:
11148 case BUILT_IN_FINITED64:
11149 case BUILT_IN_FINITED128:
11150 case BUILT_IN_ISFINITE:
11151 {
11152 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
11153 if (ret)
11154 return ret;
11155 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11156 }
11157
11158 CASE_FLT_FN (BUILT_IN_ISINF):
11159 case BUILT_IN_ISINFD32:
11160 case BUILT_IN_ISINFD64:
11161 case BUILT_IN_ISINFD128:
11162 {
11163 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
11164 if (ret)
11165 return ret;
11166 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11167 }
11168
11169 case BUILT_IN_ISNORMAL:
11170 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
11171
11172 case BUILT_IN_ISINF_SIGN:
11173 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
11174
11175 CASE_FLT_FN (BUILT_IN_ISNAN):
11176 case BUILT_IN_ISNAND32:
11177 case BUILT_IN_ISNAND64:
11178 case BUILT_IN_ISNAND128:
11179 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
11180
11181 case BUILT_IN_FREE:
11182 if (integer_zerop (arg0))
11183 return build_empty_stmt (loc);
11184 break;
11185
11186 default:
11187 break;
11188 }
11189
11190 return NULL_TREE;
11192 }
11193
11194 /* Folds a call EXPR (which may be null) to built-in function FNDECL
11195 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
11196 if no simplification was possible. */
11197
11198 static tree
11199 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
11200 {
11201 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11202 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11203
11204 if (TREE_CODE (arg0) == ERROR_MARK
11205 || TREE_CODE (arg1) == ERROR_MARK)
11206 return NULL_TREE;
11207
11208 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
11209 return ret;
11210
11211 switch (fcode)
11212 {
11213 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
11214 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
11215 if (validate_arg (arg0, REAL_TYPE)
11216 && validate_arg (arg1, POINTER_TYPE))
11217 return do_mpfr_lgamma_r (arg0, arg1, type);
11218 break;
11219
11220 CASE_FLT_FN (BUILT_IN_FREXP):
11221 return fold_builtin_frexp (loc, arg0, arg1, type);
11222
11223 CASE_FLT_FN (BUILT_IN_MODF):
11224 return fold_builtin_modf (loc, arg0, arg1, type);
11225
11226 case BUILT_IN_STRSPN:
11227 return fold_builtin_strspn (loc, expr, arg0, arg1);
11228
11229 case BUILT_IN_STRCSPN:
11230 return fold_builtin_strcspn (loc, expr, arg0, arg1);
11231
11232 case BUILT_IN_STRPBRK:
11233 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
11234
11235 case BUILT_IN_EXPECT:
11236 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
11237
11238 case BUILT_IN_ISGREATER:
11239 return fold_builtin_unordered_cmp (loc, fndecl,
11240 arg0, arg1, UNLE_EXPR, LE_EXPR);
11241 case BUILT_IN_ISGREATEREQUAL:
11242 return fold_builtin_unordered_cmp (loc, fndecl,
11243 arg0, arg1, UNLT_EXPR, LT_EXPR);
11244 case BUILT_IN_ISLESS:
11245 return fold_builtin_unordered_cmp (loc, fndecl,
11246 arg0, arg1, UNGE_EXPR, GE_EXPR);
11247 case BUILT_IN_ISLESSEQUAL:
11248 return fold_builtin_unordered_cmp (loc, fndecl,
11249 arg0, arg1, UNGT_EXPR, GT_EXPR);
11250 case BUILT_IN_ISLESSGREATER:
11251 return fold_builtin_unordered_cmp (loc, fndecl,
11252 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
11253 case BUILT_IN_ISUNORDERED:
11254 return fold_builtin_unordered_cmp (loc, fndecl,
11255 arg0, arg1, UNORDERED_EXPR,
11256 NOP_EXPR);
11257
11258 /* We do the folding for va_start in the expander. */
11259 case BUILT_IN_VA_START:
11260 break;
11261
11262 case BUILT_IN_OBJECT_SIZE:
11263 return fold_builtin_object_size (arg0, arg1);
11264
11265 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
11266 return fold_builtin_atomic_always_lock_free (arg0, arg1);
11267
11268 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
11269 return fold_builtin_atomic_is_lock_free (arg0, arg1);
11270
11271 default:
11272 break;
11273 }
11274 return NULL_TREE;
11275 }
11276
11277 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
11278 and ARG2.
11279 This function returns NULL_TREE if no simplification was possible. */
11280
11281 static tree
11282 fold_builtin_3 (location_t loc, tree fndecl,
11283 tree arg0, tree arg1, tree arg2)
11284 {
11285 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11286 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11287
11288 if (TREE_CODE (arg0) == ERROR_MARK
11289 || TREE_CODE (arg1) == ERROR_MARK
11290 || TREE_CODE (arg2) == ERROR_MARK)
11291 return NULL_TREE;
11292
11293 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
11294 arg0, arg1, arg2))
11295 return ret;
11296
11297 switch (fcode)
11298 {
11299
11300 CASE_FLT_FN (BUILT_IN_SINCOS):
11301 return fold_builtin_sincos (loc, arg0, arg1, arg2);
11302
11303 CASE_FLT_FN (BUILT_IN_REMQUO):
11304 if (validate_arg (arg0, REAL_TYPE)
11305 && validate_arg (arg1, REAL_TYPE)
11306 && validate_arg (arg2, POINTER_TYPE))
11307 return do_mpfr_remquo (arg0, arg1, arg2);
11308 break;
11309
11310 case BUILT_IN_MEMCMP:
11311 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
11312
11313 case BUILT_IN_EXPECT:
11314 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
11315
11316 case BUILT_IN_EXPECT_WITH_PROBABILITY:
11317 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
11318
11319 case BUILT_IN_ADD_OVERFLOW:
11320 case BUILT_IN_SUB_OVERFLOW:
11321 case BUILT_IN_MUL_OVERFLOW:
11322 case BUILT_IN_ADD_OVERFLOW_P:
11323 case BUILT_IN_SUB_OVERFLOW_P:
11324 case BUILT_IN_MUL_OVERFLOW_P:
11325 case BUILT_IN_SADD_OVERFLOW:
11326 case BUILT_IN_SADDL_OVERFLOW:
11327 case BUILT_IN_SADDLL_OVERFLOW:
11328 case BUILT_IN_SSUB_OVERFLOW:
11329 case BUILT_IN_SSUBL_OVERFLOW:
11330 case BUILT_IN_SSUBLL_OVERFLOW:
11331 case BUILT_IN_SMUL_OVERFLOW:
11332 case BUILT_IN_SMULL_OVERFLOW:
11333 case BUILT_IN_SMULLL_OVERFLOW:
11334 case BUILT_IN_UADD_OVERFLOW:
11335 case BUILT_IN_UADDL_OVERFLOW:
11336 case BUILT_IN_UADDLL_OVERFLOW:
11337 case BUILT_IN_USUB_OVERFLOW:
11338 case BUILT_IN_USUBL_OVERFLOW:
11339 case BUILT_IN_USUBLL_OVERFLOW:
11340 case BUILT_IN_UMUL_OVERFLOW:
11341 case BUILT_IN_UMULL_OVERFLOW:
11342 case BUILT_IN_UMULLL_OVERFLOW:
11343 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
11344
11345 default:
11346 break;
11347 }
11348 return NULL_TREE;
11349 }
11350
11351 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
11352 ARGS is an array of NARGS arguments. IGNORE is true if the result
11353 of the function call is ignored. This function returns NULL_TREE
11354 if no simplification was possible. */
11355
11356 static tree
11357 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
11358 int nargs, bool)
11359 {
11360 tree ret = NULL_TREE;
11361
11362 switch (nargs)
11363 {
11364 case 0:
11365 ret = fold_builtin_0 (loc, fndecl);
11366 break;
11367 case 1:
11368 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
11369 break;
11370 case 2:
11371 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
11372 break;
11373 case 3:
11374 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
11375 break;
11376 default:
11377 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
11378 break;
11379 }
11380 if (ret)
11381 {
11382 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11383 SET_EXPR_LOCATION (ret, loc);
11384 return ret;
11385 }
11386 return NULL_TREE;
11387 }
11388
11389 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11390 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11391 of arguments in ARGS to be omitted. OLDNARGS is the number of
11392 elements in ARGS. */
11393
11394 static tree
11395 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11396 int skip, tree fndecl, int n, va_list newargs)
11397 {
11398 int nargs = oldnargs - skip + n;
11399 tree *buffer;
11400
11401 if (n > 0)
11402 {
11403 int i, j;
11404
11405 buffer = XALLOCAVEC (tree, nargs);
11406 for (i = 0; i < n; i++)
11407 buffer[i] = va_arg (newargs, tree);
11408 for (j = skip; j < oldnargs; j++, i++)
11409 buffer[i] = args[j];
11410 }
11411 else
11412 buffer = args + skip;
11413
11414 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11415 }
11416
11417 /* Return true if FNDECL shouldn't be folded right now.
11418 If a built-in function has an inline attribute always_inline
11419 wrapper, defer folding it until after always_inline functions have
11420 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11421 might not be performed. */
11422
11423 bool
11424 avoid_folding_inline_builtin (tree fndecl)
11425 {
11426 return (DECL_DECLARED_INLINE_P (fndecl)
11427 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11428 && cfun
11429 && !cfun->always_inline_functions_inlined
11430 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11431 }
11432
11433 /* A wrapper function for builtin folding that prevents warnings for
11434 "statement without effect" and the like, caused by removing the
11435 call node earlier than the warning is generated. */
11436
11437 tree
11438 fold_call_expr (location_t loc, tree exp, bool ignore)
11439 {
11440 tree ret = NULL_TREE;
11441 tree fndecl = get_callee_fndecl (exp);
11442 if (fndecl && fndecl_built_in_p (fndecl)
11443 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11444 yet. Defer folding until we see all the arguments
11445 (after inlining). */
11446 && !CALL_EXPR_VA_ARG_PACK (exp))
11447 {
11448 int nargs = call_expr_nargs (exp);
11449
11450 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11451 instead last argument is __builtin_va_arg_pack (). Defer folding
11452 even in that case, until arguments are finalized. */
11453 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11454 {
11455 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11456 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11457 return NULL_TREE;
11458 }
11459
11460 if (avoid_folding_inline_builtin (fndecl))
11461 return NULL_TREE;
11462
11463 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11464 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11465 CALL_EXPR_ARGP (exp), ignore);
11466 else
11467 {
11468 tree *args = CALL_EXPR_ARGP (exp);
11469 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
11470 if (ret)
11471 return ret;
11472 }
11473 }
11474 return NULL_TREE;
11475 }
11476
11477 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11478 N arguments are passed in the array ARGARRAY. Return a folded
11479 expression or NULL_TREE if no simplification was possible. */
11480
11481 tree
11482 fold_builtin_call_array (location_t loc, tree,
11483 tree fn,
11484 int n,
11485 tree *argarray)
11486 {
11487 if (TREE_CODE (fn) != ADDR_EXPR)
11488 return NULL_TREE;
11489
11490 tree fndecl = TREE_OPERAND (fn, 0);
11491 if (TREE_CODE (fndecl) == FUNCTION_DECL
11492 && fndecl_built_in_p (fndecl))
11493 {
11494 /* If last argument is __builtin_va_arg_pack (), arguments to this
11495 function are not finalized yet. Defer folding until they are. */
11496 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11497 {
11498 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11499 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11500 return NULL_TREE;
11501 }
11502 if (avoid_folding_inline_builtin (fndecl))
11503 return NULL_TREE;
11504 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11505 return targetm.fold_builtin (fndecl, n, argarray, false);
11506 else
11507 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
11508 }
11509
11510 return NULL_TREE;
11511 }
11512
11513 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11514 along with N new arguments specified as the "..." parameters. SKIP
11515 is the number of arguments in EXP to be omitted. This function is used
11516 to do varargs-to-varargs transformations. */
11517
11518 static tree
11519 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11520 {
11521 va_list ap;
11522 tree t;
11523
11524 va_start (ap, n);
11525 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11526 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11527 va_end (ap);
11528
11529 return t;
11530 }
11531
11532 /* Validate a single argument ARG against a tree code CODE representing
11533 a type. Return true when argument is valid. */
11534
11535 static bool
11536 validate_arg (const_tree arg, enum tree_code code)
11537 {
11538 if (!arg)
11539 return false;
11540 else if (code == POINTER_TYPE)
11541 return POINTER_TYPE_P (TREE_TYPE (arg));
11542 else if (code == INTEGER_TYPE)
11543 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11544 return code == TREE_CODE (TREE_TYPE (arg));
11545 }
11546
11547 /* This function validates the types of a function call argument list
11548 against a specified list of tree_codes. If the last specifier is a 0,
11549 that represents an ellipsis; otherwise the last specifier must be a
11550 VOID_TYPE.
11551
11552 This is the GIMPLE version of validate_arglist. Eventually we want to
11553 completely convert builtins.c to work from GIMPLEs and the tree based
11554 validate_arglist will then be removed. */
11555
11556 bool
11557 validate_gimple_arglist (const gcall *call, ...)
11558 {
11559 enum tree_code code;
11560 bool res = false;
11561 va_list ap;
11562 const_tree arg;
11563 size_t i;
11564
11565 va_start (ap, call);
11566 i = 0;
11567
11568 do
11569 {
11570 code = (enum tree_code) va_arg (ap, int);
11571 switch (code)
11572 {
11573 case 0:
11574 /* This signifies an ellipsis; any further arguments are all ok. */
11575 res = true;
11576 goto end;
11577 case VOID_TYPE:
11578 /* This signifies an endlink, if no arguments remain, return
11579 true, otherwise return false. */
11580 res = (i == gimple_call_num_args (call));
11581 goto end;
11582 default:
11583 /* If no parameters remain or the parameter's code does not
11584 match the specified code, return false. Otherwise continue
11585 checking any remaining arguments. */
11586 arg = gimple_call_arg (call, i++);
11587 if (!validate_arg (arg, code))
11588 goto end;
11589 break;
11590 }
11591 }
11592 while (1);
11593
11594 /* We need gotos here since we can only have one va_end in a
11595 function. */
11596 end: ;
11597 va_end (ap);
11598
11599 return res;
11600 }
11601
11602 /* Default target-specific builtin expander that does nothing. */
11603
11604 rtx
11605 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11606 rtx target ATTRIBUTE_UNUSED,
11607 rtx subtarget ATTRIBUTE_UNUSED,
11608 machine_mode mode ATTRIBUTE_UNUSED,
11609 int ignore ATTRIBUTE_UNUSED)
11610 {
11611 return NULL_RTX;
11612 }
11613
11614 /* Returns true if EXP represents data that would potentially reside
11615 in a readonly section. */
11616
11617 bool
11618 readonly_data_expr (tree exp)
11619 {
11620 STRIP_NOPS (exp);
11621
11622 if (TREE_CODE (exp) != ADDR_EXPR)
11623 return false;
11624
11625 exp = get_base_address (TREE_OPERAND (exp, 0));
11626 if (!exp)
11627 return false;
11628
11629 /* Make sure we call decl_readonly_section only for trees it
11630 can handle (since it returns true for everything it doesn't
11631 understand). */
11632 if (TREE_CODE (exp) == STRING_CST
11633 || TREE_CODE (exp) == CONSTRUCTOR
11634 || (VAR_P (exp) && TREE_STATIC (exp)))
11635 return decl_readonly_section (exp, 0);
11636 else
11637 return false;
11638 }
11639
11640 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11641 to the call, and TYPE is its return type.
11642
11643 Return NULL_TREE if no simplification was possible, otherwise return the
11644 simplified form of the call as a tree.
11645
11646 The simplified form may be a constant or other expression which
11647 computes the same value, but in a more efficient manner (including
11648 calls to other builtin functions).
11649
11650 The call may contain arguments which need to be evaluated, but
11651 which are not useful to determine the result of the call. In
11652 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11653 COMPOUND_EXPR will be an argument which must be evaluated.
11654 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11655 COMPOUND_EXPR in the chain will contain the tree for the simplified
11656 form of the builtin function call. */
11657
11658 static tree
11659 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11660 {
11661 if (!validate_arg (s1, POINTER_TYPE)
11662 || !validate_arg (s2, POINTER_TYPE))
11663 return NULL_TREE;
11664
11665 tree fn;
11666 const char *p1, *p2;
11667
11668 p2 = c_getstr (s2);
11669 if (p2 == NULL)
11670 return NULL_TREE;
11671
11672 p1 = c_getstr (s1);
11673 if (p1 != NULL)
11674 {
11675 const char *r = strpbrk (p1, p2);
11676 tree tem;
11677
11678 if (r == NULL)
11679 return build_int_cst (TREE_TYPE (s1), 0);
11680
11681 /* Return an offset into the constant string argument. */
11682 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11683 return fold_convert_loc (loc, type, tem);
11684 }
11685
11686 if (p2[0] == '\0')
11687 /* strpbrk(x, "") == NULL.
11688 Evaluate and ignore s1 in case it had side-effects. */
11689 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11690
11691 if (p2[1] != '\0')
11692 return NULL_TREE; /* Really call strpbrk. */
11693
11694 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11695 if (!fn)
11696 return NULL_TREE;
11697
11698 /* New argument list transforming strpbrk(s1, s2) to
11699 strchr(s1, s2[0]). */
11700 return build_call_expr_loc (loc, fn, 2, s1,
11701 build_int_cst (integer_type_node, p2[0]));
11702 }
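
/* Editor's illustrative sketch, not GCC source: with a single-character
   reject set the fold above turns strpbrk into strchr, and with an empty
   set it folds straight to a null pointer.  Hypothetical helper name.  */
#if 0
#include <string.h>

static char *
first_slash (char *s)
{
  return strpbrk (s, "/");   /* folds to strchr (s, '/') */
}
#endif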
11703
11704 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11705 to the call.
11706
11707 Return NULL_TREE if no simplification was possible, otherwise return the
11708 simplified form of the call as a tree.
11709
11710 The simplified form may be a constant or other expression which
11711 computes the same value, but in a more efficient manner (including
11712 calls to other builtin functions).
11713
11714 The call may contain arguments which need to be evaluated, but
11715 which are not useful to determine the result of the call. In
11716 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11717 COMPOUND_EXPR will be an argument which must be evaluated.
11718 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11719 COMPOUND_EXPR in the chain will contain the tree for the simplified
11720 form of the builtin function call. */
11721
11722 static tree
11723 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
11724 {
11725 if (!validate_arg (s1, POINTER_TYPE)
11726 || !validate_arg (s2, POINTER_TYPE))
11727 return NULL_TREE;
11728
11729 if (!check_nul_terminated_array (expr, s1)
11730 || !check_nul_terminated_array (expr, s2))
11731 return NULL_TREE;
11732
11733 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11734
11735 /* If either argument is "", return NULL_TREE. */
11736 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11737 /* Evaluate and ignore both arguments in case either one has
11738 side-effects. */
11739 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11740 s1, s2);
11741 return NULL_TREE;
11742 }
11743
11744 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11745 to the call.
11746
11747 Return NULL_TREE if no simplification was possible, otherwise return the
11748 simplified form of the call as a tree.
11749
11750 The simplified form may be a constant or other expression which
11751 computes the same value, but in a more efficient manner (including
11752 calls to other builtin functions).
11753
11754 The call may contain arguments which need to be evaluated, but
11755 which are not useful to determine the result of the call. In
11756 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11757 COMPOUND_EXPR will be an argument which must be evaluated.
11758 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11759 COMPOUND_EXPR in the chain will contain the tree for the simplified
11760 form of the builtin function call. */
11761
11762 static tree
11763 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
11764 {
11765 if (!validate_arg (s1, POINTER_TYPE)
11766 || !validate_arg (s2, POINTER_TYPE))
11767 return NULL_TREE;
11768
11769 if (!check_nul_terminated_array (expr, s1)
11770 || !check_nul_terminated_array (expr, s2))
11771 return NULL_TREE;
11772
11773 /* If the first argument is "", return NULL_TREE. */
11774 const char *p1 = c_getstr (s1);
11775 if (p1 && *p1 == '\0')
11776 {
11777 /* Evaluate and ignore argument s2 in case it has
11778 side-effects. */
11779 return omit_one_operand_loc (loc, size_type_node,
11780 size_zero_node, s2);
11781 }
11782
11783 /* If the second argument is "", return __builtin_strlen(s1). */
11784 const char *p2 = c_getstr (s2);
11785 if (p2 && *p2 == '\0')
11786 {
11787 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11788
11789 /* If the replacement _DECL isn't initialized, don't do the
11790 transformation. */
11791 if (!fn)
11792 return NULL_TREE;
11793
11794 return build_call_expr_loc (loc, fn, 1, s1);
11795 }
11796 return NULL_TREE;
11797 }
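
/* Editor's illustrative sketch, not GCC source: the "" special cases of
   the two folds above.  strcspn (s, "") becomes strlen (s) because no
   character can ever match, and strspn with an empty argument folds to 0.
   Hypothetical helper name.  */
#if 0
#include <string.h>

static size_t
strcspn_empty_reject (const char *s)
{
  return strcspn (s, "");   /* folds to strlen (s) */
}
#endif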
11798
11799 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11800 produced, false otherwise. This is done so that we don't output the
11801 error or warning more than once. */
11802
11803 bool
11804 fold_builtin_next_arg (tree exp, bool va_start_p)
11805 {
11806 tree fntype = TREE_TYPE (current_function_decl);
11807 int nargs = call_expr_nargs (exp);
11808 tree arg;
11809 /* There is a good chance the current input_location points inside the
11810 definition of the va_start macro (perhaps on the token for
11811 builtin) in a system header, so warnings will not be emitted.
11812 Use the location in real source code. */
11813 location_t current_location =
11814 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11815 NULL);
11816
11817 if (!stdarg_p (fntype))
11818 {
11819 error ("%<va_start%> used in function with fixed arguments");
11820 return true;
11821 }
11822
11823 if (va_start_p)
11824 {
11825 if (va_start_p && (nargs != 2))
11826 {
11827 error ("wrong number of arguments to function %<va_start%>");
11828 return true;
11829 }
11830 arg = CALL_EXPR_ARG (exp, 1);
11831 }
11832 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11833 when we checked the arguments and if needed issued a warning. */
11834 else
11835 {
11836 if (nargs == 0)
11837 {
11838 /* Evidently an out of date version of <stdarg.h>; can't validate
11839 va_start's second argument, but can still work as intended. */
11840 warning_at (current_location,
11841 OPT_Wvarargs,
11842 "%<__builtin_next_arg%> called without an argument");
11843 return true;
11844 }
11845 else if (nargs > 1)
11846 {
11847 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11848 return true;
11849 }
11850 arg = CALL_EXPR_ARG (exp, 0);
11851 }
11852
11853 if (TREE_CODE (arg) == SSA_NAME)
11854 arg = SSA_NAME_VAR (arg);
11855
11856 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11857 or __builtin_next_arg (0) the first time we see it, after checking
11858 the arguments and if needed issuing a warning. */
11859 if (!integer_zerop (arg))
11860 {
11861 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11862
11863 /* Strip off all nops for the sake of the comparison. This
11864 is not quite the same as STRIP_NOPS. It does more.
11865 We must also strip off INDIRECT_EXPR for C++ reference
11866 parameters. */
11867 while (CONVERT_EXPR_P (arg)
11868 || TREE_CODE (arg) == INDIRECT_REF)
11869 arg = TREE_OPERAND (arg, 0);
11870 if (arg != last_parm)
11871 {
11872 /* FIXME: Sometimes with the tree optimizers we can end up with
11873 something other than the last argument even though the user
11874 used the last argument. We just warn and set the arg to be
11875 the last argument so that we will get wrong-code because of
11876 it. */
11877 warning_at (current_location,
11878 OPT_Wvarargs,
11879 "second parameter of %<va_start%> not last named argument");
11880 }
11881
11882 /* Undefined by C99 7.15.1.4p4 (va_start):
11883 "If the parameter parmN is declared with the register storage
11884 class, with a function or array type, or with a type that is
11885 not compatible with the type that results after application of
11886 the default argument promotions, the behavior is undefined."
11887 */
11888 else if (DECL_REGISTER (arg))
11889 {
11890 warning_at (current_location,
11891 OPT_Wvarargs,
11892 "undefined behavior when second parameter of "
11893 "%<va_start%> is declared with %<register%> storage");
11894 }
11895
11896 /* We want to verify the second parameter just once before the tree
11897 optimizers are run and then avoid keeping it in the tree,
11898 as otherwise we could warn even for correct code like:
11899 void foo (int i, ...)
11900 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11901 if (va_start_p)
11902 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11903 else
11904 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11905 }
11906 return false;
11907 }
11908
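/* For illustration, a minimal sketch of user code that the checks above
   diagnose (a hypothetical translation unit, not part of this file):

     #include <stdarg.h>

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   Here the second argument of va_start is A rather than the last named
   parameter B, so the -Wvarargs warning above is emitted; using va_start
   in a function with a fixed argument list is rejected with an error
   instead.  */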
11909
11910 /* Expand a call EXP to __builtin_object_size. */
11911
11912 static rtx
11913 expand_builtin_object_size (tree exp)
11914 {
11915 tree ost;
11916 int object_size_type;
11917 tree fndecl = get_callee_fndecl (exp);
11918
11919 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11920 {
11921 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
11922 exp, fndecl);
11923 expand_builtin_trap ();
11924 return const0_rtx;
11925 }
11926
11927 ost = CALL_EXPR_ARG (exp, 1);
11928 STRIP_NOPS (ost);
11929
11930 if (TREE_CODE (ost) != INTEGER_CST
11931 || tree_int_cst_sgn (ost) < 0
11932 || compare_tree_int (ost, 3) > 0)
11933 {
11934 error ("%Klast argument of %qD is not integer constant between 0 and 3",
11935 exp, fndecl);
11936 expand_builtin_trap ();
11937 return const0_rtx;
11938 }
11939
11940 object_size_type = tree_to_shwi (ost);
11941
11942 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11943 }
11944
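/* A minimal sketch of the fallback implemented above, assuming the size
   of the pointed-to object could not be determined by earlier folding
   (hypothetical user code):

     extern void *p;

     __SIZE_TYPE__ hi (void) { return __builtin_object_size (p, 0); }
     __SIZE_TYPE__ lo (void) { return __builtin_object_size (p, 2); }

   The first call expands to (size_t) -1 and the second to (size_t) 0:
   types 0 and 1 ask for an upper bound, so the unknown answer is the
   maximum size, while types 2 and 3 ask for a lower bound, so the
   unknown answer is zero.  */
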
11945 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11946 FCODE is the BUILT_IN_* to use.
11947 Return NULL_RTX if we failed; the caller should emit a normal call,
11948 otherwise try to get the result in TARGET, if convenient (and in
11949 mode MODE if that's convenient). */
11950
11951 static rtx
11952 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11953 enum built_in_function fcode)
11954 {
11955 if (!validate_arglist (exp,
11956 POINTER_TYPE,
11957 fcode == BUILT_IN_MEMSET_CHK
11958 ? INTEGER_TYPE : POINTER_TYPE,
11959 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11960 return NULL_RTX;
11961
11962 tree dest = CALL_EXPR_ARG (exp, 0);
11963 tree src = CALL_EXPR_ARG (exp, 1);
11964 tree len = CALL_EXPR_ARG (exp, 2);
11965 tree size = CALL_EXPR_ARG (exp, 3);
11966
11967 /* FIXME: Set access mode to write only for memset et al. */
11968 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11969 /*srcstr=*/NULL_TREE, size, access_read_write);
11970
11971 if (!tree_fits_uhwi_p (size))
11972 return NULL_RTX;
11973
11974 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11975 {
11976 /* Avoid transforming the checking call to an ordinary one when
11977 an overflow has been detected or when the call couldn't be
11978 validated because the size is not constant. */
11979 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11980 return NULL_RTX;
11981
11982 tree fn = NULL_TREE;
11983 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11984 mem{cpy,pcpy,move,set} is available. */
11985 switch (fcode)
11986 {
11987 case BUILT_IN_MEMCPY_CHK:
11988 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11989 break;
11990 case BUILT_IN_MEMPCPY_CHK:
11991 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11992 break;
11993 case BUILT_IN_MEMMOVE_CHK:
11994 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11995 break;
11996 case BUILT_IN_MEMSET_CHK:
11997 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11998 break;
11999 default:
12000 break;
12001 }
12002
12003 if (! fn)
12004 return NULL_RTX;
12005
12006 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12007 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12008 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12009 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12010 }
12011 else if (fcode == BUILT_IN_MEMSET_CHK)
12012 return NULL_RTX;
12013 else
12014 {
12015 unsigned int dest_align = get_pointer_alignment (dest);
12016
12017 /* If DEST is not a pointer type, call the normal function. */
12018 if (dest_align == 0)
12019 return NULL_RTX;
12020
12021 /* If SRC and DEST are the same (and not volatile), do nothing. */
12022 if (operand_equal_p (src, dest, 0))
12023 {
12024 tree expr;
12025
12026 if (fcode != BUILT_IN_MEMPCPY_CHK)
12027 {
12028 /* Evaluate and ignore LEN in case it has side-effects. */
12029 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12030 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12031 }
12032
12033 expr = fold_build_pointer_plus (dest, len);
12034 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12035 }
12036
12037 /* __memmove_chk special case. */
12038 if (fcode == BUILT_IN_MEMMOVE_CHK)
12039 {
12040 unsigned int src_align = get_pointer_alignment (src);
12041
12042 if (src_align == 0)
12043 return NULL_RTX;
12044
12045 /* If src is categorized for a readonly section we can use
12046 normal __memcpy_chk. */
12047 if (readonly_data_expr (src))
12048 {
12049 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12050 if (!fn)
12051 return NULL_RTX;
12052 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12053 dest, src, len, size);
12054 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12055 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12056 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12057 }
12058 }
12059 return NULL_RTX;
12060 }
12061 }
12062
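/* A minimal sketch of the transformation above (hypothetical user code):

     char buf[8];

     void copy4 (const char *src)
     {
       __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
     }

   The object size folds to 8 and the constant length 4 fits, so the
   checking call is expanded as a plain memcpy (buf, src, 4).  If the
   length exceeded the known object size, NULL_RTX would be returned and
   the call would remain a library __memcpy_chk call, diagnosing the
   overflow at run time.  */
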
12063 /* Emit warning if a buffer overflow is detected at compile time. */
12064
12065 static void
12066 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12067 {
12068 /* The source string. */
12069 tree srcstr = NULL_TREE;
12070 /* The size of the destination object returned by __builtin_object_size. */
12071 tree objsize = NULL_TREE;
12072 /* The string to which the source is being appended (as in __strcat_chk),
12073 or null if there is none. */
12074 tree catstr = NULL_TREE;
12075 /* The maximum length of the source sequence in a bounded operation
12076 (such as __strncat_chk) or null if the operation isn't bounded
12077 (such as __strcat_chk). */
12078 tree maxread = NULL_TREE;
12079 /* The exact size of the access (such as in __strncpy_chk). */
12080 tree size = NULL_TREE;
12081 /* The access by the function that's checked. Except for snprintf,
12082 both writing and reading are checked. */
12083 access_mode mode = access_read_write;
12084
12085 switch (fcode)
12086 {
12087 case BUILT_IN_STRCPY_CHK:
12088 case BUILT_IN_STPCPY_CHK:
12089 srcstr = CALL_EXPR_ARG (exp, 1);
12090 objsize = CALL_EXPR_ARG (exp, 2);
12091 break;
12092
12093 case BUILT_IN_STRCAT_CHK:
12094 /* For __strcat_chk the warning will be emitted only if overflowing
12095 by at least strlen (dest) + 1 bytes. */
12096 catstr = CALL_EXPR_ARG (exp, 0);
12097 srcstr = CALL_EXPR_ARG (exp, 1);
12098 objsize = CALL_EXPR_ARG (exp, 2);
12099 break;
12100
12101 case BUILT_IN_STRNCAT_CHK:
12102 catstr = CALL_EXPR_ARG (exp, 0);
12103 srcstr = CALL_EXPR_ARG (exp, 1);
12104 maxread = CALL_EXPR_ARG (exp, 2);
12105 objsize = CALL_EXPR_ARG (exp, 3);
12106 break;
12107
12108 case BUILT_IN_STRNCPY_CHK:
12109 case BUILT_IN_STPNCPY_CHK:
12110 srcstr = CALL_EXPR_ARG (exp, 1);
12111 size = CALL_EXPR_ARG (exp, 2);
12112 objsize = CALL_EXPR_ARG (exp, 3);
12113 break;
12114
12115 case BUILT_IN_SNPRINTF_CHK:
12116 case BUILT_IN_VSNPRINTF_CHK:
12117 maxread = CALL_EXPR_ARG (exp, 1);
12118 objsize = CALL_EXPR_ARG (exp, 3);
12119 /* The only checked access is the write to the destination. */
12120 mode = access_write_only;
12121 break;
12122 default:
12123 gcc_unreachable ();
12124 }
12125
12126 if (catstr && maxread)
12127 {
12128 /* Check __strncat_chk. There is no way to determine the length
12129 of the string to which the source string is being appended so
12130 just warn when the length of the source string is not known. */
12131 check_strncat_sizes (exp, objsize);
12132 return;
12133 }
12134
12135 check_access (exp, size, maxread, srcstr, objsize, mode);
12136 }
12137
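/* For illustration, a call that the checking above warns about
   (hypothetical user code):

     char d[4];

     void fill (void)
     {
       __builtin___strcpy_chk (d, "too long", __builtin_object_size (d, 1));
     }

   The source string needs 9 bytes including the terminating nul, which
   exceeds the 4-byte destination object, so check_access reports a
   compile-time buffer overflow (a -Wstringop-overflow warning).  */
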
12138 /* Emit warning if a buffer overflow is detected at compile time
12139 in __sprintf_chk/__vsprintf_chk calls. */
12140
12141 static void
12142 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12143 {
12144 tree size, len, fmt;
12145 const char *fmt_str;
12146 int nargs = call_expr_nargs (exp);
12147
12148 /* Verify the required arguments in the original call. */
12149
12150 if (nargs < 4)
12151 return;
12152 size = CALL_EXPR_ARG (exp, 2);
12153 fmt = CALL_EXPR_ARG (exp, 3);
12154
12155 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12156 return;
12157
12158 /* Check whether the format is a literal string constant. */
12159 fmt_str = c_getstr (fmt);
12160 if (fmt_str == NULL)
12161 return;
12162
12163 if (!init_target_chars ())
12164 return;
12165
12166 /* If the format doesn't contain % args or %%, we know its size. */
12167 if (strchr (fmt_str, target_percent) == 0)
12168 len = build_int_cstu (size_type_node, strlen (fmt_str));
12169 /* If the format is "%s" and first ... argument is a string literal,
12170 we know it too. */
12171 else if (fcode == BUILT_IN_SPRINTF_CHK
12172 && strcmp (fmt_str, target_percent_s) == 0)
12173 {
12174 tree arg;
12175
12176 if (nargs < 5)
12177 return;
12178 arg = CALL_EXPR_ARG (exp, 4);
12179 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12180 return;
12181
12182 len = c_strlen (arg, 1);
12183 if (!len || ! tree_fits_uhwi_p (len))
12184 return;
12185 }
12186 else
12187 return;
12188
12189 /* Add one for the terminating nul. */
12190 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
12191
12192 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
12193 access_write_only);
12194 }
12195
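/* A minimal sketch of a case the function above can prove (hypothetical
   user code):

     char d[4];

     void print (void)
     {
       __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1),
                                "%s", "abcdef");
     }

   The format is exactly "%s" and its argument is a string literal, so
   the output length of 6 plus the terminating nul is known at compile
   time and is checked against the 4-byte destination.  */
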
12196 /* Emit a warning if free is called with the address of a variable. */
12197
12198 static void
12199 maybe_emit_free_warning (tree exp)
12200 {
12201 if (call_expr_nargs (exp) != 1)
12202 return;
12203
12204 tree arg = CALL_EXPR_ARG (exp, 0);
12205
12206 STRIP_NOPS (arg);
12207 if (TREE_CODE (arg) != ADDR_EXPR)
12208 return;
12209
12210 arg = get_base_address (TREE_OPERAND (arg, 0));
12211 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12212 return;
12213
12214 if (SSA_VAR_P (arg))
12215 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12216 "%Kattempt to free a non-heap object %qD", exp, arg);
12217 else
12218 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12219 "%Kattempt to free a non-heap object", exp);
12220 }
12221
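/* For illustration, a call the warning above targets (hypothetical user
   code):

     void g (void)
     {
       int x;
       __builtin_free (&x);
     }

   The freed address is the address of an auto variable, so
   -Wfree-nonheap-object is reported.  Bases reached through pointer
   dereferences (INDIRECT_REF or MEM_REF) are deliberately skipped above,
   since they may legitimately refer to heap storage.  */
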
12222 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12223 if possible. */
12224
12225 static tree
12226 fold_builtin_object_size (tree ptr, tree ost)
12227 {
12228 unsigned HOST_WIDE_INT bytes;
12229 int object_size_type;
12230
12231 if (!validate_arg (ptr, POINTER_TYPE)
12232 || !validate_arg (ost, INTEGER_TYPE))
12233 return NULL_TREE;
12234
12235 STRIP_NOPS (ost);
12236
12237 if (TREE_CODE (ost) != INTEGER_CST
12238 || tree_int_cst_sgn (ost) < 0
12239 || compare_tree_int (ost, 3) > 0)
12240 return NULL_TREE;
12241
12242 object_size_type = tree_to_shwi (ost);
12243
12244 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12245 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12246 and (size_t) 0 for types 2 and 3. */
12247 if (TREE_SIDE_EFFECTS (ptr))
12248 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12249
12250 if (TREE_CODE (ptr) == ADDR_EXPR)
12251 {
12252 compute_builtin_object_size (ptr, object_size_type, &bytes);
12253 if (wi::fits_to_tree_p (bytes, size_type_node))
12254 return build_int_cstu (size_type_node, bytes);
12255 }
12256 else if (TREE_CODE (ptr) == SSA_NAME)
12257 {
12258 /* If the object size is not known yet, delay folding until
12259 later. Maybe subsequent passes will help determine
12260 it. */
12261 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
12262 && wi::fits_to_tree_p (bytes, size_type_node))
12263 return build_int_cstu (size_type_node, bytes);
12264 }
12265
12266 return NULL_TREE;
12267 }
12268
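/* A minimal sketch of the compile-time folding above (hypothetical user
   code):

     char buf[16];

     __SIZE_TYPE__ tail (void) { return __builtin_object_size (&buf[4], 0); }

   The argument is an ADDR_EXPR, so compute_builtin_object_size can see
   that 12 bytes remain past the offset and the call folds to 12.  An
   SSA_NAME argument whose size is not yet known is left unfolded so that
   later passes can retry.  */
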
12269 /* Builtins with folding operations that operate on "..." arguments
12270 need special handling; we need to store the arguments in a convenient
12271 data structure before attempting any folding. Fortunately there are
12272 only a few builtins that fall into this category. FNDECL is the
12273 function, EXP is the CALL_EXPR for the call. */
12274
12275 static tree
12276 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
12277 {
12278 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12279 tree ret = NULL_TREE;
12280
12281 switch (fcode)
12282 {
12283 case BUILT_IN_FPCLASSIFY:
12284 ret = fold_builtin_fpclassify (loc, args, nargs);
12285 break;
12286
12287 default:
12288 break;
12289 }
12290 if (ret)
12291 {
12292 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12293 SET_EXPR_LOCATION (ret, loc);
12294 TREE_NO_WARNING (ret) = 1;
12295 return ret;
12296 }
12297 return NULL_TREE;
12298 }
12299
12300 /* Initialize format string characters in the target charset. */
12301
12302 bool
12303 init_target_chars (void)
12304 {
12305 static bool init;
12306 if (!init)
12307 {
12308 target_newline = lang_hooks.to_target_charset ('\n');
12309 target_percent = lang_hooks.to_target_charset ('%');
12310 target_c = lang_hooks.to_target_charset ('c');
12311 target_s = lang_hooks.to_target_charset ('s');
12312 if (target_newline == 0 || target_percent == 0 || target_c == 0
12313 || target_s == 0)
12314 return false;
12315
12316 target_percent_c[0] = target_percent;
12317 target_percent_c[1] = target_c;
12318 target_percent_c[2] = '\0';
12319
12320 target_percent_s[0] = target_percent;
12321 target_percent_s[1] = target_s;
12322 target_percent_s[2] = '\0';
12323
12324 target_percent_s_newline[0] = target_percent;
12325 target_percent_s_newline[1] = target_s;
12326 target_percent_s_newline[2] = target_newline;
12327 target_percent_s_newline[3] = '\0';
12328
12329 init = true;
12330 }
12331 return true;
12332 }
12333
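/* The conversions above matter when the target character set differs
   from the host's (for example on EBCDIC targets): format strings seen
   through c_getstr are in the target charset, so callers such as
   maybe_emit_sprintf_chk_warning above compare against target_percent
   rather than a host '%'.  After a successful call, target_percent_s
   holds "%s" and target_percent_s_newline holds "%s\n", both in the
   target charset.  */
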
12334 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12335 and no overflow/underflow occurred. INEXACT is true if M was not
12336 exactly calculated. TYPE is the tree type for the result. This
12337 function assumes that the caller cleared the MPFR flags before
12338 calculating M, so that any flag set since then can be detected
12339 here. Return NULL_TREE if any checks fail. */
12340
12341 static tree
12342 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12343 {
12344 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12345 overflow/underflow occurred. If -frounding-math, proceed iff the
12346 result of calling FUNC was exact. */
12347 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12348 && (!flag_rounding_math || !inexact))
12349 {
12350 REAL_VALUE_TYPE rr;
12351
12352 real_from_mpfr (&rr, m, type, MPFR_RNDN);
12353 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value and
12354 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12355 but the mpfr_t is not, then we underflowed in the
12356 conversion. */
12357 if (real_isfinite (&rr)
12358 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12359 {
12360 REAL_VALUE_TYPE rmode;
12361
12362 real_convert (&rmode, TYPE_MODE (type), &rr);
12363 /* Proceed iff the specified mode can hold the value. */
12364 if (real_identical (&rmode, &rr))
12365 return build_real (type, rmode);
12366 }
12367 }
12368 return NULL_TREE;
12369 }
12370
12371 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12372 number and no overflow/underflow occurred. INEXACT is true if M
12373 was not exactly calculated. TYPE is the tree type for the result.
12374 This function assumes that the caller cleared the MPFR flags before
12375 calculating M, so that any flag set since then can be detected
12376 here. Return NULL_TREE if any checks fail; if FORCE_CONVERT is
12377 true, the checks are bypassed. */
12378
12379 static tree
12380 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12381 {
12382 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12383 overflow/underflow occurred. If -frounding-math, proceed iff the
12384 result of calling FUNC was exact. */
12385 if (force_convert
12386 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12387 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12388 && (!flag_rounding_math || !inexact)))
12389 {
12390 REAL_VALUE_TYPE re, im;
12391
12392 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
12393 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
12394 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values and
12395 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12396 but the mpfr_t is not, then we underflowed in the
12397 conversion. */
12398 if (force_convert
12399 || (real_isfinite (&re) && real_isfinite (&im)
12400 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12401 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12402 {
12403 REAL_VALUE_TYPE re_mode, im_mode;
12404
12405 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12406 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12407 /* Proceed iff the specified mode can hold the value. */
12408 if (force_convert
12409 || (real_identical (&re_mode, &re)
12410 && real_identical (&im_mode, &im)))
12411 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12412 build_real (TREE_TYPE (type), im_mode));
12413 }
12414 }
12415 return NULL_TREE;
12416 }
12417
12418 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12419 the pointer *(ARG_QUO) and return the result. The type is taken
12420 from the type of ARG0 and is used for setting the precision of the
12421 calculation and results. */
12422
12423 static tree
12424 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12425 {
12426 tree const type = TREE_TYPE (arg0);
12427 tree result = NULL_TREE;
12428
12429 STRIP_NOPS (arg0);
12430 STRIP_NOPS (arg1);
12431
12432 /* To proceed, MPFR must exactly represent the target floating point
12433 format, which only happens when the target base equals two. */
12434 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12435 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12436 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12437 {
12438 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12439 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12440
12441 if (real_isfinite (ra0) && real_isfinite (ra1))
12442 {
12443 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12444 const int prec = fmt->p;
12445 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12446 tree result_rem;
12447 long integer_quo;
12448 mpfr_t m0, m1;
12449
12450 mpfr_inits2 (prec, m0, m1, NULL);
12451 mpfr_from_real (m0, ra0, MPFR_RNDN);
12452 mpfr_from_real (m1, ra1, MPFR_RNDN);
12453 mpfr_clear_flags ();
12454 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12455 /* Remquo is independent of the rounding mode, so pass
12456 inexact=0 to do_mpfr_ckconv(). */
12457 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12458 mpfr_clears (m0, m1, NULL);
12459 if (result_rem)
12460 {
12461 /* MPFR calculates quo in the host's long, so it may
12462 return more bits in quo than the target int can hold
12463 if sizeof(host long) > sizeof(target int). This can
12464 happen even for native compilers in LP64 mode. In
12465 these cases, reduce the quo value modulo the largest
12466 number that the target int can hold, leaving one
12467 bit for the sign. */
12468 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12469 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12470
12471 /* Dereference the quo pointer argument. */
12472 arg_quo = build_fold_indirect_ref (arg_quo);
12473 /* Proceed iff a valid pointer type was passed in. */
12474 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12475 {
12476 /* Set the value. */
12477 tree result_quo
12478 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12479 build_int_cst (TREE_TYPE (arg_quo),
12480 integer_quo));
12481 TREE_SIDE_EFFECTS (result_quo) = 1;
12482 /* Combine the quo assignment with the rem. */
12483 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12484 result_quo, result_rem));
12485 }
12486 }
12487 }
12488 }
12489 return result;
12490 }
12491
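/* A minimal sketch of the folding above (hypothetical user code):

     double f (void)
     {
       int q;
       return __builtin_remquo (7.0, 2.0, &q);
     }

   With both operands constant, mpfr_remquo computes the quotient rounded
   to nearest (4, since 3.5 rounds to even) and the exact remainder
   7.0 - 4*2.0 = -1.0; the call folds to a COMPOUND_EXPR that stores 4
   in q and yields -1.0.  */
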
12492 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12493 resulting value as a tree with type TYPE. The mpfr precision is
12494 set to the precision of TYPE. We assume that this mpfr function
12495 returns zero if the result could be calculated exactly within the
12496 requested precision. In addition, the integer pointer represented
12497 by ARG_SG will be dereferenced and set to the appropriate signgam
12498 (-1,1) value. */
12499
12500 static tree
12501 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12502 {
12503 tree result = NULL_TREE;
12504
12505 STRIP_NOPS (arg);
12506
12507 /* To proceed, MPFR must exactly represent the target floating point
12508 format, which only happens when the target base equals two. Also
12509 verify ARG is a constant and that ARG_SG is an int pointer. */
12510 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12511 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12512 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12513 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12514 {
12515 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12516
12517 /* In addition to NaN and Inf, the argument cannot be zero or a
12518 negative integer. */
12519 if (real_isfinite (ra)
12520 && ra->cl != rvc_zero
12521 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12522 {
12523 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12524 const int prec = fmt->p;
12525 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12526 int inexact, sg;
12527 mpfr_t m;
12528 tree result_lg;
12529
12530 mpfr_init2 (m, prec);
12531 mpfr_from_real (m, ra, MPFR_RNDN);
12532 mpfr_clear_flags ();
12533 inexact = mpfr_lgamma (m, &sg, m, rnd);
12534 result_lg = do_mpfr_ckconv (m, type, inexact);
12535 mpfr_clear (m);
12536 if (result_lg)
12537 {
12538 tree result_sg;
12539
12540 /* Dereference the arg_sg pointer argument. */
12541 arg_sg = build_fold_indirect_ref (arg_sg);
12542 /* Assign the signgam value into *arg_sg. */
12543 result_sg = fold_build2 (MODIFY_EXPR,
12544 TREE_TYPE (arg_sg), arg_sg,
12545 build_int_cst (TREE_TYPE (arg_sg), sg));
12546 TREE_SIDE_EFFECTS (result_sg) = 1;
12547 /* Combine the signgam assignment with the lgamma result. */
12548 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12549 result_sg, result_lg));
12550 }
12551 }
12552 }
12553
12554 return result;
12555 }
12556
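/* For illustration (hypothetical user code):

     double f (int *sg)
     {
       return __builtin_lgamma_r (3.0, sg);
     }

   Gamma(3) is 2, so the call can fold to a COMPOUND_EXPR that stores the
   sign 1 in *sg and yields log(2.0) rounded to double precision
   (roughly 0.693).  */
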
12557 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12558 mpc function FUNC on it and return the resulting value as a tree
12559 with type TYPE. The mpfr precision is set to the precision of
12560 TYPE. We assume that function FUNC returns zero if the result
12561 could be calculated exactly within the requested precision. If
12562 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12563 in the arguments and/or results. */
12564
12565 tree
12566 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12567 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12568 {
12569 tree result = NULL_TREE;
12570
12571 STRIP_NOPS (arg0);
12572 STRIP_NOPS (arg1);
12573
12574 /* To proceed, MPFR must exactly represent the target floating point
12575 format, which only happens when the target base equals two. */
12576 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12577 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12578 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12579 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12580 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12581 {
12582 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12583 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12584 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12585 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12586
12587 if (do_nonfinite
12588 || (real_isfinite (re0) && real_isfinite (im0)
12589 && real_isfinite (re1) && real_isfinite (im1)))
12590 {
12591 const struct real_format *const fmt =
12592 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12593 const int prec = fmt->p;
12594 const mpfr_rnd_t rnd = fmt->round_towards_zero
12595 ? MPFR_RNDZ : MPFR_RNDN;
12596 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12597 int inexact;
12598 mpc_t m0, m1;
12599
12600 mpc_init2 (m0, prec);
12601 mpc_init2 (m1, prec);
12602 mpfr_from_real (mpc_realref (m0), re0, rnd);
12603 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12604 mpfr_from_real (mpc_realref (m1), re1, rnd);
12605 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12606 mpfr_clear_flags ();
12607 inexact = func (m0, m0, m1, crnd);
12608 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12609 mpc_clear (m0);
12610 mpc_clear (m1);
12611 }
12612 }
12613
12614 return result;
12615 }
12616
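/* A minimal sketch of a use of the function above: fold-const-call.c
   passes mpc_pow here when folding __builtin_cpow with constant
   arguments (hypothetical user code):

     _Complex double f (void)
     {
       return __builtin_cpow (2.0, 2.0);
     }

   Both operands fold to COMPLEX_CSTs, so the power is evaluated in
   MPC/MPFR at the precision of the type and, when representable,
   converted back to a complex constant (4.0 here).  */
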
12617 /* A wrapper function for builtin folding that prevents warnings for
12618 "statement without effect" and the like, caused by removing the
12619 call node before the warning is generated. */
12620
12621 tree
12622 fold_call_stmt (gcall *stmt, bool ignore)
12623 {
12624 tree ret = NULL_TREE;
12625 tree fndecl = gimple_call_fndecl (stmt);
12626 location_t loc = gimple_location (stmt);
12627 if (fndecl && fndecl_built_in_p (fndecl)
12628 && !gimple_call_va_arg_pack_p (stmt))
12629 {
12630 int nargs = gimple_call_num_args (stmt);
12631 tree *args = (nargs > 0
12632 ? gimple_call_arg_ptr (stmt, 0)
12633 : &error_mark_node);
12634
12635 if (avoid_folding_inline_builtin (fndecl))
12636 return NULL_TREE;
12637 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12638 {
12639 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12640 }
12641 else
12642 {
12643 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12644 if (ret)
12645 {
12646 /* Propagate location information from original call to
12647 expansion of builtin. Otherwise things like
12648 maybe_emit_chk_warning, that operate on the expansion
12649 of a builtin, will use the wrong location information. */
12650 if (gimple_has_location (stmt))
12651 {
12652 tree realret = ret;
12653 if (TREE_CODE (ret) == NOP_EXPR)
12654 realret = TREE_OPERAND (ret, 0);
12655 if (CAN_HAVE_LOCATION_P (realret)
12656 && !EXPR_HAS_LOCATION (realret))
12657 SET_EXPR_LOCATION (realret, loc);
12658 return realret;
12659 }
12660 return ret;
12661 }
12662 }
12663 }
12664 return NULL_TREE;
12665 }
12666
12667 /* Look up the function in builtin_decl that corresponds to DECL
12668 and set ASMSPEC as its user assembler name. DECL must be a
12669 function decl that declares a builtin. */
12670
12671 void
12672 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12673 {
12674 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12675 && asmspec != 0);
12676
12677 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12678 set_user_assembler_name (builtin, asmspec);
12679
12680 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12681 && INT_TYPE_SIZE < BITS_PER_WORD)
12682 {
12683 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12684 set_user_assembler_libfunc ("ffs", asmspec);
12685 set_optab_libfunc (ffs_optab, mode, "ffs");
12686 }
12687 }
12688
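/* For illustration, the situation handled above (hypothetical user code):

     extern int ffs (int) __asm__ ("my_ffs");

   The user-specified assembler name is copied to the builtin's explicit
   declaration, and on targets where int is narrower than a word the ffs
   optab's libfunc is redirected as well, so that calls expanded through
   the optab also use the renamed symbol.  */
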
12689 /* Return true if DECL is a builtin that expands to a constant or similarly
12690 simple code. */
12691 bool
12692 is_simple_builtin (tree decl)
12693 {
12694 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12695 switch (DECL_FUNCTION_CODE (decl))
12696 {
12697 /* Builtins that expand to constants. */
12698 case BUILT_IN_CONSTANT_P:
12699 case BUILT_IN_EXPECT:
12700 case BUILT_IN_OBJECT_SIZE:
12701 case BUILT_IN_UNREACHABLE:
12702 /* Simple register moves or loads from stack. */
12703 case BUILT_IN_ASSUME_ALIGNED:
12704 case BUILT_IN_RETURN_ADDRESS:
12705 case BUILT_IN_EXTRACT_RETURN_ADDR:
12706 case BUILT_IN_FROB_RETURN_ADDR:
12707 case BUILT_IN_RETURN:
12708 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12709 case BUILT_IN_FRAME_ADDRESS:
12710 case BUILT_IN_VA_END:
12711 case BUILT_IN_STACK_SAVE:
12712 case BUILT_IN_STACK_RESTORE:
12713 /* Exception state returns or moves registers around. */
12714 case BUILT_IN_EH_FILTER:
12715 case BUILT_IN_EH_POINTER:
12716 case BUILT_IN_EH_COPY_VALUES:
12717 return true;
12718
12719 default:
12720 return false;
12721 }
12722
12723 return false;
12724 }
12725
12726 /* Return true if DECL is a builtin that is not expensive, i.e. it is
12727 most probably expanded inline into reasonably simple code. This is a
12728 superset of is_simple_builtin. */
12729 bool
12730 is_inexpensive_builtin (tree decl)
12731 {
12732 if (!decl)
12733 return false;
12734 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12735 return true;
12736 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12737 switch (DECL_FUNCTION_CODE (decl))
12738 {
12739 case BUILT_IN_ABS:
12740 CASE_BUILT_IN_ALLOCA:
12741 case BUILT_IN_BSWAP16:
12742 case BUILT_IN_BSWAP32:
12743 case BUILT_IN_BSWAP64:
12744 case BUILT_IN_BSWAP128:
12745 case BUILT_IN_CLZ:
12746 case BUILT_IN_CLZIMAX:
12747 case BUILT_IN_CLZL:
12748 case BUILT_IN_CLZLL:
12749 case BUILT_IN_CTZ:
12750 case BUILT_IN_CTZIMAX:
12751 case BUILT_IN_CTZL:
12752 case BUILT_IN_CTZLL:
12753 case BUILT_IN_FFS:
12754 case BUILT_IN_FFSIMAX:
12755 case BUILT_IN_FFSL:
12756 case BUILT_IN_FFSLL:
12757 case BUILT_IN_IMAXABS:
12758 case BUILT_IN_FINITE:
12759 case BUILT_IN_FINITEF:
12760 case BUILT_IN_FINITEL:
12761 case BUILT_IN_FINITED32:
12762 case BUILT_IN_FINITED64:
12763 case BUILT_IN_FINITED128:
12764 case BUILT_IN_FPCLASSIFY:
12765 case BUILT_IN_ISFINITE:
12766 case BUILT_IN_ISINF_SIGN:
12767 case BUILT_IN_ISINF:
12768 case BUILT_IN_ISINFF:
12769 case BUILT_IN_ISINFL:
12770 case BUILT_IN_ISINFD32:
12771 case BUILT_IN_ISINFD64:
12772 case BUILT_IN_ISINFD128:
12773 case BUILT_IN_ISNAN:
12774 case BUILT_IN_ISNANF:
12775 case BUILT_IN_ISNANL:
12776 case BUILT_IN_ISNAND32:
12777 case BUILT_IN_ISNAND64:
12778 case BUILT_IN_ISNAND128:
12779 case BUILT_IN_ISNORMAL:
12780 case BUILT_IN_ISGREATER:
12781 case BUILT_IN_ISGREATEREQUAL:
12782 case BUILT_IN_ISLESS:
12783 case BUILT_IN_ISLESSEQUAL:
12784 case BUILT_IN_ISLESSGREATER:
12785 case BUILT_IN_ISUNORDERED:
12786 case BUILT_IN_VA_ARG_PACK:
12787 case BUILT_IN_VA_ARG_PACK_LEN:
12788 case BUILT_IN_VA_COPY:
12789 case BUILT_IN_TRAP:
12790 case BUILT_IN_SAVEREGS:
12791 case BUILT_IN_POPCOUNTL:
12792 case BUILT_IN_POPCOUNTLL:
12793 case BUILT_IN_POPCOUNTIMAX:
12794 case BUILT_IN_POPCOUNT:
12795 case BUILT_IN_PARITYL:
12796 case BUILT_IN_PARITYLL:
12797 case BUILT_IN_PARITYIMAX:
12798 case BUILT_IN_PARITY:
12799 case BUILT_IN_LABS:
12800 case BUILT_IN_LLABS:
12801 case BUILT_IN_PREFETCH:
12802 case BUILT_IN_ACC_ON_DEVICE:
12803 return true;
12804
12805 default:
12806 return is_simple_builtin (decl);
12807 }
12808
12809 return false;
12810 }
12811
12812 /* Return true if T is a constant and the value cast to a target char
12813 can be represented by a host char.
12814 Store the cast char constant in *P if so. */
12815
12816 bool
12817 target_char_cst_p (tree t, char *p)
12818 {
12819 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12820 return false;
12821
12822 *p = (char)tree_to_uhwi (t);
12823 return true;
12824 }
12825
12826 /* Return true if the builtin DECL is implemented in a standard library.
12827 Otherwise return false, which does not guarantee that it is not (thus the
12828 list of handled builtins below may be incomplete). */
12829
12830 bool
12831 builtin_with_linkage_p (tree decl)
12832 {
12833 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12834 switch (DECL_FUNCTION_CODE (decl))
12835 {
12836 CASE_FLT_FN (BUILT_IN_ACOS):
12837 CASE_FLT_FN (BUILT_IN_ACOSH):
12838 CASE_FLT_FN (BUILT_IN_ASIN):
12839 CASE_FLT_FN (BUILT_IN_ASINH):
12840 CASE_FLT_FN (BUILT_IN_ATAN):
12841 CASE_FLT_FN (BUILT_IN_ATANH):
12842 CASE_FLT_FN (BUILT_IN_ATAN2):
12843 CASE_FLT_FN (BUILT_IN_CBRT):
12844 CASE_FLT_FN (BUILT_IN_CEIL):
12845 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
12846 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12847 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
12848 CASE_FLT_FN (BUILT_IN_COS):
12849 CASE_FLT_FN (BUILT_IN_COSH):
12850 CASE_FLT_FN (BUILT_IN_ERF):
12851 CASE_FLT_FN (BUILT_IN_ERFC):
12852 CASE_FLT_FN (BUILT_IN_EXP):
12853 CASE_FLT_FN (BUILT_IN_EXP2):
12854 CASE_FLT_FN (BUILT_IN_EXPM1):
12855 CASE_FLT_FN (BUILT_IN_FABS):
12856 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12857 CASE_FLT_FN (BUILT_IN_FDIM):
12858 CASE_FLT_FN (BUILT_IN_FLOOR):
12859 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
12860 CASE_FLT_FN (BUILT_IN_FMA):
12861 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
12862 CASE_FLT_FN (BUILT_IN_FMAX):
12863 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
12864 CASE_FLT_FN (BUILT_IN_FMIN):
12865 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
12866 CASE_FLT_FN (BUILT_IN_FMOD):
12867 CASE_FLT_FN (BUILT_IN_FREXP):
12868 CASE_FLT_FN (BUILT_IN_HYPOT):
12869 CASE_FLT_FN (BUILT_IN_ILOGB):
12870 CASE_FLT_FN (BUILT_IN_LDEXP):
12871 CASE_FLT_FN (BUILT_IN_LGAMMA):
12872 CASE_FLT_FN (BUILT_IN_LLRINT):
12873 CASE_FLT_FN (BUILT_IN_LLROUND):
12874 CASE_FLT_FN (BUILT_IN_LOG):
12875 CASE_FLT_FN (BUILT_IN_LOG10):
12876 CASE_FLT_FN (BUILT_IN_LOG1P):
12877 CASE_FLT_FN (BUILT_IN_LOG2):
12878 CASE_FLT_FN (BUILT_IN_LOGB):
12879 CASE_FLT_FN (BUILT_IN_LRINT):
12880 CASE_FLT_FN (BUILT_IN_LROUND):
12881 CASE_FLT_FN (BUILT_IN_MODF):
12882 CASE_FLT_FN (BUILT_IN_NAN):
12883 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12884 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12885 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12886 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12887 CASE_FLT_FN (BUILT_IN_POW):
12888 CASE_FLT_FN (BUILT_IN_REMAINDER):
12889 CASE_FLT_FN (BUILT_IN_REMQUO):
12890 CASE_FLT_FN (BUILT_IN_RINT):
12891 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12892 CASE_FLT_FN (BUILT_IN_ROUND):
12893 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12894 CASE_FLT_FN (BUILT_IN_SCALBLN):
12895 CASE_FLT_FN (BUILT_IN_SCALBN):
12896 CASE_FLT_FN (BUILT_IN_SIN):
12897 CASE_FLT_FN (BUILT_IN_SINH):
12898 CASE_FLT_FN (BUILT_IN_SINCOS):
12899 CASE_FLT_FN (BUILT_IN_SQRT):
12900 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12901 CASE_FLT_FN (BUILT_IN_TAN):
12902 CASE_FLT_FN (BUILT_IN_TANH):
12903 CASE_FLT_FN (BUILT_IN_TGAMMA):
12904 CASE_FLT_FN (BUILT_IN_TRUNC):
12905 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12906 return true;
12907 default:
12908 break;
12909 }
12910 return false;
12911 }
12912
12913 /* Return true if OFFRNG is bounded to a subrange of offset values
12914 valid for the largest possible object. */
12915
12916 bool
12917 access_ref::offset_bounded () const
12918 {
12919 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12920 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12921 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
12922 }
12923
12924 /* Return the fnspec string describing the known side effects of the builtin
12925 CALLEE, or the empty string if they are not known. See
12926 tree-ssa-structalias.c:find_func_aliases for the list of builtins we might need to handle here. */
12927
12928 attr_fnspec
12929 builtin_fnspec (tree callee)
12930 {
12931 built_in_function code = DECL_FUNCTION_CODE (callee);
12932
12933 switch (code)
12934 {
12935 /* All the following functions read memory pointed to by
12936 their second argument and write memory pointed to by first
12937 argument.
12938 strcat/strncat additionally reads memory pointed to by the first
12939 argument. */
12940 case BUILT_IN_STRCAT:
12941 case BUILT_IN_STRCAT_CHK:
12942 return "1cW R ";
12943 case BUILT_IN_STRNCAT:
12944 case BUILT_IN_STRNCAT_CHK:
12945 return "1cW R3";
12946 case BUILT_IN_STRCPY:
12947 case BUILT_IN_STRCPY_CHK:
12948 return "1cO R ";
12949 case BUILT_IN_STPCPY:
12950 case BUILT_IN_STPCPY_CHK:
12951 return ".cO R ";
12952 case BUILT_IN_STRNCPY:
12953 case BUILT_IN_MEMCPY:
12954 case BUILT_IN_MEMMOVE:
12955 case BUILT_IN_TM_MEMCPY:
12956 case BUILT_IN_TM_MEMMOVE:
12957 case BUILT_IN_STRNCPY_CHK:
12958 case BUILT_IN_MEMCPY_CHK:
12959 case BUILT_IN_MEMMOVE_CHK:
12960 return "1cO3R3";
12961 case BUILT_IN_MEMPCPY:
12962 case BUILT_IN_MEMPCPY_CHK:
12963 return ".cO3R3";
12964 case BUILT_IN_STPNCPY:
12965 case BUILT_IN_STPNCPY_CHK:
12966 return ".cO3R3";
12967 case BUILT_IN_BCOPY:
12968 return ".cR3O3";
12969 case BUILT_IN_BZERO:
12970 return ".cO2";
12971 case BUILT_IN_MEMCMP:
12972 case BUILT_IN_MEMCMP_EQ:
12973 case BUILT_IN_BCMP:
12974 case BUILT_IN_STRNCMP:
12975 case BUILT_IN_STRNCMP_EQ:
12976 case BUILT_IN_STRNCASECMP:
12977 return ".cR3R3";
12978
12979 /* The following functions read memory pointed to by their
12980 first argument. */
12981 CASE_BUILT_IN_TM_LOAD (1):
12982 CASE_BUILT_IN_TM_LOAD (2):
12983 CASE_BUILT_IN_TM_LOAD (4):
12984 CASE_BUILT_IN_TM_LOAD (8):
12985 CASE_BUILT_IN_TM_LOAD (FLOAT):
12986 CASE_BUILT_IN_TM_LOAD (DOUBLE):
12987 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
12988 CASE_BUILT_IN_TM_LOAD (M64):
12989 CASE_BUILT_IN_TM_LOAD (M128):
12990 CASE_BUILT_IN_TM_LOAD (M256):
12991 case BUILT_IN_TM_LOG:
12992 case BUILT_IN_TM_LOG_1:
12993 case BUILT_IN_TM_LOG_2:
12994 case BUILT_IN_TM_LOG_4:
12995 case BUILT_IN_TM_LOG_8:
12996 case BUILT_IN_TM_LOG_FLOAT:
12997 case BUILT_IN_TM_LOG_DOUBLE:
12998 case BUILT_IN_TM_LOG_LDOUBLE:
12999 case BUILT_IN_TM_LOG_M64:
13000 case BUILT_IN_TM_LOG_M128:
13001 case BUILT_IN_TM_LOG_M256:
13002 return ".cR ";
13003
13004 case BUILT_IN_INDEX:
13005 case BUILT_IN_RINDEX:
13006 case BUILT_IN_STRCHR:
13007 case BUILT_IN_STRLEN:
13008 case BUILT_IN_STRRCHR:
13009 return ".cR ";
13010 case BUILT_IN_STRNLEN:
13011 return ".cR2";
13012
13013 /* These read memory pointed to by the first argument.
13014 Allocating memory does not have any side-effects apart from
13015 being the definition point for the pointer.
13016 Unix98 specifies that errno is set on allocation failure. */
13017 case BUILT_IN_STRDUP:
13018 return "mCR ";
13019 case BUILT_IN_STRNDUP:
13020 return "mCR2";
13021 /* Allocating memory does not have any side-effects apart from
13022 being the definition point for the pointer. */
13023 case BUILT_IN_MALLOC:
13024 case BUILT_IN_ALIGNED_ALLOC:
13025 case BUILT_IN_CALLOC:
13026 return "mC";
13027 CASE_BUILT_IN_ALLOCA:
13028 return "mc";
13029 /* These read memory pointed to by the first argument with size
13030 in the third argument. */
13031 case BUILT_IN_MEMCHR:
13032 return ".cR3";
13033 /* These read memory pointed to by the first and second arguments. */
13034 case BUILT_IN_STRSTR:
13035 case BUILT_IN_STRPBRK:
13036 case BUILT_IN_STRCASECMP:
13037 case BUILT_IN_STRCSPN:
13038 case BUILT_IN_STRSPN:
13039 case BUILT_IN_STRCMP:
13040 case BUILT_IN_STRCMP_EQ:
13041 return ".cR R ";
13042 /* Freeing memory kills the pointed-to memory. More importantly
13043 the call has to serve as a barrier for moving loads and stores
13044 across it. */
13045 case BUILT_IN_STACK_RESTORE:
13046 case BUILT_IN_FREE:
13047 return ".co ";
13048 case BUILT_IN_VA_END:
13049 return ".cO ";
13050 /* Realloc serves both as allocation point and deallocation point. */
13051 case BUILT_IN_REALLOC:
13052 return ".cw ";
13053 case BUILT_IN_GAMMA_R:
13054 case BUILT_IN_GAMMAF_R:
13055 case BUILT_IN_GAMMAL_R:
13056 case BUILT_IN_LGAMMA_R:
13057 case BUILT_IN_LGAMMAF_R:
13058 case BUILT_IN_LGAMMAL_R:
13059 return ".C. Ot";
13060 case BUILT_IN_FREXP:
13061 case BUILT_IN_FREXPF:
13062 case BUILT_IN_FREXPL:
13063 case BUILT_IN_MODF:
13064 case BUILT_IN_MODFF:
13065 case BUILT_IN_MODFL:
13066 return ".c. Ot";
13067 case BUILT_IN_REMQUO:
13068 case BUILT_IN_REMQUOF:
13069 case BUILT_IN_REMQUOL:
13070 return ".c. . Ot";
13071 case BUILT_IN_SINCOS:
13072 case BUILT_IN_SINCOSF:
13073 case BUILT_IN_SINCOSL:
13074 return ".c. OtOt";
13075 case BUILT_IN_MEMSET:
13076 case BUILT_IN_MEMSET_CHK:
13077 case BUILT_IN_TM_MEMSET:
13078 return "1cO3";
13079 CASE_BUILT_IN_TM_STORE (1):
13080 CASE_BUILT_IN_TM_STORE (2):
13081 CASE_BUILT_IN_TM_STORE (4):
13082 CASE_BUILT_IN_TM_STORE (8):
13083 CASE_BUILT_IN_TM_STORE (FLOAT):
13084 CASE_BUILT_IN_TM_STORE (DOUBLE):
13085 CASE_BUILT_IN_TM_STORE (LDOUBLE):
13086 CASE_BUILT_IN_TM_STORE (M64):
13087 CASE_BUILT_IN_TM_STORE (M128):
13088 CASE_BUILT_IN_TM_STORE (M256):
13089 return ".cO ";
13090 case BUILT_IN_STACK_SAVE:
13091 return ".c";
13092 case BUILT_IN_ASSUME_ALIGNED:
13093 return "1cX ";
13094 /* But posix_memalign stores a pointer into the memory pointed to
13095 by its first argument. */
13096 case BUILT_IN_POSIX_MEMALIGN:
13097 return ".cOt";
13098
13099 default:
13100 return "";
13101 }
13102 }
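/* A rough decoding of one of the fnspec strings above, "1cO3R3" as used
   for memcpy; attr-fnspec.h holds the authoritative grammar:

     '1'   the function returns its first argument (the destination),
     'c'   apart from the described argument accesses it behaves like a
           const function,
     'O3'  the memory pointed to by argument 1 is written, with the
           access size given by argument 3,
     'R3'  the memory pointed to by argument 2 is only read, again with
           the size given by argument 3.  */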