[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
70
71
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
76
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
83 {
84 #include "builtins.def"
85 };
86
87 /* Set up an array of builtin_info_type; make sure each element's decl is
88 initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
90
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
93
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_strchr (location_t, tree, tree, tree);
152 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
154 static tree fold_builtin_strcmp (location_t, tree, tree);
155 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
156 static tree fold_builtin_isascii (location_t, tree);
157 static tree fold_builtin_toascii (location_t, tree);
158 static tree fold_builtin_isdigit (location_t, tree);
159 static tree fold_builtin_fabs (location_t, tree, tree);
160 static tree fold_builtin_abs (location_t, tree, tree);
161 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
162 enum tree_code);
163 static tree fold_builtin_0 (location_t, tree);
164 static tree fold_builtin_1 (location_t, tree, tree);
165 static tree fold_builtin_2 (location_t, tree, tree, tree);
166 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_varargs (location_t, tree, tree*, int);
168
169 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
170 static tree fold_builtin_strstr (location_t, tree, tree, tree);
171 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
172 static tree fold_builtin_strspn (location_t, tree, tree);
173 static tree fold_builtin_strcspn (location_t, tree, tree);
174
175 static rtx expand_builtin_object_size (tree);
176 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
177 enum built_in_function);
178 static void maybe_emit_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_free_warning (tree);
181 static tree fold_builtin_object_size (tree, tree);
182
183 unsigned HOST_WIDE_INT target_newline;
184 unsigned HOST_WIDE_INT target_percent;
185 static unsigned HOST_WIDE_INT target_c;
186 static unsigned HOST_WIDE_INT target_s;
187 char target_percent_c[3];
188 char target_percent_s[3];
189 char target_percent_s_newline[4];
190 static tree do_mpfr_remquo (tree, tree, tree);
191 static tree do_mpfr_lgamma_r (tree, tree, tree);
192 static void expand_builtin_sync_synchronize (void);
193
194 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
195
196 static bool
197 is_builtin_name (const char *name)
198 {
199 if (strncmp (name, "__builtin_", 10) == 0)
200 return true;
201 if (strncmp (name, "__sync_", 7) == 0)
202 return true;
203 if (strncmp (name, "__atomic_", 9) == 0)
204 return true;
205 if (flag_cilkplus
206 && (!strcmp (name, "__cilkrts_detach")
207 || !strcmp (name, "__cilkrts_pop_frame")))
208 return true;
209 return false;
210 }
211
212
213 /* Return true if DECL is a function symbol representing a built-in. */
214
215 bool
216 is_builtin_fn (tree decl)
217 {
218 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
219 }
220
221 /* Return true if NODE should be considered for inline expansion regardless
222 of the optimization level. This means whenever a function is invoked with
223 its "internal" name, which normally contains the prefix "__builtin". */
224
225 bool
226 called_as_built_in (tree node)
227 {
228 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
229 we want the name used to call the function, not the name it
230 will have. */
231 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
232 return is_builtin_name (name);
233 }
234
235 /* Compute values M and N such that M divides (address of EXP - N) and such
236 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
237 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
238 *ALIGNP and any bit-offset to *BITPOSP.
239
240 Note that the address (and thus the alignment) computed here is based
241 on the address to which a symbol resolves, whereas DECL_ALIGN is based
242 on the address at which an object is actually located. These two
243 addresses are not always the same. For example, on ARM targets,
244 the address &foo of a Thumb function foo() has the lowest bit set,
245 whereas foo() itself starts on an even address.
246
247 If ADDR_P is true we are taking the address of the memory reference EXP
248 and thus cannot rely on the access taking place. */
249
250 static bool
251 get_object_alignment_2 (tree exp, unsigned int *alignp,
252 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
253 {
254 HOST_WIDE_INT bitsize, bitpos;
255 tree offset;
256 machine_mode mode;
257 int unsignedp, reversep, volatilep;
258 unsigned int align = BITS_PER_UNIT;
259 bool known_alignment = false;
260
261 /* Get the innermost object and the constant (bitpos) and possibly
262 variable (offset) offset of the access. */
263 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
264 &unsignedp, &reversep, &volatilep);
265
266 /* Extract alignment information from the innermost object and
267 possibly adjust bitpos and offset. */
268 if (TREE_CODE (exp) == FUNCTION_DECL)
269 {
270 /* Function addresses can encode extra information besides their
271 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
272 allows the low bit to be used as a virtual bit, we know
273 that the address itself must be at least 2-byte aligned. */
274 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
275 align = 2 * BITS_PER_UNIT;
276 }
277 else if (TREE_CODE (exp) == LABEL_DECL)
278 ;
279 else if (TREE_CODE (exp) == CONST_DECL)
280 {
281 /* The alignment of a CONST_DECL is determined by its initializer. */
282 exp = DECL_INITIAL (exp);
283 align = TYPE_ALIGN (TREE_TYPE (exp));
284 if (CONSTANT_CLASS_P (exp))
285 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
286
287 known_alignment = true;
288 }
289 else if (DECL_P (exp))
290 {
291 align = DECL_ALIGN (exp);
292 known_alignment = true;
293 }
294 else if (TREE_CODE (exp) == INDIRECT_REF
295 || TREE_CODE (exp) == MEM_REF
296 || TREE_CODE (exp) == TARGET_MEM_REF)
297 {
298 tree addr = TREE_OPERAND (exp, 0);
299 unsigned ptr_align;
300 unsigned HOST_WIDE_INT ptr_bitpos;
301 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
302
303 /* If the address is explicitly aligned, handle that. */
304 if (TREE_CODE (addr) == BIT_AND_EXPR
305 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
306 {
307 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
308 ptr_bitmask *= BITS_PER_UNIT;
309 align = least_bit_hwi (ptr_bitmask);
310 addr = TREE_OPERAND (addr, 0);
311 }
312
313 known_alignment
314 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
315 align = MAX (ptr_align, align);
316
317 /* Re-apply explicit alignment to the bitpos. */
318 ptr_bitpos &= ptr_bitmask;
319
320 /* The alignment of the pointer operand in a TARGET_MEM_REF
321 has to take the variable offset parts into account. */
322 if (TREE_CODE (exp) == TARGET_MEM_REF)
323 {
324 if (TMR_INDEX (exp))
325 {
326 unsigned HOST_WIDE_INT step = 1;
327 if (TMR_STEP (exp))
328 step = TREE_INT_CST_LOW (TMR_STEP (exp));
329 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
330 }
331 if (TMR_INDEX2 (exp))
332 align = BITS_PER_UNIT;
333 known_alignment = false;
334 }
335
336 /* When EXP is an actual memory reference then we can use
337 TYPE_ALIGN of a pointer indirection to derive alignment.
338 Do so only if get_pointer_alignment_1 did not reveal absolute
339 alignment knowledge and if using that alignment would
340 improve the situation. */
341 if (!addr_p && !known_alignment
342 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
343 align = TYPE_ALIGN (TREE_TYPE (exp));
344 else
345 {
346 /* Else adjust bitpos accordingly. */
347 bitpos += ptr_bitpos;
348 if (TREE_CODE (exp) == MEM_REF
349 || TREE_CODE (exp) == TARGET_MEM_REF)
350 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
351 }
352 }
353 else if (TREE_CODE (exp) == STRING_CST)
354 {
355 /* STRING_CST are the only constant objects we allow to be not
356 wrapped inside a CONST_DECL. */
357 align = TYPE_ALIGN (TREE_TYPE (exp));
358 if (CONSTANT_CLASS_P (exp))
359 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
360
361 known_alignment = true;
362 }
363
364 /* If there is a non-constant offset part extract the maximum
365 alignment that can prevail. */
366 if (offset)
367 {
368 unsigned int trailing_zeros = tree_ctz (offset);
369 if (trailing_zeros < HOST_BITS_PER_INT)
370 {
371 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
372 if (inner)
373 align = MIN (align, inner);
374 }
375 }
376
377 *alignp = align;
378 *bitposp = bitpos & (*alignp - 1);
379 return known_alignment;
380 }
381
382 /* For a memory reference expression EXP compute values M and N such that M
383 divides (&EXP - N) and such that N < M. If these numbers can be determined,
384 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
385 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
386
387 bool
388 get_object_alignment_1 (tree exp, unsigned int *alignp,
389 unsigned HOST_WIDE_INT *bitposp)
390 {
391 return get_object_alignment_2 (exp, alignp, bitposp, false);
392 }
393
394 /* Return the alignment in bits of EXP, an object. */
395
396 unsigned int
397 get_object_alignment (tree exp)
398 {
399 unsigned HOST_WIDE_INT bitpos = 0;
400 unsigned int align;
401
402 get_object_alignment_1 (exp, &align, &bitpos);
403
404 /* align and bitpos now specify known low bits of the pointer.
405 ptr & (align - 1) == bitpos. */
406
407 if (bitpos != 0)
408 align = least_bit_hwi (bitpos);
409 return align;
410 }
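/* Illustrative note (editorial sketch, not part of the original source): if
   get_object_alignment_1 reports align == 64 and bitpos == 16, the address is
   known to be 2 bytes past an 8-byte boundary, so the returned alignment
   collapses to least_bit_hwi (16) == 16 bits, i.e. a 2-byte guarantee.  */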
411
412 /* For a pointer valued expression EXP compute values M and N such that M
413 divides (EXP - N) and such that N < M. If these numbers can be determined,
414 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
415 the results are just a conservative approximation.
416
417 If EXP is not a pointer, false is returned too. */
418
419 bool
420 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
421 unsigned HOST_WIDE_INT *bitposp)
422 {
423 STRIP_NOPS (exp);
424
425 if (TREE_CODE (exp) == ADDR_EXPR)
426 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
427 alignp, bitposp, true);
428 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
429 {
430 unsigned int align;
431 unsigned HOST_WIDE_INT bitpos;
432 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
433 &align, &bitpos);
434 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
435 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
436 else
437 {
438 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
439 if (trailing_zeros < HOST_BITS_PER_INT)
440 {
441 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
442 if (inner)
443 align = MIN (align, inner);
444 }
445 }
446 *alignp = align;
447 *bitposp = bitpos & (align - 1);
448 return res;
449 }
450 else if (TREE_CODE (exp) == SSA_NAME
451 && POINTER_TYPE_P (TREE_TYPE (exp)))
452 {
453 unsigned int ptr_align, ptr_misalign;
454 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
455
456 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
457 {
458 *bitposp = ptr_misalign * BITS_PER_UNIT;
459 *alignp = ptr_align * BITS_PER_UNIT;
460 /* Make sure to return a sensible alignment when the multiplication
461 by BITS_PER_UNIT overflowed. */
462 if (*alignp == 0)
463 *alignp = 1u << (HOST_BITS_PER_INT - 1);
464 /* We cannot really tell whether this result is an approximation. */
465 return false;
466 }
467 else
468 {
469 *bitposp = 0;
470 *alignp = BITS_PER_UNIT;
471 return false;
472 }
473 }
474 else if (TREE_CODE (exp) == INTEGER_CST)
475 {
476 *alignp = BIGGEST_ALIGNMENT;
477 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
478 & (BIGGEST_ALIGNMENT - 1));
479 return true;
480 }
481
482 *bitposp = 0;
483 *alignp = BITS_PER_UNIT;
484 return false;
485 }
486
487 /* Return the alignment in bits of EXP, a pointer valued expression.
488 The alignment returned is, by default, the alignment of the thing that
489 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
490
491 Otherwise, look at the expression to see if we can do better, i.e., if the
492 expression is actually pointing at an object whose alignment is tighter. */
493
494 unsigned int
495 get_pointer_alignment (tree exp)
496 {
497 unsigned HOST_WIDE_INT bitpos = 0;
498 unsigned int align;
499
500 get_pointer_alignment_1 (exp, &align, &bitpos);
501
502 /* align and bitpos now specify known low bits of the pointer.
503 ptr & (align - 1) == bitpos. */
504
505 if (bitpos != 0)
506 align = least_bit_hwi (bitpos);
507
508 return align;
509 }
510
511 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
512 way, because the string could contain a zero byte in the middle.
513 TREE_STRING_LENGTH is the size of the character array, not the string.
514
515 ONLY_VALUE should be nonzero if the result is not going to be emitted
516 into the instruction stream and zero if it is going to be expanded.
517 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
518 is returned, otherwise NULL, since
519 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
520 evaluate the side-effects.
521
522 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
523 accesses. Note that this implies the result is not going to be emitted
524 into the instruction stream.
525
526 The value returned is of type `ssizetype'.
527
528 Unfortunately, string_constant can't access the values of const char
529 arrays with initializers, so neither can we do so here. */
530
531 tree
532 c_strlen (tree src, int only_value)
533 {
534 tree offset_node;
535 HOST_WIDE_INT offset;
536 int max;
537 const char *ptr;
538 location_t loc;
539
540 STRIP_NOPS (src);
541 if (TREE_CODE (src) == COND_EXPR
542 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
543 {
544 tree len1, len2;
545
546 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
547 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
548 if (tree_int_cst_equal (len1, len2))
549 return len1;
550 }
551
552 if (TREE_CODE (src) == COMPOUND_EXPR
553 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
554 return c_strlen (TREE_OPERAND (src, 1), only_value);
555
556 loc = EXPR_LOC_OR_LOC (src, input_location);
557
558 src = string_constant (src, &offset_node);
559 if (src == 0)
560 return NULL_TREE;
561
562 max = TREE_STRING_LENGTH (src) - 1;
563 ptr = TREE_STRING_POINTER (src);
564
565 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
566 {
567 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
568 compute the offset to the following null if we don't know where to
569 start searching for it. */
570 int i;
571
572 for (i = 0; i < max; i++)
573 if (ptr[i] == 0)
574 return NULL_TREE;
575
576 /* We don't know the starting offset, but we do know that the string
577 has no internal zero bytes. We can assume that the offset falls
578 within the bounds of the string; otherwise, the programmer deserves
579 what he gets. Subtract the offset from the length of the string,
580 and return that. This would perhaps not be valid if we were dealing
581 with named arrays in addition to literal string constants. */
582
583 return size_diffop_loc (loc, size_int (max), offset_node);
584 }
585
586 /* We have a known offset into the string. Start searching there for
587 a null character if we can represent it as a single HOST_WIDE_INT. */
588 if (offset_node == 0)
589 offset = 0;
590 else if (! tree_fits_shwi_p (offset_node))
591 offset = -1;
592 else
593 offset = tree_to_shwi (offset_node);
594
595 /* If the offset is known to be out of bounds, warn, and call strlen at
596 runtime. */
597 if (offset < 0 || offset > max)
598 {
599 /* Suppress multiple warnings for propagated constant strings. */
600 if (only_value != 2
601 && !TREE_NO_WARNING (src))
602 {
603 warning_at (loc, 0, "offset outside bounds of constant string");
604 TREE_NO_WARNING (src) = 1;
605 }
606 return NULL_TREE;
607 }
608
609 /* Use strlen to search for the first zero byte. Since any strings
610 constructed with build_string will have nulls appended, we win even
611 if we get handed something like (char[4])"abcd".
612
613 Since OFFSET is our starting index into the string, no further
614 calculation is needed. */
615 return ssize_int (strlen (ptr + offset));
616 }
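/* Usage sketch (editorial addition, names are illustrative only): callers
   typically guard against the NULL_TREE result, e.g.

     tree len = c_strlen (src, 1);
     if (len && tree_fits_uhwi_p (len))
       ... the constant length is tree_to_uhwi (len) ...

   and fall back to a runtime strlen call otherwise.  */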
617
618 /* Return a constant integer corresponding to the target reading
619 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
620
621 static rtx
622 c_readstr (const char *str, machine_mode mode)
623 {
624 HOST_WIDE_INT ch;
625 unsigned int i, j;
626 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
627
628 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
629 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
630 / HOST_BITS_PER_WIDE_INT;
631
632 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
633 for (i = 0; i < len; i++)
634 tmp[i] = 0;
635
636 ch = 1;
637 for (i = 0; i < GET_MODE_SIZE (mode); i++)
638 {
639 j = i;
640 if (WORDS_BIG_ENDIAN)
641 j = GET_MODE_SIZE (mode) - i - 1;
642 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
643 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
644 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
645 j *= BITS_PER_UNIT;
646
647 if (ch)
648 ch = (unsigned char) str[i];
649 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
650 }
651
652 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
653 return immed_wide_int_const (c, mode);
654 }
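/* Example (editorial sketch, assuming a little-endian target where neither
   BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN is set): c_readstr ("abcd", SImode)
   packs str[0] into the low byte, yielding the constant 0x64636261.  Once a
   NUL byte is seen, all remaining bytes are packed as zero.  */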
655
656 /* Cast a target constant CST to target CHAR. If the value fits into the
657 host char type, return zero and store it in the variable pointed to by
658 P; otherwise return 1. */
659
660 static int
661 target_char_cast (tree cst, char *p)
662 {
663 unsigned HOST_WIDE_INT val, hostval;
664
665 if (TREE_CODE (cst) != INTEGER_CST
666 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
667 return 1;
668
669 /* Do not care if it fits or not right here. */
670 val = TREE_INT_CST_LOW (cst);
671
672 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
673 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
674
675 hostval = val;
676 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
677 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
678
679 if (val != hostval)
680 return 1;
681
682 *p = hostval;
683 return 0;
684 }
685
686 /* Similar to save_expr, but assumes that arbitrary code is not executed
687 in between the multiple evaluations. In particular, we assume that a
688 non-addressable local variable will not be modified. */
689
690 static tree
691 builtin_save_expr (tree exp)
692 {
693 if (TREE_CODE (exp) == SSA_NAME
694 || (TREE_ADDRESSABLE (exp) == 0
695 && (TREE_CODE (exp) == PARM_DECL
696 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
697 return exp;
698
699 return save_expr (exp);
700 }
701
702 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
703 times to get the address of either a higher stack frame, or a return
704 address located within it (depending on FNDECL_CODE). */
705
706 static rtx
707 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
708 {
709 int i;
710 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
711 if (tem == NULL_RTX)
712 {
713 /* For a zero count with __builtin_return_address, we don't care what
714 frame address we return, because target-specific definitions will
715 override us. Therefore frame pointer elimination is OK, and using
716 the soft frame pointer is OK.
717
718 For a nonzero count, or a zero count with __builtin_frame_address,
719 we require a stable offset from the current frame pointer to the
720 previous one, so we must use the hard frame pointer, and
721 we must disable frame pointer elimination. */
722 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
723 tem = frame_pointer_rtx;
724 else
725 {
726 tem = hard_frame_pointer_rtx;
727
728 /* Tell reload not to eliminate the frame pointer. */
729 crtl->accesses_prior_frames = 1;
730 }
731 }
732
733 if (count > 0)
734 SETUP_FRAME_ADDRESSES ();
735
736 /* On the SPARC, the return address is not in the frame, it is in a
737 register. There is no way to access it off of the current frame
738 pointer, but it can be accessed off the previous frame pointer by
739 reading the value from the register window save area. */
740 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
741 count--;
742
743 /* Scan back COUNT frames to the specified frame. */
744 for (i = 0; i < count; i++)
745 {
746 /* Assume the dynamic chain pointer is in the word that the
747 frame address points to, unless otherwise specified. */
748 tem = DYNAMIC_CHAIN_ADDRESS (tem);
749 tem = memory_address (Pmode, tem);
750 tem = gen_frame_mem (Pmode, tem);
751 tem = copy_to_reg (tem);
752 }
753
754 /* For __builtin_frame_address, return what we've got. But, on
755 the SPARC for example, we may have to add a bias. */
756 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
757 return FRAME_ADDR_RTX (tem);
758
759 /* For __builtin_return_address, get the return address from that frame. */
760 #ifdef RETURN_ADDR_RTX
761 tem = RETURN_ADDR_RTX (count, tem);
762 #else
763 tem = memory_address (Pmode,
764 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
765 tem = gen_frame_mem (Pmode, tem);
766 #endif
767 return tem;
768 }
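/* Source-level sketch (editorial): this routine backs both
   __builtin_return_address (N) and __builtin_frame_address (N).  For
   example, __builtin_return_address (0) yields the address the current
   function will return to, while larger N walks N frames up the dynamic
   chain, which is only reliable when those frames keep a frame pointer.  */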
769
770 /* Alias set used for setjmp buffer. */
771 static alias_set_type setjmp_alias_set = -1;
772
773 /* Construct the leading half of a __builtin_setjmp call. Control will
774 return to RECEIVER_LABEL. This is also called directly by the SJLJ
775 exception handling code. */
776
777 void
778 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
779 {
780 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 rtx stack_save;
782 rtx mem;
783
784 if (setjmp_alias_set == -1)
785 setjmp_alias_set = new_alias_set ();
786
787 buf_addr = convert_memory_address (Pmode, buf_addr);
788
789 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
790
791 /* We store the frame pointer and the address of receiver_label in
792 the buffer and use the rest of it for the stack save area, which
793 is machine-dependent. */
794
795 mem = gen_rtx_MEM (Pmode, buf_addr);
796 set_mem_alias_set (mem, setjmp_alias_set);
797 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
798
799 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
800 GET_MODE_SIZE (Pmode))),
801 set_mem_alias_set (mem, setjmp_alias_set);
802
803 emit_move_insn (validize_mem (mem),
804 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
805
806 stack_save = gen_rtx_MEM (sa_mode,
807 plus_constant (Pmode, buf_addr,
808 2 * GET_MODE_SIZE (Pmode)));
809 set_mem_alias_set (stack_save, setjmp_alias_set);
810 emit_stack_save (SAVE_NONLOCAL, &stack_save);
811
812 /* If there is further processing to do, do it. */
813 if (targetm.have_builtin_setjmp_setup ())
814 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
815
816 /* We have a nonlocal label. */
817 cfun->has_nonlocal_label = 1;
818 }
819
820 /* Construct the trailing part of a __builtin_setjmp call. This is
821 also called directly by the SJLJ exception handling code.
822 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
823
824 void
825 expand_builtin_setjmp_receiver (rtx receiver_label)
826 {
827 rtx chain;
828
829 /* Mark the FP as used when we get here, so we have to make sure it's
830 marked as used by this function. */
831 emit_use (hard_frame_pointer_rtx);
832
833 /* Mark the static chain as clobbered here so life information
834 doesn't get messed up for it. */
835 chain = targetm.calls.static_chain (current_function_decl, true);
836 if (chain && REG_P (chain))
837 emit_clobber (chain);
838
839 /* Now put in the code to restore the frame pointer, and argument
840 pointer, if needed. */
841 if (! targetm.have_nonlocal_goto ())
842 {
843 /* First adjust our frame pointer to its actual value. It was
844 previously set to the start of the virtual area corresponding to
845 the stacked variables when we branched here and now needs to be
846 adjusted to the actual hardware fp value.
847
848 Assignments to virtual registers are converted by
849 instantiate_virtual_regs into the corresponding assignment
850 to the underlying register (fp in this case) that makes
851 the original assignment true.
852 So the following insn will actually be decrementing fp by
853 STARTING_FRAME_OFFSET. */
854 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
855
856 /* Restoring the frame pointer also modifies the hard frame pointer.
857 Mark it used (so that the previous assignment remains live once
858 the frame pointer is eliminated) and clobbered (to represent the
859 implicit update from the assignment). */
860 emit_use (hard_frame_pointer_rtx);
861 emit_clobber (hard_frame_pointer_rtx);
862 }
863
864 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
865 {
866 /* If the argument pointer can be eliminated in favor of the
867 frame pointer, we don't need to restore it. We assume here
868 that if such an elimination is present, it can always be used.
869 This is the case on all known machines; if we don't make this
870 assumption, we do unnecessary saving on many machines. */
871 size_t i;
872 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
873
874 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
875 if (elim_regs[i].from == ARG_POINTER_REGNUM
876 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
877 break;
878
879 if (i == ARRAY_SIZE (elim_regs))
880 {
881 /* Now restore our arg pointer from the address at which it
882 was saved in our stack frame. */
883 emit_move_insn (crtl->args.internal_arg_pointer,
884 copy_to_reg (get_arg_pointer_save_area ()));
885 }
886 }
887
888 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
889 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
890 else if (targetm.have_nonlocal_goto_receiver ())
891 emit_insn (targetm.gen_nonlocal_goto_receiver ());
892 else
893 { /* Nothing */ }
894
895 /* We must not allow the code we just generated to be reordered by
896 scheduling. Specifically, the update of the frame pointer must
897 happen immediately, not later. */
898 emit_insn (gen_blockage ());
899 }
900
901 /* __builtin_longjmp is passed a pointer to an array of five words (not
902 all will be used on all machines). It operates similarly to the C
903 library function of the same name, but is more efficient. Much of
904 the code below is copied from the handling of non-local gotos. */
905
906 static void
907 expand_builtin_longjmp (rtx buf_addr, rtx value)
908 {
909 rtx fp, lab, stack;
910 rtx_insn *insn, *last;
911 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
912
913 /* DRAP is needed for stack realign if longjmp is expanded to current
914 function */
915 if (SUPPORTS_STACK_ALIGNMENT)
916 crtl->need_drap = true;
917
918 if (setjmp_alias_set == -1)
919 setjmp_alias_set = new_alias_set ();
920
921 buf_addr = convert_memory_address (Pmode, buf_addr);
922
923 buf_addr = force_reg (Pmode, buf_addr);
924
925 /* We require that the user must pass a second argument of 1, because
926 that is what builtin_setjmp will return. */
927 gcc_assert (value == const1_rtx);
928
929 last = get_last_insn ();
930 if (targetm.have_builtin_longjmp ())
931 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
932 else
933 {
934 fp = gen_rtx_MEM (Pmode, buf_addr);
935 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
936 GET_MODE_SIZE (Pmode)));
937
938 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
939 2 * GET_MODE_SIZE (Pmode)));
940 set_mem_alias_set (fp, setjmp_alias_set);
941 set_mem_alias_set (lab, setjmp_alias_set);
942 set_mem_alias_set (stack, setjmp_alias_set);
943
944 /* Pick up FP, label, and SP from the block and jump. This code is
945 from expand_goto in stmt.c; see there for detailed comments. */
946 if (targetm.have_nonlocal_goto ())
947 /* We have to pass a value to the nonlocal_goto pattern that will
948 get copied into the static_chain pointer, but it does not matter
949 what that value is, because builtin_setjmp does not use it. */
950 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
951 else
952 {
953 lab = copy_to_reg (lab);
954
955 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
956 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
957
958 emit_move_insn (hard_frame_pointer_rtx, fp);
959 emit_stack_restore (SAVE_NONLOCAL, stack);
960
961 emit_use (hard_frame_pointer_rtx);
962 emit_use (stack_pointer_rtx);
963 emit_indirect_jump (lab);
964 }
965 }
966
967 /* Search backwards and mark the jump insn as a non-local goto.
968 Note that this precludes the use of __builtin_longjmp to a
969 __builtin_setjmp target in the same function. However, we've
970 already cautioned the user that these functions are for
971 internal exception handling use only. */
972 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
973 {
974 gcc_assert (insn != last);
975
976 if (JUMP_P (insn))
977 {
978 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
979 break;
980 }
981 else if (CALL_P (insn))
982 break;
983 }
984 }
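/* Usage sketch (editorial): the pairing these expanders implement is the
   internal, EH-only one, e.g.

     if (__builtin_setjmp (buf) == 0)
       ... normal path ...
     else
       ... resumed here by __builtin_longjmp (buf, 1) ...

   where BUF is an array of five words and the second __builtin_longjmp
   argument must be the constant 1, as asserted above.  */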
985
986 static inline bool
987 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
988 {
989 return (iter->i < iter->n);
990 }
991
992 /* This function validates the types of a function call argument list
993 against a specified list of tree_codes. If the last specifier is a 0,
994 that represents an ellipses, otherwise the last specifier must be a
995 VOID_TYPE. */
996
997 static bool
998 validate_arglist (const_tree callexpr, ...)
999 {
1000 enum tree_code code;
1001 bool res = 0;
1002 va_list ap;
1003 const_call_expr_arg_iterator iter;
1004 const_tree arg;
1005
1006 va_start (ap, callexpr);
1007 init_const_call_expr_arg_iterator (callexpr, &iter);
1008
1009 do
1010 {
1011 code = (enum tree_code) va_arg (ap, int);
1012 switch (code)
1013 {
1014 case 0:
1015 /* This signifies an ellipsis; any further arguments are all OK. */
1016 res = true;
1017 goto end;
1018 case VOID_TYPE:
1019 /* This signifies an endlink, if no arguments remain, return
1020 true, otherwise return false. */
1021 res = !more_const_call_expr_args_p (&iter);
1022 goto end;
1023 default:
1024 /* If no parameters remain or the parameter's code does not
1025 match the specified code, return false. Otherwise continue
1026 checking any remaining arguments. */
1027 arg = next_const_call_expr_arg (&iter);
1028 if (!validate_arg (arg, code))
1029 goto end;
1030 break;
1031 }
1032 }
1033 while (1);
1034
1035 /* We need gotos here since we can only have one VA_CLOSE in a
1036 function. */
1037 end: ;
1038 va_end (ap);
1039
1040 return res;
1041 }
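/* Example (editorial): validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly a (pointer, integer) argument list, while a
   trailing 0 in place of VOID_TYPE would allow any further arguments.  */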
1042
1043 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1044 and the address of the save area. */
1045
1046 static rtx
1047 expand_builtin_nonlocal_goto (tree exp)
1048 {
1049 tree t_label, t_save_area;
1050 rtx r_label, r_save_area, r_fp, r_sp;
1051 rtx_insn *insn;
1052
1053 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1054 return NULL_RTX;
1055
1056 t_label = CALL_EXPR_ARG (exp, 0);
1057 t_save_area = CALL_EXPR_ARG (exp, 1);
1058
1059 r_label = expand_normal (t_label);
1060 r_label = convert_memory_address (Pmode, r_label);
1061 r_save_area = expand_normal (t_save_area);
1062 r_save_area = convert_memory_address (Pmode, r_save_area);
1063 /* Copy the address of the save location to a register just in case it was
1064 based on the frame pointer. */
1065 r_save_area = copy_to_reg (r_save_area);
1066 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1067 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1068 plus_constant (Pmode, r_save_area,
1069 GET_MODE_SIZE (Pmode)));
1070
1071 crtl->has_nonlocal_goto = 1;
1072
1073 /* ??? We no longer need to pass the static chain value, afaik. */
1074 if (targetm.have_nonlocal_goto ())
1075 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1076 else
1077 {
1078 r_label = copy_to_reg (r_label);
1079
1080 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1081 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1082
1083 /* Restore frame pointer for containing function. */
1084 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1085 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1086
1087 /* USE of hard_frame_pointer_rtx added for consistency;
1088 not clear if really needed. */
1089 emit_use (hard_frame_pointer_rtx);
1090 emit_use (stack_pointer_rtx);
1091
1092 /* If the architecture is using a GP register, we must
1093 conservatively assume that the target function makes use of it.
1094 The prologue of functions with nonlocal gotos must therefore
1095 initialize the GP register to the appropriate value, and we
1096 must then make sure that this value is live at the point
1097 of the jump. (Note that this doesn't necessarily apply
1098 to targets with a nonlocal_goto pattern; they are free
1099 to implement it in their own way. Note also that this is
1100 a no-op if the GP register is a global invariant.) */
1101 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1102 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1103 emit_use (pic_offset_table_rtx);
1104
1105 emit_indirect_jump (r_label);
1106 }
1107
1108 /* Search backwards to the jump insn and mark it as a
1109 non-local goto. */
1110 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1111 {
1112 if (JUMP_P (insn))
1113 {
1114 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1115 break;
1116 }
1117 else if (CALL_P (insn))
1118 break;
1119 }
1120
1121 return const0_rtx;
1122 }
1123
1124 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1125 (not all will be used on all machines) that was passed to __builtin_setjmp.
1126 It updates the stack pointer in that block to the current value. This is
1127 also called directly by the SJLJ exception handling code. */
1128
1129 void
1130 expand_builtin_update_setjmp_buf (rtx buf_addr)
1131 {
1132 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1133 rtx stack_save
1134 = gen_rtx_MEM (sa_mode,
1135 memory_address
1136 (sa_mode,
1137 plus_constant (Pmode, buf_addr,
1138 2 * GET_MODE_SIZE (Pmode))));
1139
1140 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1141 }
1142
1143 /* Expand a call to __builtin_prefetch. For a target that does not support
1144 data prefetch, evaluate the memory address argument in case it has side
1145 effects. */
1146
1147 static void
1148 expand_builtin_prefetch (tree exp)
1149 {
1150 tree arg0, arg1, arg2;
1151 int nargs;
1152 rtx op0, op1, op2;
1153
1154 if (!validate_arglist (exp, POINTER_TYPE, 0))
1155 return;
1156
1157 arg0 = CALL_EXPR_ARG (exp, 0);
1158
1159 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1160 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1161 locality). */
1162 nargs = call_expr_nargs (exp);
1163 if (nargs > 1)
1164 arg1 = CALL_EXPR_ARG (exp, 1);
1165 else
1166 arg1 = integer_zero_node;
1167 if (nargs > 2)
1168 arg2 = CALL_EXPR_ARG (exp, 2);
1169 else
1170 arg2 = integer_three_node;
1171
1172 /* Argument 0 is an address. */
1173 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1174
1175 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1176 if (TREE_CODE (arg1) != INTEGER_CST)
1177 {
1178 error ("second argument to %<__builtin_prefetch%> must be a constant");
1179 arg1 = integer_zero_node;
1180 }
1181 op1 = expand_normal (arg1);
1182 /* Argument 1 must be either zero or one. */
1183 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1184 {
1185 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1186 " using zero");
1187 op1 = const0_rtx;
1188 }
1189
1190 /* Argument 2 (locality) must be a compile-time constant int. */
1191 if (TREE_CODE (arg2) != INTEGER_CST)
1192 {
1193 error ("third argument to %<__builtin_prefetch%> must be a constant");
1194 arg2 = integer_zero_node;
1195 }
1196 op2 = expand_normal (arg2);
1197 /* Argument 2 must be 0, 1, 2, or 3. */
1198 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1199 {
1200 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1201 op2 = const0_rtx;
1202 }
1203
1204 if (targetm.have_prefetch ())
1205 {
1206 struct expand_operand ops[3];
1207
1208 create_address_operand (&ops[0], op0);
1209 create_integer_operand (&ops[1], INTVAL (op1));
1210 create_integer_operand (&ops[2], INTVAL (op2));
1211 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1212 return;
1213 }
1214
1215 /* Don't do anything with direct references to volatile memory, but
1216 generate code to handle other side effects. */
1217 if (!MEM_P (op0) && side_effects_p (op0))
1218 emit_insn (op0);
1219 }
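/* Source-level sketch (editorial): this expands calls such as
   __builtin_prefetch (p, 1, 3) -- prefetch P for write with maximum
   temporal locality -- and, when the target has no prefetch pattern,
   merely evaluates the address argument for its side effects.  */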
1220
1221 /* Get a MEM rtx for expression EXP which is the address of an operand
1222 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1223 the maximum length of the block of memory that might be accessed or
1224 NULL if unknown. */
1225
1226 static rtx
1227 get_memory_rtx (tree exp, tree len)
1228 {
1229 tree orig_exp = exp;
1230 rtx addr, mem;
1231
1232 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1233 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1234 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1235 exp = TREE_OPERAND (exp, 0);
1236
1237 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1238 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1239
1240 /* Get an expression we can use to find the attributes to assign to MEM.
1241 First remove any nops. */
1242 while (CONVERT_EXPR_P (exp)
1243 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1244 exp = TREE_OPERAND (exp, 0);
1245
1246 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1247 (as builtin stringops may alias with anything). */
1248 exp = fold_build2 (MEM_REF,
1249 build_array_type (char_type_node,
1250 build_range_type (sizetype,
1251 size_one_node, len)),
1252 exp, build_int_cst (ptr_type_node, 0));
1253
1254 /* If the MEM_REF has no acceptable address, try to get the base object
1255 from the original address we got, and build an all-aliasing
1256 unknown-sized access to that one. */
1257 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1258 set_mem_attributes (mem, exp, 0);
1259 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1260 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1261 0))))
1262 {
1263 exp = build_fold_addr_expr (exp);
1264 exp = fold_build2 (MEM_REF,
1265 build_array_type (char_type_node,
1266 build_range_type (sizetype,
1267 size_zero_node,
1268 NULL)),
1269 exp, build_int_cst (ptr_type_node, 0));
1270 set_mem_attributes (mem, exp, 0);
1271 }
1272 set_mem_alias_set (mem, 0);
1273 return mem;
1274 }
1275 \f
1276 /* Built-in functions to perform an untyped call and return. */
1277
1278 #define apply_args_mode \
1279 (this_target_builtins->x_apply_args_mode)
1280 #define apply_result_mode \
1281 (this_target_builtins->x_apply_result_mode)
1282
1283 /* Return the size required for the block returned by __builtin_apply_args,
1284 and initialize apply_args_mode. */
1285
1286 static int
1287 apply_args_size (void)
1288 {
1289 static int size = -1;
1290 int align;
1291 unsigned int regno;
1292 machine_mode mode;
1293
1294 /* The values computed by this function never change. */
1295 if (size < 0)
1296 {
1297 /* The first value is the incoming arg-pointer. */
1298 size = GET_MODE_SIZE (Pmode);
1299
1300 /* The second value is the structure value address unless this is
1301 passed as an "invisible" first argument. */
1302 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1303 size += GET_MODE_SIZE (Pmode);
1304
1305 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1306 if (FUNCTION_ARG_REGNO_P (regno))
1307 {
1308 mode = targetm.calls.get_raw_arg_mode (regno);
1309
1310 gcc_assert (mode != VOIDmode);
1311
1312 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1313 if (size % align != 0)
1314 size = CEIL (size, align) * align;
1315 size += GET_MODE_SIZE (mode);
1316 apply_args_mode[regno] = mode;
1317 }
1318 else
1319 {
1320 apply_args_mode[regno] = VOIDmode;
1321 }
1322 }
1323 return size;
1324 }
1325
1326 /* Return the size required for the block returned by __builtin_apply,
1327 and initialize apply_result_mode. */
1328
1329 static int
1330 apply_result_size (void)
1331 {
1332 static int size = -1;
1333 int align, regno;
1334 machine_mode mode;
1335
1336 /* The values computed by this function never change. */
1337 if (size < 0)
1338 {
1339 size = 0;
1340
1341 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1342 if (targetm.calls.function_value_regno_p (regno))
1343 {
1344 mode = targetm.calls.get_raw_result_mode (regno);
1345
1346 gcc_assert (mode != VOIDmode);
1347
1348 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1349 if (size % align != 0)
1350 size = CEIL (size, align) * align;
1351 size += GET_MODE_SIZE (mode);
1352 apply_result_mode[regno] = mode;
1353 }
1354 else
1355 apply_result_mode[regno] = VOIDmode;
1356
1357 /* Allow targets that use untyped_call and untyped_return to override
1358 the size so that machine-specific information can be stored here. */
1359 #ifdef APPLY_RESULT_SIZE
1360 size = APPLY_RESULT_SIZE;
1361 #endif
1362 }
1363 return size;
1364 }
1365
1366 /* Create a vector describing the result block RESULT. If SAVEP is true,
1367 the result block is used to save the values; otherwise it is used to
1368 restore the values. */
1369
1370 static rtx
1371 result_vector (int savep, rtx result)
1372 {
1373 int regno, size, align, nelts;
1374 machine_mode mode;
1375 rtx reg, mem;
1376 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1377
1378 size = nelts = 0;
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if ((mode = apply_result_mode[regno]) != VOIDmode)
1381 {
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1386 mem = adjust_address (result, mode, size);
1387 savevec[nelts++] = (savep
1388 ? gen_rtx_SET (mem, reg)
1389 : gen_rtx_SET (reg, mem));
1390 size += GET_MODE_SIZE (mode);
1391 }
1392 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1393 }
1394
1395 /* Save the state required to perform an untyped call with the same
1396 arguments as were passed to the current function. */
1397
1398 static rtx
1399 expand_builtin_apply_args_1 (void)
1400 {
1401 rtx registers, tem;
1402 int size, align, regno;
1403 machine_mode mode;
1404 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1405
1406 /* Create a block where the arg-pointer, structure value address,
1407 and argument registers can be saved. */
1408 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1409
1410 /* Walk past the arg-pointer and structure value address. */
1411 size = GET_MODE_SIZE (Pmode);
1412 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1413 size += GET_MODE_SIZE (Pmode);
1414
1415 /* Save each register used in calling a function to the block. */
1416 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1417 if ((mode = apply_args_mode[regno]) != VOIDmode)
1418 {
1419 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1420 if (size % align != 0)
1421 size = CEIL (size, align) * align;
1422
1423 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1424
1425 emit_move_insn (adjust_address (registers, mode, size), tem);
1426 size += GET_MODE_SIZE (mode);
1427 }
1428
1429 /* Save the arg pointer to the block. */
1430 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1431 /* We need the pointer as the caller actually passed them to us, not
1432 as we might have pretended they were passed. Make sure it's a valid
1433 operand, as emit_move_insn isn't expected to handle a PLUS. */
1434 if (STACK_GROWS_DOWNWARD)
1435 tem
1436 = force_operand (plus_constant (Pmode, tem,
1437 crtl->args.pretend_args_size),
1438 NULL_RTX);
1439 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1440
1441 size = GET_MODE_SIZE (Pmode);
1442
1443 /* Save the structure value address unless this is passed as an
1444 "invisible" first argument. */
1445 if (struct_incoming_value)
1446 {
1447 emit_move_insn (adjust_address (registers, Pmode, size),
1448 copy_to_reg (struct_incoming_value));
1449 size += GET_MODE_SIZE (Pmode);
1450 }
1451
1452 /* Return the address of the block. */
1453 return copy_addr_to_reg (XEXP (registers, 0));
1454 }
1455
1456 /* __builtin_apply_args returns a block of memory allocated on
1457 the stack into which is stored the arg pointer, structure
1458 value address, static chain, and all the registers that might
1459 possibly be used in performing a function call. The code is
1460 moved to the start of the function so the incoming values are
1461 saved. */
1462
1463 static rtx
1464 expand_builtin_apply_args (void)
1465 {
1466 /* Don't do __builtin_apply_args more than once in a function.
1467 Save the result of the first call and reuse it. */
1468 if (apply_args_value != 0)
1469 return apply_args_value;
1470 {
1471 /* When this function is called, it means that registers must be
1472 saved on entry to this function. So we migrate the
1473 call to the first insn of this function. */
1474 rtx temp;
1475
1476 start_sequence ();
1477 temp = expand_builtin_apply_args_1 ();
1478 rtx_insn *seq = get_insns ();
1479 end_sequence ();
1480
1481 apply_args_value = temp;
1482
1483 /* Put the insns after the NOTE that starts the function.
1484 If this is inside a start_sequence, make the outer-level insn
1485 chain current, so the code is placed at the start of the
1486 function. If internal_arg_pointer is a non-virtual pseudo,
1487 it needs to be placed after the function that initializes
1488 that pseudo. */
1489 push_topmost_sequence ();
1490 if (REG_P (crtl->args.internal_arg_pointer)
1491 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1492 emit_insn_before (seq, parm_birth_insn);
1493 else
1494 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1495 pop_topmost_sequence ();
1496 return temp;
1497 }
1498 }
1499
1500 /* Perform an untyped call and save the state required to perform an
1501 untyped return of whatever value was returned by the given function. */
1502
1503 static rtx
1504 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1505 {
1506 int size, align, regno;
1507 machine_mode mode;
1508 rtx incoming_args, result, reg, dest, src;
1509 rtx_call_insn *call_insn;
1510 rtx old_stack_level = 0;
1511 rtx call_fusage = 0;
1512 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1513
1514 arguments = convert_memory_address (Pmode, arguments);
1515
1516 /* Create a block where the return registers can be saved. */
1517 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1518
1519 /* Fetch the arg pointer from the ARGUMENTS block. */
1520 incoming_args = gen_reg_rtx (Pmode);
1521 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1522 if (!STACK_GROWS_DOWNWARD)
1523 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1524 incoming_args, 0, OPTAB_LIB_WIDEN);
1525
1526 /* Push a new argument block and copy the arguments. Do not allow
1527 the (potential) memcpy call below to interfere with our stack
1528 manipulations. */
1529 do_pending_stack_adjust ();
1530 NO_DEFER_POP;
1531
1532 /* Save the stack with nonlocal if available. */
1533 if (targetm.have_save_stack_nonlocal ())
1534 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1535 else
1536 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1537
1538 /* Allocate a block of memory onto the stack and copy the memory
1539 arguments to the outgoing arguments address. We can pass TRUE
1540 as the 4th argument because we just saved the stack pointer
1541 and will restore it right after the call. */
1542 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1543
1544 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1545 may have already set current_function_calls_alloca to true.
1546 current_function_calls_alloca won't be set if argsize is zero,
1547 so we have to guarantee need_drap is true here. */
1548 if (SUPPORTS_STACK_ALIGNMENT)
1549 crtl->need_drap = true;
1550
1551 dest = virtual_outgoing_args_rtx;
1552 if (!STACK_GROWS_DOWNWARD)
1553 {
1554 if (CONST_INT_P (argsize))
1555 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1556 else
1557 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1558 }
1559 dest = gen_rtx_MEM (BLKmode, dest);
1560 set_mem_align (dest, PARM_BOUNDARY);
1561 src = gen_rtx_MEM (BLKmode, incoming_args);
1562 set_mem_align (src, PARM_BOUNDARY);
1563 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1564
1565 /* Refer to the argument block. */
1566 apply_args_size ();
1567 arguments = gen_rtx_MEM (BLKmode, arguments);
1568 set_mem_align (arguments, PARM_BOUNDARY);
1569
1570 /* Walk past the arg-pointer and structure value address. */
1571 size = GET_MODE_SIZE (Pmode);
1572 if (struct_value)
1573 size += GET_MODE_SIZE (Pmode);
1574
1575 /* Restore each of the registers previously saved. Make USE insns
1576 for each of these registers for use in making the call. */
1577 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1578 if ((mode = apply_args_mode[regno]) != VOIDmode)
1579 {
1580 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1581 if (size % align != 0)
1582 size = CEIL (size, align) * align;
1583 reg = gen_rtx_REG (mode, regno);
1584 emit_move_insn (reg, adjust_address (arguments, mode, size));
1585 use_reg (&call_fusage, reg);
1586 size += GET_MODE_SIZE (mode);
1587 }
1588
1589 /* Restore the structure value address unless this is passed as an
1590 "invisible" first argument. */
1591 size = GET_MODE_SIZE (Pmode);
1592 if (struct_value)
1593 {
1594 rtx value = gen_reg_rtx (Pmode);
1595 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1596 emit_move_insn (struct_value, value);
1597 if (REG_P (struct_value))
1598 use_reg (&call_fusage, struct_value);
1599 size += GET_MODE_SIZE (Pmode);
1600 }
1601
1602 /* All arguments and registers used for the call are set up by now! */
1603 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1604
1605 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1606 and we don't want to load it into a register as an optimization,
1607 because prepare_call_address already did it if it should be done. */
1608 if (GET_CODE (function) != SYMBOL_REF)
1609 function = memory_address (FUNCTION_MODE, function);
1610
1611 /* Generate the actual call instruction and save the return value. */
1612 if (targetm.have_untyped_call ())
1613 {
1614 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1615 emit_call_insn (targetm.gen_untyped_call (mem, result,
1616 result_vector (1, result)));
1617 }
1618 else if (targetm.have_call_value ())
1619 {
1620 rtx valreg = 0;
1621
1622 /* Locate the unique return register. It is not possible to
1623 express a call that sets more than one return register using
1624 call_value; use untyped_call for that. In fact, untyped_call
1625 only needs to save the return registers in the given block. */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_result_mode[regno]) != VOIDmode)
1628 {
1629 gcc_assert (!valreg); /* have_untyped_call required. */
1630
1631 valreg = gen_rtx_REG (mode, regno);
1632 }
1633
1634 emit_insn (targetm.gen_call_value (valreg,
1635 gen_rtx_MEM (FUNCTION_MODE, function),
1636 const0_rtx, NULL_RTX, const0_rtx));
1637
1638 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1639 }
1640 else
1641 gcc_unreachable ();
1642
1643 /* Find the CALL insn we just emitted, and attach the register usage
1644 information. */
1645 call_insn = last_call_insn ();
1646 add_function_usage_to (call_insn, call_fusage);
1647
1648 /* Restore the stack. */
1649 if (targetm.have_save_stack_nonlocal ())
1650 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1651 else
1652 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1653 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1654
1655 OK_DEFER_POP;
1656
1657 /* Return the address of the result block. */
1658 result = copy_addr_to_reg (XEXP (result, 0));
1659 return convert_memory_address (ptr_mode, result);
1660 }
1661
1662 /* Perform an untyped return. */
1663
1664 static void
1665 expand_builtin_return (rtx result)
1666 {
1667 int size, align, regno;
1668 machine_mode mode;
1669 rtx reg;
1670 rtx_insn *call_fusage = 0;
1671
1672 result = convert_memory_address (Pmode, result);
1673
1674 apply_result_size ();
1675 result = gen_rtx_MEM (BLKmode, result);
1676
1677 if (targetm.have_untyped_return ())
1678 {
1679 rtx vector = result_vector (0, result);
1680 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1681 emit_barrier ();
1682 return;
1683 }
1684
1685 /* Restore the return value and note that each value is used. */
1686 size = 0;
1687 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1688 if ((mode = apply_result_mode[regno]) != VOIDmode)
1689 {
1690 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1691 if (size % align != 0)
1692 size = CEIL (size, align) * align;
1693 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1694 emit_move_insn (reg, adjust_address (result, mode, size));
1695
1696 push_to_sequence (call_fusage);
1697 emit_use (reg);
1698 call_fusage = get_insns ();
1699 end_sequence ();
1700 size += GET_MODE_SIZE (mode);
1701 }
1702
1703 /* Put the USE insns before the return. */
1704 emit_insn (call_fusage);
1705
1706 /* Return whatever values were restored by jumping directly to the end
1707 of the function. */
1708 expand_naked_return ();
1709 }
1710
1711 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1712
1713 static enum type_class
1714 type_to_class (tree type)
1715 {
1716 switch (TREE_CODE (type))
1717 {
1718 case VOID_TYPE: return void_type_class;
1719 case INTEGER_TYPE: return integer_type_class;
1720 case ENUMERAL_TYPE: return enumeral_type_class;
1721 case BOOLEAN_TYPE: return boolean_type_class;
1722 case POINTER_TYPE: return pointer_type_class;
1723 case REFERENCE_TYPE: return reference_type_class;
1724 case OFFSET_TYPE: return offset_type_class;
1725 case REAL_TYPE: return real_type_class;
1726 case COMPLEX_TYPE: return complex_type_class;
1727 case FUNCTION_TYPE: return function_type_class;
1728 case METHOD_TYPE: return method_type_class;
1729 case RECORD_TYPE: return record_type_class;
1730 case UNION_TYPE:
1731 case QUAL_UNION_TYPE: return union_type_class;
1732 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1733 ? string_type_class : array_type_class);
1734 case LANG_TYPE: return lang_type_class;
1735 default: return no_type_class;
1736 }
1737 }
1738
1739 /* Expand a call EXP to __builtin_classify_type. */
1740
1741 static rtx
1742 expand_builtin_classify_type (tree exp)
1743 {
1744 if (call_expr_nargs (exp))
1745 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1746 return GEN_INT (no_type_class);
1747 }
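/* Editorial illustration (not part of the original sources): a user-level
   call such as

     int k = __builtin_classify_type (3.14);

   expands to the constant real_type_class, since the argument has
   REAL_TYPE, while a call with no arguments yields no_type_class.  */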
1748
1749 /* This helper macro, meant to be used in mathfn_built_in below,
1750 determines which among a set of three builtin math functions is
1751 appropriate for a given type mode. The `F' and `L' cases are
1752 automatically generated from the `double' case. */
1753 #define CASE_MATHFN(MATHFN) \
1754 CASE_CFN_##MATHFN: \
1755 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1756 fcodel = BUILT_IN_##MATHFN##L ; break;
1757 /* Similar to above, but appends _R after any F/L suffix. */
1758 #define CASE_MATHFN_REENT(MATHFN) \
1759 case CFN_BUILT_IN_##MATHFN##_R: \
1760 case CFN_BUILT_IN_##MATHFN##F_R: \
1761 case CFN_BUILT_IN_##MATHFN##L_R: \
1762 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1763 fcodel = BUILT_IN_##MATHFN##L_R ; break;
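/* Editorial illustration: CASE_MATHFN (SQRT) above expands roughly to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so one use of the macro covers the double, float and long double
   variants of a math builtin at once.  */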
1764
1765 /* Return a function equivalent to FN but operating on floating-point
1766 values of type TYPE, or END_BUILTINS if no such function exists.
1767 This is purely an operation on function codes; it does not guarantee
1768 that the target actually has an implementation of the function. */
1769
1770 static built_in_function
1771 mathfn_built_in_2 (tree type, combined_fn fn)
1772 {
1773 built_in_function fcode, fcodef, fcodel;
1774
1775 switch (fn)
1776 {
1777 CASE_MATHFN (ACOS)
1778 CASE_MATHFN (ACOSH)
1779 CASE_MATHFN (ASIN)
1780 CASE_MATHFN (ASINH)
1781 CASE_MATHFN (ATAN)
1782 CASE_MATHFN (ATAN2)
1783 CASE_MATHFN (ATANH)
1784 CASE_MATHFN (CBRT)
1785 CASE_MATHFN (CEIL)
1786 CASE_MATHFN (CEXPI)
1787 CASE_MATHFN (COPYSIGN)
1788 CASE_MATHFN (COS)
1789 CASE_MATHFN (COSH)
1790 CASE_MATHFN (DREM)
1791 CASE_MATHFN (ERF)
1792 CASE_MATHFN (ERFC)
1793 CASE_MATHFN (EXP)
1794 CASE_MATHFN (EXP10)
1795 CASE_MATHFN (EXP2)
1796 CASE_MATHFN (EXPM1)
1797 CASE_MATHFN (FABS)
1798 CASE_MATHFN (FDIM)
1799 CASE_MATHFN (FLOOR)
1800 CASE_MATHFN (FMA)
1801 CASE_MATHFN (FMAX)
1802 CASE_MATHFN (FMIN)
1803 CASE_MATHFN (FMOD)
1804 CASE_MATHFN (FREXP)
1805 CASE_MATHFN (GAMMA)
1806 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1807 CASE_MATHFN (HUGE_VAL)
1808 CASE_MATHFN (HYPOT)
1809 CASE_MATHFN (ILOGB)
1810 CASE_MATHFN (ICEIL)
1811 CASE_MATHFN (IFLOOR)
1812 CASE_MATHFN (INF)
1813 CASE_MATHFN (IRINT)
1814 CASE_MATHFN (IROUND)
1815 CASE_MATHFN (ISINF)
1816 CASE_MATHFN (J0)
1817 CASE_MATHFN (J1)
1818 CASE_MATHFN (JN)
1819 CASE_MATHFN (LCEIL)
1820 CASE_MATHFN (LDEXP)
1821 CASE_MATHFN (LFLOOR)
1822 CASE_MATHFN (LGAMMA)
1823 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1824 CASE_MATHFN (LLCEIL)
1825 CASE_MATHFN (LLFLOOR)
1826 CASE_MATHFN (LLRINT)
1827 CASE_MATHFN (LLROUND)
1828 CASE_MATHFN (LOG)
1829 CASE_MATHFN (LOG10)
1830 CASE_MATHFN (LOG1P)
1831 CASE_MATHFN (LOG2)
1832 CASE_MATHFN (LOGB)
1833 CASE_MATHFN (LRINT)
1834 CASE_MATHFN (LROUND)
1835 CASE_MATHFN (MODF)
1836 CASE_MATHFN (NAN)
1837 CASE_MATHFN (NANS)
1838 CASE_MATHFN (NEARBYINT)
1839 CASE_MATHFN (NEXTAFTER)
1840 CASE_MATHFN (NEXTTOWARD)
1841 CASE_MATHFN (POW)
1842 CASE_MATHFN (POWI)
1843 CASE_MATHFN (POW10)
1844 CASE_MATHFN (REMAINDER)
1845 CASE_MATHFN (REMQUO)
1846 CASE_MATHFN (RINT)
1847 CASE_MATHFN (ROUND)
1848 CASE_MATHFN (SCALB)
1849 CASE_MATHFN (SCALBLN)
1850 CASE_MATHFN (SCALBN)
1851 CASE_MATHFN (SIGNBIT)
1852 CASE_MATHFN (SIGNIFICAND)
1853 CASE_MATHFN (SIN)
1854 CASE_MATHFN (SINCOS)
1855 CASE_MATHFN (SINH)
1856 CASE_MATHFN (SQRT)
1857 CASE_MATHFN (TAN)
1858 CASE_MATHFN (TANH)
1859 CASE_MATHFN (TGAMMA)
1860 CASE_MATHFN (TRUNC)
1861 CASE_MATHFN (Y0)
1862 CASE_MATHFN (Y1)
1863 CASE_MATHFN (YN)
1864
1865 default:
1866 return END_BUILTINS;
1867 }
1868
1869 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1870 return fcode;
1871 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1872 return fcodef;
1873 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1874 return fcodel;
1875 else
1876 return END_BUILTINS;
1877 }
1878
1879 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1880 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1881 otherwise use the explicit declaration. If we can't do the conversion,
1882 return null. */
1883
1884 static tree
1885 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1886 {
1887 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1888 if (fcode2 == END_BUILTINS)
1889 return NULL_TREE;
1890
1891 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1892 return NULL_TREE;
1893
1894 return builtin_decl_explicit (fcode2);
1895 }
1896
1897 /* Like mathfn_built_in_1, but always use the implicit builtins.  */
1898
1899 tree
1900 mathfn_built_in (tree type, combined_fn fn)
1901 {
1902 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1903 }
1904
1905 /* Like mathfn_built_in_1, but take a built_in_function and
1906 always use the implicit builtins.  */
1907
1908 tree
1909 mathfn_built_in (tree type, enum built_in_function fn)
1910 {
1911 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1912 }
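/* Editorial illustration of a hypothetical use of the helpers above:

     tree fn = mathfn_built_in (long_double_type_node, BUILT_IN_SIN);

   would yield the declaration of sinl, provided the builtin may be used
   implicitly, and NULL_TREE otherwise.  */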
1913
1914 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1915 return its code, otherwise return IFN_LAST. Note that this function
1916 only tests whether the function is defined in internal-fn.def, not whether
1917 it is actually available on the target. */
1918
1919 internal_fn
1920 associated_internal_fn (tree fndecl)
1921 {
1922 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1923 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1924 switch (DECL_FUNCTION_CODE (fndecl))
1925 {
1926 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1927 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1928 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1929 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1930 #include "internal-fn.def"
1931
1932 CASE_FLT_FN (BUILT_IN_POW10):
1933 return IFN_EXP10;
1934
1935 CASE_FLT_FN (BUILT_IN_DREM):
1936 return IFN_REMAINDER;
1937
1938 CASE_FLT_FN (BUILT_IN_SCALBN):
1939 CASE_FLT_FN (BUILT_IN_SCALBLN):
1940 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1941 return IFN_LDEXP;
1942 return IFN_LAST;
1943
1944 default:
1945 return IFN_LAST;
1946 }
1947 }
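/* Editorial illustration of the mapping above: a decl for BUILT_IN_POW10F
   maps to IFN_EXP10, BUILT_IN_DREM maps to IFN_REMAINDER, and
   BUILT_IN_SCALBN maps to IFN_LDEXP only when the return type uses a
   radix-2 floating-point format; anything else yields IFN_LAST.  */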
1948
1949 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1950 on the current target by a call to an internal function, return the
1951 code of that internal function, otherwise return IFN_LAST. The caller
1952 is responsible for ensuring that any side-effects of the built-in
1953 call are dealt with correctly. E.g. if CALL sets errno, the caller
1954 must decide that the errno result isn't needed or make it available
1955 in some other way. */
1956
1957 internal_fn
1958 replacement_internal_fn (gcall *call)
1959 {
1960 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1961 {
1962 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1963 if (ifn != IFN_LAST)
1964 {
1965 tree_pair types = direct_internal_fn_types (ifn, call);
1966 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1967 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1968 return ifn;
1969 }
1970 }
1971 return IFN_LAST;
1972 }
1973
1974 /* Expand a call to the builtin ternary math functions (fma).
1975 Return NULL_RTX if a normal call should be emitted rather than expanding the
1976 function in-line. EXP is the expression that is a call to the builtin
1977 function; if convenient, the result should be placed in TARGET.
1978 SUBTARGET may be used as the target for computing one of EXP's
1979 operands. */
1980
1981 static rtx
1982 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1983 {
1984 optab builtin_optab;
1985 rtx op0, op1, op2, result;
1986 rtx_insn *insns;
1987 tree fndecl = get_callee_fndecl (exp);
1988 tree arg0, arg1, arg2;
1989 machine_mode mode;
1990
1991 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1992 return NULL_RTX;
1993
1994 arg0 = CALL_EXPR_ARG (exp, 0);
1995 arg1 = CALL_EXPR_ARG (exp, 1);
1996 arg2 = CALL_EXPR_ARG (exp, 2);
1997
1998 switch (DECL_FUNCTION_CODE (fndecl))
1999 {
2000 CASE_FLT_FN (BUILT_IN_FMA):
2001 builtin_optab = fma_optab; break;
2002 default:
2003 gcc_unreachable ();
2004 }
2005
2006 /* Make a suitable register to place result in. */
2007 mode = TYPE_MODE (TREE_TYPE (exp));
2008
2009 /* Before working hard, check whether the instruction is available. */
2010 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2011 return NULL_RTX;
2012
2013 result = gen_reg_rtx (mode);
2014
2015 /* Always stabilize the argument list. */
2016 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2017 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2018 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2019
2020 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2021 op1 = expand_normal (arg1);
2022 op2 = expand_normal (arg2);
2023
2024 start_sequence ();
2025
2026 /* Compute into RESULT.
2027 Set RESULT to wherever the result comes back. */
2028 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2029 result, 0);
2030
2031 /* If we were unable to expand via the builtin, stop the sequence
2032 (without outputting the insns) and call to the library function
2033 with the stabilized argument list. */
2034 if (result == 0)
2035 {
2036 end_sequence ();
2037 return expand_call (exp, target, target == const0_rtx);
2038 }
2039
2040 /* Output the entire sequence. */
2041 insns = get_insns ();
2042 end_sequence ();
2043 emit_insn (insns);
2044
2045 return result;
2046 }
2047
2048 /* Expand a call to the builtin sin and cos math functions.
2049 Return NULL_RTX if a normal call should be emitted rather than expanding the
2050 function in-line. EXP is the expression that is a call to the builtin
2051 function; if convenient, the result should be placed in TARGET.
2052 SUBTARGET may be used as the target for computing one of EXP's
2053 operands. */
2054
2055 static rtx
2056 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2057 {
2058 optab builtin_optab;
2059 rtx op0;
2060 rtx_insn *insns;
2061 tree fndecl = get_callee_fndecl (exp);
2062 machine_mode mode;
2063 tree arg;
2064
2065 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2066 return NULL_RTX;
2067
2068 arg = CALL_EXPR_ARG (exp, 0);
2069
2070 switch (DECL_FUNCTION_CODE (fndecl))
2071 {
2072 CASE_FLT_FN (BUILT_IN_SIN):
2073 CASE_FLT_FN (BUILT_IN_COS):
2074 builtin_optab = sincos_optab; break;
2075 default:
2076 gcc_unreachable ();
2077 }
2078
2079 /* Make a suitable register to place result in. */
2080 mode = TYPE_MODE (TREE_TYPE (exp));
2081
2082 /* Check if the sincos insn is available, otherwise fall back
2083 to the sin or cos insn.  */
2084 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2085 switch (DECL_FUNCTION_CODE (fndecl))
2086 {
2087 CASE_FLT_FN (BUILT_IN_SIN):
2088 builtin_optab = sin_optab; break;
2089 CASE_FLT_FN (BUILT_IN_COS):
2090 builtin_optab = cos_optab; break;
2091 default:
2092 gcc_unreachable ();
2093 }
2094
2095 /* Before working hard, check whether the instruction is available. */
2096 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2097 {
2098 rtx result = gen_reg_rtx (mode);
2099
2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
2102 side-effects more than once.  */
2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2104
2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2106
2107 start_sequence ();
2108
2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 if (builtin_optab == sincos_optab)
2112 {
2113 int ok;
2114
2115 switch (DECL_FUNCTION_CODE (fndecl))
2116 {
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2119 break;
2120 CASE_FLT_FN (BUILT_IN_COS):
2121 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2122 break;
2123 default:
2124 gcc_unreachable ();
2125 }
2126 gcc_assert (ok);
2127 }
2128 else
2129 result = expand_unop (mode, builtin_optab, op0, result, 0);
2130
2131 if (result != 0)
2132 {
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2137 return result;
2138 }
2139
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call to the library function
2142 with the stabilized argument list. */
2143 end_sequence ();
2144 }
2145
2146 return expand_call (exp, target, target == const0_rtx);
2147 }
2148
2149 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2150 return an RTL instruction code that implements the functionality.
2151 If that isn't possible or available return CODE_FOR_nothing. */
2152
2153 static enum insn_code
2154 interclass_mathfn_icode (tree arg, tree fndecl)
2155 {
2156 bool errno_set = false;
2157 optab builtin_optab = unknown_optab;
2158 machine_mode mode;
2159
2160 switch (DECL_FUNCTION_CODE (fndecl))
2161 {
2162 CASE_FLT_FN (BUILT_IN_ILOGB):
2163 errno_set = true; builtin_optab = ilogb_optab; break;
2164 CASE_FLT_FN (BUILT_IN_ISINF):
2165 builtin_optab = isinf_optab; break;
2166 case BUILT_IN_ISNORMAL:
2167 case BUILT_IN_ISFINITE:
2168 CASE_FLT_FN (BUILT_IN_FINITE):
2169 case BUILT_IN_FINITED32:
2170 case BUILT_IN_FINITED64:
2171 case BUILT_IN_FINITED128:
2172 case BUILT_IN_ISINFD32:
2173 case BUILT_IN_ISINFD64:
2174 case BUILT_IN_ISINFD128:
2175 /* These builtins have no optabs (yet). */
2176 break;
2177 default:
2178 gcc_unreachable ();
2179 }
2180
2181 /* There's no easy way to detect the case we need to set EDOM. */
2182 if (flag_errno_math && errno_set)
2183 return CODE_FOR_nothing;
2184
2185 /* Optab mode depends on the mode of the input argument. */
2186 mode = TYPE_MODE (TREE_TYPE (arg));
2187
2188 if (builtin_optab)
2189 return optab_handler (builtin_optab, mode);
2190 return CODE_FOR_nothing;
2191 }
2192
2193 /* Expand a call to one of the builtin math functions that operate on
2194 a floating-point argument and output an integer result (ilogb, isinf,
2195 isnan, etc).
2196 Return 0 if a normal call should be emitted rather than expanding the
2197 function in-line. EXP is the expression that is a call to the builtin
2198 function; if convenient, the result should be placed in TARGET. */
2199
2200 static rtx
2201 expand_builtin_interclass_mathfn (tree exp, rtx target)
2202 {
2203 enum insn_code icode = CODE_FOR_nothing;
2204 rtx op0;
2205 tree fndecl = get_callee_fndecl (exp);
2206 machine_mode mode;
2207 tree arg;
2208
2209 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2210 return NULL_RTX;
2211
2212 arg = CALL_EXPR_ARG (exp, 0);
2213 icode = interclass_mathfn_icode (arg, fndecl);
2214 mode = TYPE_MODE (TREE_TYPE (arg));
2215
2216 if (icode != CODE_FOR_nothing)
2217 {
2218 struct expand_operand ops[1];
2219 rtx_insn *last = get_last_insn ();
2220 tree orig_arg = arg;
2221
2222 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 need to expand the argument again. This way, we will not perform
2224 side-effects more than once.  */
2225 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2226
2227 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2228
2229 if (mode != GET_MODE (op0))
2230 op0 = convert_to_mode (mode, op0, 0);
2231
2232 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2233 if (maybe_legitimize_operands (icode, 0, 1, ops)
2234 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2235 return ops[0].value;
2236
2237 delete_insns_since (last);
2238 CALL_EXPR_ARG (exp, 0) = orig_arg;
2239 }
2240
2241 return NULL_RTX;
2242 }
2243
2244 /* Expand a call to the builtin sincos math function.
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function. */
2248
2249 static rtx
2250 expand_builtin_sincos (tree exp)
2251 {
2252 rtx op0, op1, op2, target1, target2;
2253 machine_mode mode;
2254 tree arg, sinp, cosp;
2255 int result;
2256 location_t loc = EXPR_LOCATION (exp);
2257 tree alias_type, alias_off;
2258
2259 if (!validate_arglist (exp, REAL_TYPE,
2260 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2261 return NULL_RTX;
2262
2263 arg = CALL_EXPR_ARG (exp, 0);
2264 sinp = CALL_EXPR_ARG (exp, 1);
2265 cosp = CALL_EXPR_ARG (exp, 2);
2266
2267 /* Make a suitable register to place result in. */
2268 mode = TYPE_MODE (TREE_TYPE (arg));
2269
2270 /* Check if sincos insn is available, otherwise emit the call. */
2271 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2272 return NULL_RTX;
2273
2274 target1 = gen_reg_rtx (mode);
2275 target2 = gen_reg_rtx (mode);
2276
2277 op0 = expand_normal (arg);
2278 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2279 alias_off = build_int_cst (alias_type, 0);
2280 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2281 sinp, alias_off));
2282 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2283 cosp, alias_off));
2284
2285 /* Compute into target1 and target2.
2286 Set TARGET to wherever the result comes back. */
2287 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2288 gcc_assert (result);
2289
2290 /* Move target1 and target2 to the memory locations indicated
2291 by op1 and op2. */
2292 emit_move_insn (op1, target1);
2293 emit_move_insn (op2, target2);
2294
2295 return const0_rtx;
2296 }
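/* Editorial sketch of the expansion above: when the target provides a
   sincos optab for the argument's mode, a user-level call such as

     double s, c;
     sincos (x, &s, &c);

   becomes a single two-output instruction computing both values, followed
   by moves into the memory pointed to by the sin and cos arguments.  */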
2297
2298 /* Expand a call to the internal cexpi builtin to the sincos math function.
2299 EXP is the expression that is a call to the builtin function; if convenient,
2300 the result should be placed in TARGET. */
2301
2302 static rtx
2303 expand_builtin_cexpi (tree exp, rtx target)
2304 {
2305 tree fndecl = get_callee_fndecl (exp);
2306 tree arg, type;
2307 machine_mode mode;
2308 rtx op0, op1, op2;
2309 location_t loc = EXPR_LOCATION (exp);
2310
2311 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2312 return NULL_RTX;
2313
2314 arg = CALL_EXPR_ARG (exp, 0);
2315 type = TREE_TYPE (arg);
2316 mode = TYPE_MODE (TREE_TYPE (arg));
2317
2318 /* Try expanding via a sincos optab, fall back to emitting a libcall
2319 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2320 is only generated from sincos or cexp, or if we have either of them.  */
2321 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2322 {
2323 op1 = gen_reg_rtx (mode);
2324 op2 = gen_reg_rtx (mode);
2325
2326 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2327
2328 /* Compute into op1 and op2. */
2329 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2330 }
2331 else if (targetm.libc_has_function (function_sincos))
2332 {
2333 tree call, fn = NULL_TREE;
2334 tree top1, top2;
2335 rtx op1a, op2a;
2336
2337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2338 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2339 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2340 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2341 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2342 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2343 else
2344 gcc_unreachable ();
2345
2346 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2347 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2348 op1a = copy_addr_to_reg (XEXP (op1, 0));
2349 op2a = copy_addr_to_reg (XEXP (op2, 0));
2350 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2351 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2352
2353 /* Make sure not to fold the sincos call again. */
2354 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2355 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2356 call, 3, arg, top1, top2));
2357 }
2358 else
2359 {
2360 tree call, fn = NULL_TREE, narg;
2361 tree ctype = build_complex_type (type);
2362
2363 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2364 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2366 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2367 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2368 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2369 else
2370 gcc_unreachable ();
2371
2372 /* If we don't have a decl for cexp, create one.  This is the
2373 friendliest fallback if the user calls __builtin_cexpi
2374 when the target lacks full C99 function support.  */
2375 if (fn == NULL_TREE)
2376 {
2377 tree fntype;
2378 const char *name = NULL;
2379
2380 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2381 name = "cexpf";
2382 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2383 name = "cexp";
2384 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2385 name = "cexpl";
2386
2387 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2388 fn = build_fn_decl (name, fntype);
2389 }
2390
2391 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2392 build_real (type, dconst0), arg);
2393
2394 /* Make sure not to fold the cexp call again. */
2395 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2396 return expand_expr (build_call_nary (ctype, call, 1, narg),
2397 target, VOIDmode, EXPAND_NORMAL);
2398 }
2399
2400 /* Now build the proper return type. */
2401 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2402 make_tree (TREE_TYPE (arg), op2),
2403 make_tree (TREE_TYPE (arg), op1)),
2404 target, VOIDmode, EXPAND_NORMAL);
2405 }
2406
2407 /* Conveniently construct a function call expression. FNDECL names the
2408 function to be called, N is the number of arguments, and the "..."
2409 parameters are the argument expressions.  Unlike build_call_expr
2410 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2411
2412 static tree
2413 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2414 {
2415 va_list ap;
2416 tree fntype = TREE_TYPE (fndecl);
2417 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2418
2419 va_start (ap, n);
2420 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2421 va_end (ap);
2422 SET_EXPR_LOCATION (fn, loc);
2423 return fn;
2424 }
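/* A hypothetical use, mirroring the calls later in this file:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (loc, fn, 3, dest, src, len);

   builds an unfolded CALL_EXPR to memcpy that can then be expanded.  */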
2425
2426 /* Expand a call to one of the builtin rounding functions gcc defines
2427 as an extension (lfloor and lceil). As these are gcc extensions we
2428 do not need to worry about setting errno to EDOM.
2429 If expanding via optab fails, lower the expression to (int)(floor(x)).
2430 EXP is the expression that is a call to the builtin function;
2431 if convenient, the result should be placed in TARGET. */
2432
2433 static rtx
2434 expand_builtin_int_roundingfn (tree exp, rtx target)
2435 {
2436 convert_optab builtin_optab;
2437 rtx op0, tmp;
2438 rtx_insn *insns;
2439 tree fndecl = get_callee_fndecl (exp);
2440 enum built_in_function fallback_fn;
2441 tree fallback_fndecl;
2442 machine_mode mode;
2443 tree arg;
2444
2445 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2446 gcc_unreachable ();
2447
2448 arg = CALL_EXPR_ARG (exp, 0);
2449
2450 switch (DECL_FUNCTION_CODE (fndecl))
2451 {
2452 CASE_FLT_FN (BUILT_IN_ICEIL):
2453 CASE_FLT_FN (BUILT_IN_LCEIL):
2454 CASE_FLT_FN (BUILT_IN_LLCEIL):
2455 builtin_optab = lceil_optab;
2456 fallback_fn = BUILT_IN_CEIL;
2457 break;
2458
2459 CASE_FLT_FN (BUILT_IN_IFLOOR):
2460 CASE_FLT_FN (BUILT_IN_LFLOOR):
2461 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2462 builtin_optab = lfloor_optab;
2463 fallback_fn = BUILT_IN_FLOOR;
2464 break;
2465
2466 default:
2467 gcc_unreachable ();
2468 }
2469
2470 /* Make a suitable register to place result in. */
2471 mode = TYPE_MODE (TREE_TYPE (exp));
2472
2473 target = gen_reg_rtx (mode);
2474
2475 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2476 need to expand the argument again. This way, we will not perform
2477 side-effects more than once.  */
2478 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2479
2480 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2481
2482 start_sequence ();
2483
2484 /* Compute into TARGET. */
2485 if (expand_sfix_optab (target, op0, builtin_optab))
2486 {
2487 /* Output the entire sequence. */
2488 insns = get_insns ();
2489 end_sequence ();
2490 emit_insn (insns);
2491 return target;
2492 }
2493
2494 /* If we were unable to expand via the builtin, stop the sequence
2495 (without outputting the insns). */
2496 end_sequence ();
2497
2498 /* Fall back to floating point rounding optab. */
2499 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2500
2501 /* For non-C99 targets we may end up without a fallback fndecl here
2502 if the user called __builtin_lfloor directly. In this case emit
2503 a call to the floor/ceil variants nevertheless.  This should result
2504 in the best user experience for targets without full C99 support.  */
2505 if (fallback_fndecl == NULL_TREE)
2506 {
2507 tree fntype;
2508 const char *name = NULL;
2509
2510 switch (DECL_FUNCTION_CODE (fndecl))
2511 {
2512 case BUILT_IN_ICEIL:
2513 case BUILT_IN_LCEIL:
2514 case BUILT_IN_LLCEIL:
2515 name = "ceil";
2516 break;
2517 case BUILT_IN_ICEILF:
2518 case BUILT_IN_LCEILF:
2519 case BUILT_IN_LLCEILF:
2520 name = "ceilf";
2521 break;
2522 case BUILT_IN_ICEILL:
2523 case BUILT_IN_LCEILL:
2524 case BUILT_IN_LLCEILL:
2525 name = "ceill";
2526 break;
2527 case BUILT_IN_IFLOOR:
2528 case BUILT_IN_LFLOOR:
2529 case BUILT_IN_LLFLOOR:
2530 name = "floor";
2531 break;
2532 case BUILT_IN_IFLOORF:
2533 case BUILT_IN_LFLOORF:
2534 case BUILT_IN_LLFLOORF:
2535 name = "floorf";
2536 break;
2537 case BUILT_IN_IFLOORL:
2538 case BUILT_IN_LFLOORL:
2539 case BUILT_IN_LLFLOORL:
2540 name = "floorl";
2541 break;
2542 default:
2543 gcc_unreachable ();
2544 }
2545
2546 fntype = build_function_type_list (TREE_TYPE (arg),
2547 TREE_TYPE (arg), NULL_TREE);
2548 fallback_fndecl = build_fn_decl (name, fntype);
2549 }
2550
2551 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2552
2553 tmp = expand_normal (exp);
2554 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2555
2556 /* Truncate the result of floating point optab to integer
2557 via expand_fix (). */
2558 target = gen_reg_rtx (mode);
2559 expand_fix (target, tmp, 0);
2560
2561 return target;
2562 }
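/* Editorial sketch of the fallback path above: if no lceil/lfloor optab
   is available, a call such as

     long r = __builtin_lfloor (x);

   is lowered to roughly (long) floor (x), i.e. a call to the floor
   variant followed by an expand_fix conversion into the integer mode.  */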
2563
2564 /* Expand a call to one of the builtin math functions doing integer
2565 conversion (lrint).
2566 Return 0 if a normal call should be emitted rather than expanding the
2567 function in-line. EXP is the expression that is a call to the builtin
2568 function; if convenient, the result should be placed in TARGET. */
2569
2570 static rtx
2571 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2572 {
2573 convert_optab builtin_optab;
2574 rtx op0;
2575 rtx_insn *insns;
2576 tree fndecl = get_callee_fndecl (exp);
2577 tree arg;
2578 machine_mode mode;
2579 enum built_in_function fallback_fn = BUILT_IN_NONE;
2580
2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 gcc_unreachable ();
2583
2584 arg = CALL_EXPR_ARG (exp, 0);
2585
2586 switch (DECL_FUNCTION_CODE (fndecl))
2587 {
2588 CASE_FLT_FN (BUILT_IN_IRINT):
2589 fallback_fn = BUILT_IN_LRINT;
2590 gcc_fallthrough ();
2591 CASE_FLT_FN (BUILT_IN_LRINT):
2592 CASE_FLT_FN (BUILT_IN_LLRINT):
2593 builtin_optab = lrint_optab;
2594 break;
2595
2596 CASE_FLT_FN (BUILT_IN_IROUND):
2597 fallback_fn = BUILT_IN_LROUND;
2598 gcc_fallthrough ();
2599 CASE_FLT_FN (BUILT_IN_LROUND):
2600 CASE_FLT_FN (BUILT_IN_LLROUND):
2601 builtin_optab = lround_optab;
2602 break;
2603
2604 default:
2605 gcc_unreachable ();
2606 }
2607
2608 /* There's no easy way to detect the case we need to set EDOM. */
2609 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2610 return NULL_RTX;
2611
2612 /* Make a suitable register to place result in. */
2613 mode = TYPE_MODE (TREE_TYPE (exp));
2614
2615 /* There's no easy way to detect the case we need to set EDOM. */
2616 if (!flag_errno_math)
2617 {
2618 rtx result = gen_reg_rtx (mode);
2619
2620 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2621 need to expand the argument again. This way, we will not perform
2622 side-effects more than once.  */
2623 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2624
2625 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2626
2627 start_sequence ();
2628
2629 if (expand_sfix_optab (result, op0, builtin_optab))
2630 {
2631 /* Output the entire sequence. */
2632 insns = get_insns ();
2633 end_sequence ();
2634 emit_insn (insns);
2635 return result;
2636 }
2637
2638 /* If we were unable to expand via the builtin, stop the sequence
2639 (without outputting the insns) and call to the library function
2640 with the stabilized argument list. */
2641 end_sequence ();
2642 }
2643
2644 if (fallback_fn != BUILT_IN_NONE)
2645 {
2646 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2647 targets, (int) round (x) should never be transformed into
2648 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2649 a call to lround in the hope that the target provides at least some
2650 C99 functions.  This should result in the best user experience for
2651 targets without full C99 support.  */
2652 tree fallback_fndecl = mathfn_built_in_1
2653 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2654
2655 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2656 fallback_fndecl, 1, arg);
2657
2658 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2659 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2660 return convert_to_mode (mode, target, 0);
2661 }
2662
2663 return expand_call (exp, target, target == const0_rtx);
2664 }
2665
2666 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2667 a normal call should be emitted rather than expanding the function
2668 in-line. EXP is the expression that is a call to the builtin
2669 function; if convenient, the result should be placed in TARGET. */
2670
2671 static rtx
2672 expand_builtin_powi (tree exp, rtx target)
2673 {
2674 tree arg0, arg1;
2675 rtx op0, op1;
2676 machine_mode mode;
2677 machine_mode mode2;
2678
2679 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2680 return NULL_RTX;
2681
2682 arg0 = CALL_EXPR_ARG (exp, 0);
2683 arg1 = CALL_EXPR_ARG (exp, 1);
2684 mode = TYPE_MODE (TREE_TYPE (exp));
2685
2686 /* Emit a libcall to libgcc. */
2687
2688 /* Mode of the 2nd argument must match that of an int. */
2689 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2690
2691 if (target == NULL_RTX)
2692 target = gen_reg_rtx (mode);
2693
2694 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2695 if (GET_MODE (op0) != mode)
2696 op0 = convert_to_mode (mode, op0, 0);
2697 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2698 if (GET_MODE (op1) != mode2)
2699 op1 = convert_to_mode (mode2, op1, 0);
2700
2701 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2702 target, LCT_CONST, mode, 2,
2703 op0, mode, op1, mode2);
2704
2705 return target;
2706 }
2707
2708 /* Expand expression EXP which is a call to the strlen builtin. Return
2709 NULL_RTX if we failed (the caller should emit a normal call), otherwise
2710 try to get the result in TARGET, if convenient. */
2711
2712 static rtx
2713 expand_builtin_strlen (tree exp, rtx target,
2714 machine_mode target_mode)
2715 {
2716 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2717 return NULL_RTX;
2718 else
2719 {
2720 struct expand_operand ops[4];
2721 rtx pat;
2722 tree len;
2723 tree src = CALL_EXPR_ARG (exp, 0);
2724 rtx src_reg;
2725 rtx_insn *before_strlen;
2726 machine_mode insn_mode = target_mode;
2727 enum insn_code icode = CODE_FOR_nothing;
2728 unsigned int align;
2729
2730 /* If the length can be computed at compile-time, return it. */
2731 len = c_strlen (src, 0);
2732 if (len)
2733 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2734
2735 /* If the length can be computed at compile-time and is a constant
2736 integer, but there are side-effects in src, evaluate
2737 src for side-effects, then return len.
2738 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2739 can be optimized into: i++; x = 3; */
2740 len = c_strlen (src, 1);
2741 if (len && TREE_CODE (len) == INTEGER_CST)
2742 {
2743 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2744 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2745 }
2746
2747 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2748
2749 /* If SRC is not a pointer type, don't do this operation inline. */
2750 if (align == 0)
2751 return NULL_RTX;
2752
2753 /* Bail out if we can't compute strlen in the right mode. */
2754 while (insn_mode != VOIDmode)
2755 {
2756 icode = optab_handler (strlen_optab, insn_mode);
2757 if (icode != CODE_FOR_nothing)
2758 break;
2759
2760 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2761 }
2762 if (insn_mode == VOIDmode)
2763 return NULL_RTX;
2764
2765 /* Make a place to hold the source address. We will not expand
2766 the actual source until we are sure that the expansion will
2767 not fail -- there are trees that cannot be expanded twice. */
2768 src_reg = gen_reg_rtx (Pmode);
2769
2770 /* Mark the beginning of the strlen sequence so we can emit the
2771 source operand later. */
2772 before_strlen = get_last_insn ();
2773
2774 create_output_operand (&ops[0], target, insn_mode);
2775 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2776 create_integer_operand (&ops[2], 0);
2777 create_integer_operand (&ops[3], align);
2778 if (!maybe_expand_insn (icode, 4, ops))
2779 return NULL_RTX;
2780
2781 /* Now that we are assured of success, expand the source. */
2782 start_sequence ();
2783 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2784 if (pat != src_reg)
2785 {
2786 #ifdef POINTERS_EXTEND_UNSIGNED
2787 if (GET_MODE (pat) != Pmode)
2788 pat = convert_to_mode (Pmode, pat,
2789 POINTERS_EXTEND_UNSIGNED);
2790 #endif
2791 emit_move_insn (src_reg, pat);
2792 }
2793 pat = get_insns ();
2794 end_sequence ();
2795
2796 if (before_strlen)
2797 emit_insn_after (pat, before_strlen);
2798 else
2799 emit_insn_before (pat, get_insns ());
2800
2801 /* Return the value in the proper mode for this function. */
2802 if (GET_MODE (ops[0].value) == target_mode)
2803 target = ops[0].value;
2804 else if (target != 0)
2805 convert_move (target, ops[0].value, 0);
2806 else
2807 target = convert_to_mode (target_mode, ops[0].value, 0);
2808
2809 return target;
2810 }
2811 }
2812
2813 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2814 bytes from constant string DATA + OFFSET and return it as target
2815 constant. */
2816
2817 static rtx
2818 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2819 machine_mode mode)
2820 {
2821 const char *str = (const char *) data;
2822
2823 gcc_assert (offset >= 0
2824 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2825 <= strlen (str) + 1));
2826
2827 return c_readstr (str + offset, mode);
2828 }
2829
2830 /* LEN specifies the length of the block for the memcpy/memset operation.
2831 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2832 In some cases we can make a very likely guess about the maximum size,
2833 in which case we store it in PROBABLE_MAX_SIZE.  */
2834
2835 static void
2836 determine_block_size (tree len, rtx len_rtx,
2837 unsigned HOST_WIDE_INT *min_size,
2838 unsigned HOST_WIDE_INT *max_size,
2839 unsigned HOST_WIDE_INT *probable_max_size)
2840 {
2841 if (CONST_INT_P (len_rtx))
2842 {
2843 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2844 return;
2845 }
2846 else
2847 {
2848 wide_int min, max;
2849 enum value_range_type range_type = VR_UNDEFINED;
2850
2851 /* Determine bounds from the type. */
2852 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2853 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2854 else
2855 *min_size = 0;
2856 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2857 *probable_max_size = *max_size
2858 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2859 else
2860 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2861
2862 if (TREE_CODE (len) == SSA_NAME)
2863 range_type = get_range_info (len, &min, &max);
2864 if (range_type == VR_RANGE)
2865 {
2866 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2867 *min_size = min.to_uhwi ();
2868 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2869 *probable_max_size = *max_size = max.to_uhwi ();
2870 }
2871 else if (range_type == VR_ANTI_RANGE)
2872 {
2873 /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
2874 if (min == 0)
2875 {
2876 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2877 *min_size = max.to_uhwi () + 1;
2878 }
2879 /* Code like
2880
2881 int n;
2882 if (n < 100)
2883 memcpy (a, b, n)
2884
2885 produces an anti-range allowing negative values of N.  We can
2886 still use that information and guess that N is not negative.
2887 */
2888 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2889 *probable_max_size = min.to_uhwi () - 1;
2890 }
2891 }
2892 gcc_checking_assert (*max_size <=
2893 (unsigned HOST_WIDE_INT)
2894 GET_MODE_MASK (GET_MODE (len_rtx)));
2895 }
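/* Worked example (editorial): if LEN is an SSA_NAME with anti-range
   ~[0, 3], the length cannot be 0..3, so *MIN_SIZE becomes 4.  For the
   "if (n < 100) memcpy (a, b, n)" case described above, the anti-range
   excludes a huge region starting at 100, so *PROBABLE_MAX_SIZE would
   typically become 99 on the guess that N is not negative.  */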
2896
2897 /* Helper function to do the actual work for expand_builtin_memcpy. */
2898
2899 static rtx
2900 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2901 {
2902 const char *src_str;
2903 unsigned int src_align = get_pointer_alignment (src);
2904 unsigned int dest_align = get_pointer_alignment (dest);
2905 rtx dest_mem, src_mem, dest_addr, len_rtx;
2906 HOST_WIDE_INT expected_size = -1;
2907 unsigned int expected_align = 0;
2908 unsigned HOST_WIDE_INT min_size;
2909 unsigned HOST_WIDE_INT max_size;
2910 unsigned HOST_WIDE_INT probable_max_size;
2911
2912 /* If DEST is not a pointer type, call the normal function. */
2913 if (dest_align == 0)
2914 return NULL_RTX;
2915
2916 /* If SRC is not a pointer type, don't do this
2917 operation in-line.  */
2918 if (src_align == 0)
2919 return NULL_RTX;
2920
2921 if (currently_expanding_gimple_stmt)
2922 stringop_block_profile (currently_expanding_gimple_stmt,
2923 &expected_align, &expected_size);
2924
2925 if (expected_align < dest_align)
2926 expected_align = dest_align;
2927 dest_mem = get_memory_rtx (dest, len);
2928 set_mem_align (dest_mem, dest_align);
2929 len_rtx = expand_normal (len);
2930 determine_block_size (len, len_rtx, &min_size, &max_size,
2931 &probable_max_size);
2932 src_str = c_getstr (src);
2933
2934 /* If SRC is a string constant and block move would be done
2935 by pieces, we can avoid loading the string from memory
2936 and only store the computed constants.  */
2937 if (src_str
2938 && CONST_INT_P (len_rtx)
2939 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2940 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2941 CONST_CAST (char *, src_str),
2942 dest_align, false))
2943 {
2944 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2945 builtin_memcpy_read_str,
2946 CONST_CAST (char *, src_str),
2947 dest_align, false, 0);
2948 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2949 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2950 return dest_mem;
2951 }
2952
2953 src_mem = get_memory_rtx (src, len);
2954 set_mem_align (src_mem, src_align);
2955
2956 /* Copy word part most expediently. */
2957 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2958 CALL_EXPR_TAILCALL (exp)
2959 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2960 expected_align, expected_size,
2961 min_size, max_size, probable_max_size);
2962
2963 if (dest_addr == 0)
2964 {
2965 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2966 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2967 }
2968
2969 return dest_addr;
2970 }
2971
2972 /* Expand a call EXP to the memcpy builtin.
2973 Return NULL_RTX if we failed (the caller should emit a normal call),
2974 otherwise try to get the result in TARGET, if convenient (and in
2975 mode MODE if that's convenient). */
2976
2977 static rtx
2978 expand_builtin_memcpy (tree exp, rtx target)
2979 {
2980 if (!validate_arglist (exp,
2981 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2982 return NULL_RTX;
2983 else
2984 {
2985 tree dest = CALL_EXPR_ARG (exp, 0);
2986 tree src = CALL_EXPR_ARG (exp, 1);
2987 tree len = CALL_EXPR_ARG (exp, 2);
2988 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2989 }
2990 }
2991
2992 /* Expand an instrumented call EXP to the memcpy builtin.
2993 Return NULL_RTX if we failed (the caller should emit a normal call),
2994 otherwise try to get the result in TARGET, if convenient (and in
2995 mode MODE if that's convenient). */
2996
2997 static rtx
2998 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2999 {
3000 if (!validate_arglist (exp,
3001 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3002 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3003 INTEGER_TYPE, VOID_TYPE))
3004 return NULL_RTX;
3005 else
3006 {
3007 tree dest = CALL_EXPR_ARG (exp, 0);
3008 tree src = CALL_EXPR_ARG (exp, 2);
3009 tree len = CALL_EXPR_ARG (exp, 4);
3010 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3011
3012 /* Return src bounds with the result. */
3013 if (res)
3014 {
3015 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3016 expand_normal (CALL_EXPR_ARG (exp, 1)));
3017 res = chkp_join_splitted_slot (res, bnd);
3018 }
3019 return res;
3020 }
3021 }
3022
3023 /* Expand a call EXP to the mempcpy builtin.
3024 Return NULL_RTX if we failed; the caller should emit a normal call,
3025 otherwise try to get the result in TARGET, if convenient (and in
3026 mode MODE if that's convenient). If ENDP is 0 return the
3027 destination pointer, if ENDP is 1 return the end pointer ala
3028 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3029 stpcpy. */
3030
3031 static rtx
3032 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3033 {
3034 if (!validate_arglist (exp,
3035 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3036 return NULL_RTX;
3037 else
3038 {
3039 tree dest = CALL_EXPR_ARG (exp, 0);
3040 tree src = CALL_EXPR_ARG (exp, 1);
3041 tree len = CALL_EXPR_ARG (exp, 2);
3042 return expand_builtin_mempcpy_args (dest, src, len,
3043 target, mode, /*endp=*/ 1,
3044 exp);
3045 }
3046 }
3047
3048 /* Expand an instrumented call EXP to the mempcpy builtin.
3049 Return NULL_RTX if we failed (the caller should emit a normal call),
3050 otherwise try to get the result in TARGET, if convenient (and in
3051 mode MODE if that's convenient). */
3052
3053 static rtx
3054 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3055 {
3056 if (!validate_arglist (exp,
3057 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3058 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3059 INTEGER_TYPE, VOID_TYPE))
3060 return NULL_RTX;
3061 else
3062 {
3063 tree dest = CALL_EXPR_ARG (exp, 0);
3064 tree src = CALL_EXPR_ARG (exp, 2);
3065 tree len = CALL_EXPR_ARG (exp, 4);
3066 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3067 mode, 1, exp);
3068
3069 /* Return src bounds with the result. */
3070 if (res)
3071 {
3072 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3073 expand_normal (CALL_EXPR_ARG (exp, 1)));
3074 res = chkp_join_splitted_slot (res, bnd);
3075 }
3076 return res;
3077 }
3078 }
3079
3080 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3081 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3082 so that this can also be called without constructing an actual CALL_EXPR.
3083 The other arguments and return value are the same as for
3084 expand_builtin_mempcpy. */
3085
3086 static rtx
3087 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3088 rtx target, machine_mode mode, int endp,
3089 tree orig_exp)
3090 {
3091 tree fndecl = get_callee_fndecl (orig_exp);
3092
3093 /* If return value is ignored, transform mempcpy into memcpy. */
3094 if (target == const0_rtx
3095 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3096 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3097 {
3098 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3099 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3100 dest, src, len);
3101 return expand_expr (result, target, mode, EXPAND_NORMAL);
3102 }
3103 else if (target == const0_rtx
3104 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3105 {
3106 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3107 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3108 dest, src, len);
3109 return expand_expr (result, target, mode, EXPAND_NORMAL);
3110 }
3111 else
3112 {
3113 const char *src_str;
3114 unsigned int src_align = get_pointer_alignment (src);
3115 unsigned int dest_align = get_pointer_alignment (dest);
3116 rtx dest_mem, src_mem, len_rtx;
3117
3118 /* If either SRC or DEST is not a pointer type, don't do this
3119 operation in-line. */
3120 if (dest_align == 0 || src_align == 0)
3121 return NULL_RTX;
3122
3123 /* If LEN is not constant, call the normal function. */
3124 if (! tree_fits_uhwi_p (len))
3125 return NULL_RTX;
3126
3127 len_rtx = expand_normal (len);
3128 src_str = c_getstr (src);
3129
3130 /* If SRC is a string constant and block move would be done
3131 by pieces, we can avoid loading the string from memory
3132 and only store the computed constants.  */
3133 if (src_str
3134 && CONST_INT_P (len_rtx)
3135 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3136 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3137 CONST_CAST (char *, src_str),
3138 dest_align, false))
3139 {
3140 dest_mem = get_memory_rtx (dest, len);
3141 set_mem_align (dest_mem, dest_align);
3142 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3143 builtin_memcpy_read_str,
3144 CONST_CAST (char *, src_str),
3145 dest_align, false, endp);
3146 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3147 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3148 return dest_mem;
3149 }
3150
3151 if (CONST_INT_P (len_rtx)
3152 && can_move_by_pieces (INTVAL (len_rtx),
3153 MIN (dest_align, src_align)))
3154 {
3155 dest_mem = get_memory_rtx (dest, len);
3156 set_mem_align (dest_mem, dest_align);
3157 src_mem = get_memory_rtx (src, len);
3158 set_mem_align (src_mem, src_align);
3159 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3160 MIN (dest_align, src_align), endp);
3161 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3162 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3163 return dest_mem;
3164 }
3165
3166 return NULL_RTX;
3167 }
3168 }
3169
3170 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3171 we failed (the caller should emit a normal call), otherwise try to
3172 get the result in TARGET, if convenient. If ENDP is 0 return the
3173 destination pointer, if ENDP is 1 return the end pointer ala
3174 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3175 stpcpy. */
3176
3177 static rtx
3178 expand_movstr (tree dest, tree src, rtx target, int endp)
3179 {
3180 struct expand_operand ops[3];
3181 rtx dest_mem;
3182 rtx src_mem;
3183
3184 if (!targetm.have_movstr ())
3185 return NULL_RTX;
3186
3187 dest_mem = get_memory_rtx (dest, NULL);
3188 src_mem = get_memory_rtx (src, NULL);
3189 if (!endp)
3190 {
3191 target = force_reg (Pmode, XEXP (dest_mem, 0));
3192 dest_mem = replace_equiv_address (dest_mem, target);
3193 }
3194
3195 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3196 create_fixed_operand (&ops[1], dest_mem);
3197 create_fixed_operand (&ops[2], src_mem);
3198 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3199 return NULL_RTX;
3200
3201 if (endp && target != const0_rtx)
3202 {
3203 target = ops[0].value;
3204 /* movstr is supposed to set end to the address of the NUL
3205 terminator. If the caller requested a mempcpy-like return value,
3206 adjust it. */
3207 if (endp == 1)
3208 {
3209 rtx tem = plus_constant (GET_MODE (target),
3210 gen_lowpart (GET_MODE (target), target), 1);
3211 emit_move_insn (target, force_operand (tem, NULL_RTX));
3212 }
3213 }
3214 return target;
3215 }
3216
3217 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3218 NULL_RTX if we failed (the caller should emit a normal call), otherwise
3219 try to get the result in TARGET, if convenient (and in mode MODE if that's
3220 convenient). */
3221
3222 static rtx
3223 expand_builtin_strcpy (tree exp, rtx target)
3224 {
3225 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3226 {
3227 tree dest = CALL_EXPR_ARG (exp, 0);
3228 tree src = CALL_EXPR_ARG (exp, 1);
3229 return expand_builtin_strcpy_args (dest, src, target);
3230 }
3231 return NULL_RTX;
3232 }
3233
3234 /* Helper function to do the actual work for expand_builtin_strcpy. The
3235 arguments to the builtin_strcpy call DEST and SRC are broken out
3236 so that this can also be called without constructing an actual CALL_EXPR.
3237 The other arguments and return value are the same as for
3238 expand_builtin_strcpy. */
3239
3240 static rtx
3241 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3242 {
3243 return expand_movstr (dest, src, target, /*endp=*/0);
3244 }
3245
3246 /* Expand a call EXP to the stpcpy builtin.
3247 Return NULL_RTX if we failed (the caller should emit a normal call),
3248 otherwise try to get the result in TARGET, if convenient (and in
3249 mode MODE if that's convenient). */
3250
3251 static rtx
3252 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3253 {
3254 tree dst, src;
3255 location_t loc = EXPR_LOCATION (exp);
3256
3257 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3258 return NULL_RTX;
3259
3260 dst = CALL_EXPR_ARG (exp, 0);
3261 src = CALL_EXPR_ARG (exp, 1);
3262
3263 /* If return value is ignored, transform stpcpy into strcpy. */
3264 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3265 {
3266 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3267 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3268 return expand_expr (result, target, mode, EXPAND_NORMAL);
3269 }
3270 else
3271 {
3272 tree len, lenp1;
3273 rtx ret;
3274
3275 /* Ensure we get an actual string whose length can be evaluated at
3276 compile-time, not an expression containing a string. This is
3277 because the latter will potentially produce pessimized code
3278 when used to produce the return value. */
3279 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3280 return expand_movstr (dst, src, target, /*endp=*/2);
3281
3282 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3283 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3284 target, mode, /*endp=*/2,
3285 exp);
3286
3287 if (ret)
3288 return ret;
3289
3290 if (TREE_CODE (len) == INTEGER_CST)
3291 {
3292 rtx len_rtx = expand_normal (len);
3293
3294 if (CONST_INT_P (len_rtx))
3295 {
3296 ret = expand_builtin_strcpy_args (dst, src, target);
3297
3298 if (ret)
3299 {
3300 if (! target)
3301 {
3302 if (mode != VOIDmode)
3303 target = gen_reg_rtx (mode);
3304 else
3305 target = gen_reg_rtx (GET_MODE (ret));
3306 }
3307 if (GET_MODE (target) != GET_MODE (ret))
3308 ret = gen_lowpart (GET_MODE (target), ret);
3309
3310 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3311 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3312 gcc_assert (ret);
3313
3314 return target;
3315 }
3316 }
3317 }
3318
3319 return expand_movstr (dst, src, target, /*endp=*/2);
3320 }
3321 }
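/* Editorial sketch of the expansion above: when SRC is a string literal,

     char *p = stpcpy (dst, "abc");

   is handled as a mempcpy of strlen ("abc") + 1 == 4 bytes with ENDP == 2,
   so P ends up pointing at the copied NUL terminator; otherwise the code
   falls back to a movstr expansion or a normal library call.  */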
3322
3323 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3324 bytes from constant string DATA + OFFSET and return it as target
3325 constant. */
3326
3327 rtx
3328 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3329 machine_mode mode)
3330 {
3331 const char *str = (const char *) data;
3332
3333 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3334 return const0_rtx;
3335
3336 return c_readstr (str + offset, mode);
3337 }
3338
3339 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3340 NULL_RTX if we failed (the caller should emit a normal call).  */
3341
3342 static rtx
3343 expand_builtin_strncpy (tree exp, rtx target)
3344 {
3345 location_t loc = EXPR_LOCATION (exp);
3346
3347 if (validate_arglist (exp,
3348 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3349 {
3350 tree dest = CALL_EXPR_ARG (exp, 0);
3351 tree src = CALL_EXPR_ARG (exp, 1);
3352 tree len = CALL_EXPR_ARG (exp, 2);
3353 tree slen = c_strlen (src, 1);
3354
3355 /* We must be passed a constant len and src parameter. */
3356 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3357 return NULL_RTX;
3358
3359 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3360
3361 /* We're required to pad with trailing zeros if the requested
3362 len is greater than strlen(s2)+1. In that case try to
3363 use store_by_pieces; if that fails, punt. */
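      /* Illustrative example (editorial addition): for
             strncpy (d, "ab", 8)
         SLEN is 3 ("ab" plus its NUL) and LEN is 8, so this branch emits
         an 8-byte store of "ab" followed by six zero bytes via
         store_by_pieces instead of a library call.  */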
3364 if (tree_int_cst_lt (slen, len))
3365 {
3366 unsigned int dest_align = get_pointer_alignment (dest);
3367 const char *p = c_getstr (src);
3368 rtx dest_mem;
3369
3370 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3371 || !can_store_by_pieces (tree_to_uhwi (len),
3372 builtin_strncpy_read_str,
3373 CONST_CAST (char *, p),
3374 dest_align, false))
3375 return NULL_RTX;
3376
3377 dest_mem = get_memory_rtx (dest, len);
3378 store_by_pieces (dest_mem, tree_to_uhwi (len),
3379 builtin_strncpy_read_str,
3380 CONST_CAST (char *, p), dest_align, false, 0);
3381 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3382 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3383 return dest_mem;
3384 }
3385 }
3386 return NULL_RTX;
3387 }
3388
3389 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3390 bits from constant string DATA + OFFSET and return it as target
3391 constant. */
3392
3393 rtx
3394 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3395 machine_mode mode)
3396 {
3397 const char *c = (const char *) data;
3398 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3399
3400 memset (p, *c, GET_MODE_SIZE (mode));
3401
3402 return c_readstr (p, mode);
3403 }
3404
3405 /* Callback routine for store_by_pieces. Return the RTL of a register
3406 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3407 char value given in the RTL register data. For example, if mode is
3408 4 bytes wide, return the RTL for 0x01010101*data. */
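/* Illustrative example (editorial addition): for a 4-byte MODE and a
   run-time byte value V, the code below computes
       (V & 0xff) * 0x01010101
   so e.g. V == 0xab yields the replicated pattern 0xabababab.  */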
3409
3410 static rtx
3411 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3412 machine_mode mode)
3413 {
3414 rtx target, coeff;
3415 size_t size;
3416 char *p;
3417
3418 size = GET_MODE_SIZE (mode);
3419 if (size == 1)
3420 return (rtx) data;
3421
3422 p = XALLOCAVEC (char, size);
3423 memset (p, 1, size);
3424 coeff = c_readstr (p, mode);
3425
3426 target = convert_to_mode (mode, (rtx) data, 1);
3427 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3428 return force_reg (mode, target);
3429 }
3430
3431 /* Expand expression EXP, which is a call to the memset builtin. Return
3432 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3433 try to get the result in TARGET, if convenient (and in mode MODE if that's
3434 convenient). */
3435
3436 static rtx
3437 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3438 {
3439 if (!validate_arglist (exp,
3440 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3441 return NULL_RTX;
3442 else
3443 {
3444 tree dest = CALL_EXPR_ARG (exp, 0);
3445 tree val = CALL_EXPR_ARG (exp, 1);
3446 tree len = CALL_EXPR_ARG (exp, 2);
3447 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3448 }
3449 }
3450
3451 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3452 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3453 try to get the result in TARGET, if convenient (and in mode MODE if that's
3454 convenient). */
3455
3456 static rtx
3457 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3458 {
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3461 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3462 return NULL_RTX;
3463 else
3464 {
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree val = CALL_EXPR_ARG (exp, 2);
3467 tree len = CALL_EXPR_ARG (exp, 3);
3468 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3469
3470 /* Return src bounds with the result. */
3471 if (res)
3472 {
3473 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3474 expand_normal (CALL_EXPR_ARG (exp, 1)));
3475 res = chkp_join_splitted_slot (res, bnd);
3476 }
3477 return res;
3478 }
3479 }
3480
3481 /* Helper function to do the actual work for expand_builtin_memset. The
3482 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 The other arguments and return value are the same as for
3485 expand_builtin_memset. */
3486
3487 static rtx
3488 expand_builtin_memset_args (tree dest, tree val, tree len,
3489 rtx target, machine_mode mode, tree orig_exp)
3490 {
3491 tree fndecl, fn;
3492 enum built_in_function fcode;
3493 machine_mode val_mode;
3494 char c;
3495 unsigned int dest_align;
3496 rtx dest_mem, dest_addr, len_rtx;
3497 HOST_WIDE_INT expected_size = -1;
3498 unsigned int expected_align = 0;
3499 unsigned HOST_WIDE_INT min_size;
3500 unsigned HOST_WIDE_INT max_size;
3501 unsigned HOST_WIDE_INT probable_max_size;
3502
3503 dest_align = get_pointer_alignment (dest);
3504
3505 /* If DEST is not a pointer type, don't do this operation in-line. */
3506 if (dest_align == 0)
3507 return NULL_RTX;
3508
3509 if (currently_expanding_gimple_stmt)
3510 stringop_block_profile (currently_expanding_gimple_stmt,
3511 &expected_align, &expected_size);
3512
3513 if (expected_align < dest_align)
3514 expected_align = dest_align;
3515
3516 /* If the LEN parameter is zero, return DEST. */
3517 if (integer_zerop (len))
3518 {
3519 /* Evaluate and ignore VAL in case it has side-effects. */
3520 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3521 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3522 }
3523
3524 /* Stabilize the arguments in case we fail. */
3525 dest = builtin_save_expr (dest);
3526 val = builtin_save_expr (val);
3527 len = builtin_save_expr (len);
3528
3529 len_rtx = expand_normal (len);
3530 determine_block_size (len, len_rtx, &min_size, &max_size,
3531 &probable_max_size);
3532 dest_mem = get_memory_rtx (dest, len);
3533 val_mode = TYPE_MODE (unsigned_char_type_node);
3534
3535 if (TREE_CODE (val) != INTEGER_CST)
3536 {
3537 rtx val_rtx;
3538
3539 val_rtx = expand_normal (val);
3540 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3541
3542 /* Assume that we can memset by pieces if we can store
3543 * the coefficients by pieces (in the required modes).
3544 * We can't pass builtin_memset_gen_str as that emits RTL. */
3545 c = 1;
3546 if (tree_fits_uhwi_p (len)
3547 && can_store_by_pieces (tree_to_uhwi (len),
3548 builtin_memset_read_str, &c, dest_align,
3549 true))
3550 {
3551 val_rtx = force_reg (val_mode, val_rtx);
3552 store_by_pieces (dest_mem, tree_to_uhwi (len),
3553 builtin_memset_gen_str, val_rtx, dest_align,
3554 true, 0);
3555 }
3556 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3557 dest_align, expected_align,
3558 expected_size, min_size, max_size,
3559 probable_max_size))
3560 goto do_libcall;
3561
3562 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3565 }
3566
3567 if (target_char_cast (val, &c))
3568 goto do_libcall;
3569
3570 if (c)
3571 {
3572 if (tree_fits_uhwi_p (len)
3573 && can_store_by_pieces (tree_to_uhwi (len),
3574 builtin_memset_read_str, &c, dest_align,
3575 true))
3576 store_by_pieces (dest_mem, tree_to_uhwi (len),
3577 builtin_memset_read_str, &c, dest_align, true, 0);
3578 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3579 gen_int_mode (c, val_mode),
3580 dest_align, expected_align,
3581 expected_size, min_size, max_size,
3582 probable_max_size))
3583 goto do_libcall;
3584
3585 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3586 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3587 return dest_mem;
3588 }
3589
3590 set_mem_align (dest_mem, dest_align);
3591 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3592 CALL_EXPR_TAILCALL (orig_exp)
3593 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3594 expected_align, expected_size,
3595 min_size, max_size,
3596 probable_max_size);
3597
3598 if (dest_addr == 0)
3599 {
3600 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3601 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3602 }
3603
3604 return dest_addr;
3605
3606 do_libcall:
3607 fndecl = get_callee_fndecl (orig_exp);
3608 fcode = DECL_FUNCTION_CODE (fndecl);
3609 if (fcode == BUILT_IN_MEMSET
3610 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3611 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3612 dest, val, len);
3613 else if (fcode == BUILT_IN_BZERO)
3614 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3615 dest, len);
3616 else
3617 gcc_unreachable ();
3618 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3619 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3620 return expand_call (fn, target, target == const0_rtx);
3621 }
3622
3623 /* Expand expression EXP, which is a call to the bzero builtin. Return
3624 NULL_RTX if we failed; the caller should emit a normal call. */
3625
3626 static rtx
3627 expand_builtin_bzero (tree exp)
3628 {
3629 tree dest, size;
3630 location_t loc = EXPR_LOCATION (exp);
3631
3632 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3633 return NULL_RTX;
3634
3635 dest = CALL_EXPR_ARG (exp, 0);
3636 size = CALL_EXPR_ARG (exp, 1);
3637
3638 /* New argument list transforming bzero(ptr x, int y) to
3639 memset(ptr x, int 0, size_t y). This is done this way
3640 so that if it isn't expanded inline, we fall back to
3641 calling bzero instead of memset. */
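  /* Illustrative example (editorial addition):
         bzero (p, n)
     is expanded here exactly as
         memset (p, 0, (size_t) n)
     would be, but ORIG_EXP still names bzero, so a failed inline
     expansion falls back to the bzero library routine.  */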
3642
3643 return expand_builtin_memset_args (dest, integer_zero_node,
3644 fold_convert_loc (loc,
3645 size_type_node, size),
3646 const0_rtx, VOIDmode, exp);
3647 }
3648
3649 /* Try to expand cmpstr operation ICODE with the given operands.
3650 Return the result rtx on success, otherwise return null. */
3651
3652 static rtx
3653 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3654 HOST_WIDE_INT align)
3655 {
3656 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3657
3658 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3659 target = NULL_RTX;
3660
3661 struct expand_operand ops[4];
3662 create_output_operand (&ops[0], target, insn_mode);
3663 create_fixed_operand (&ops[1], arg1_rtx);
3664 create_fixed_operand (&ops[2], arg2_rtx);
3665 create_integer_operand (&ops[3], align);
3666 if (maybe_expand_insn (icode, 4, ops))
3667 return ops[0].value;
3668 return NULL_RTX;
3669 }
3670
3671 /* Expand expression EXP, which is a call to the memcmp built-in function.
3672 Return NULL_RTX if we failed and the caller should emit a normal call,
3673 otherwise try to get the result in TARGET, if convenient.
3674 RESULT_EQ is true if we can relax the returned value to be either zero
3675 or nonzero, without caring about the sign. */
3676
3677 static rtx
3678 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3679 {
3680 if (!validate_arglist (exp,
3681 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3682 return NULL_RTX;
3683
3684 tree arg1 = CALL_EXPR_ARG (exp, 0);
3685 tree arg2 = CALL_EXPR_ARG (exp, 1);
3686 tree len = CALL_EXPR_ARG (exp, 2);
3687 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3688 location_t loc = EXPR_LOCATION (exp);
3689
3690 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3691 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3692
3693 /* If we don't have POINTER_TYPE, call the function. */
3694 if (arg1_align == 0 || arg2_align == 0)
3695 return NULL_RTX;
3696
3697 rtx arg1_rtx = get_memory_rtx (arg1, len);
3698 rtx arg2_rtx = get_memory_rtx (arg2, len);
3699 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3700
3701 /* Set MEM_SIZE as appropriate. */
3702 if (CONST_INT_P (len_rtx))
3703 {
3704 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3705 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3706 }
3707
3708 by_pieces_constfn constfn = NULL;
3709
3710 const char *src_str = c_getstr (arg1);
3711 if (src_str == NULL)
3712 src_str = c_getstr (arg2);
3713 else
3714 std::swap (arg1_rtx, arg2_rtx);
3715
3716 /* If SRC is a string constant and block move would be done
3717 by pieces, we can avoid loading the string from memory
3718 and only store the computed constants. */
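  /* Illustrative example (editorial addition): for
         memcmp (buf, "abcd", 4)
     SRC_STR is "abcd", so builtin_memcpy_read_str can synthesize the
     constant words to compare against and only BUF needs to be loaded.  */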
3719 if (src_str
3720 && CONST_INT_P (len_rtx)
3721 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3722 constfn = builtin_memcpy_read_str;
3723
3724 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3725 TREE_TYPE (len), target,
3726 result_eq, constfn,
3727 CONST_CAST (char *, src_str));
3728
3729 if (result)
3730 {
3731 /* Return the value in the proper mode for this function. */
3732 if (GET_MODE (result) == mode)
3733 return result;
3734
3735 if (target != 0)
3736 {
3737 convert_move (target, result, 0);
3738 return target;
3739 }
3740
3741 return convert_to_mode (mode, result, 0);
3742 }
3743
3744 return NULL_RTX;
3745 }
3746
3747 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3748 if we failed; the caller should emit a normal call. Otherwise try to get
3749 the result in TARGET, if convenient. */
3750
3751 static rtx
3752 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3753 {
3754 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3755 return NULL_RTX;
3756
3757 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3758 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3759 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3760 {
3761 rtx arg1_rtx, arg2_rtx;
3762 tree fndecl, fn;
3763 tree arg1 = CALL_EXPR_ARG (exp, 0);
3764 tree arg2 = CALL_EXPR_ARG (exp, 1);
3765 rtx result = NULL_RTX;
3766
3767 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3768 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3769
3770 /* If we don't have POINTER_TYPE, call the function. */
3771 if (arg1_align == 0 || arg2_align == 0)
3772 return NULL_RTX;
3773
3774 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3775 arg1 = builtin_save_expr (arg1);
3776 arg2 = builtin_save_expr (arg2);
3777
3778 arg1_rtx = get_memory_rtx (arg1, NULL);
3779 arg2_rtx = get_memory_rtx (arg2, NULL);
3780
3781 /* Try to call cmpstrsi. */
3782 if (cmpstr_icode != CODE_FOR_nothing)
3783 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3784 MIN (arg1_align, arg2_align));
3785
3786 /* Try to determine at least one length and call cmpstrnsi. */
3787 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3788 {
3789 tree len;
3790 rtx arg3_rtx;
3791
3792 tree len1 = c_strlen (arg1, 1);
3793 tree len2 = c_strlen (arg2, 1);
3794
3795 if (len1)
3796 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3797 if (len2)
3798 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3799
3800 /* If we don't have a constant length for the first, use the length
3801 of the second, if we know it. We don't require a constant for
3802 this case; some cost analysis could be done if both are available
3803 but neither is constant. For now, assume they're equally cheap,
3804 unless one has side effects. If both strings have constant lengths,
3805 use the smaller. */
3806
3807 if (!len1)
3808 len = len2;
3809 else if (!len2)
3810 len = len1;
3811 else if (TREE_SIDE_EFFECTS (len1))
3812 len = len2;
3813 else if (TREE_SIDE_EFFECTS (len2))
3814 len = len1;
3815 else if (TREE_CODE (len1) != INTEGER_CST)
3816 len = len2;
3817 else if (TREE_CODE (len2) != INTEGER_CST)
3818 len = len1;
3819 else if (tree_int_cst_lt (len1, len2))
3820 len = len1;
3821 else
3822 len = len2;
3823
3824 /* If both arguments have side effects, we cannot optimize. */
3825 if (len && !TREE_SIDE_EFFECTS (len))
3826 {
3827 arg3_rtx = expand_normal (len);
3828 result = expand_cmpstrn_or_cmpmem
3829 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3830 arg3_rtx, MIN (arg1_align, arg2_align));
3831 }
3832 }
3833
3834 if (result)
3835 {
3836 /* Return the value in the proper mode for this function. */
3837 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3838 if (GET_MODE (result) == mode)
3839 return result;
3840 if (target == 0)
3841 return convert_to_mode (mode, result, 0);
3842 convert_move (target, result, 0);
3843 return target;
3844 }
3845
3846 /* Expand the library call ourselves using a stabilized argument
3847 list to avoid re-evaluating the function's arguments twice. */
3848 fndecl = get_callee_fndecl (exp);
3849 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3850 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3851 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3852 return expand_call (fn, target, target == const0_rtx);
3853 }
3854 return NULL_RTX;
3855 }
3856
3857 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3858 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
3859 the result in TARGET, if convenient. */
3860
3861 static rtx
3862 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3863 ATTRIBUTE_UNUSED machine_mode mode)
3864 {
3865 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3866
3867 if (!validate_arglist (exp,
3868 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3869 return NULL_RTX;
3870
3871 /* If c_strlen can determine an expression for one of the string
3872 lengths, and it doesn't have side effects, then emit cmpstrnsi
3873 using length MIN(strlen(string)+1, arg3). */
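  /* Illustrative example (editorial addition): for
         strncmp (s, "abc", n)
     LEN2 is strlen ("abc") + 1 == 4, so the comparison is emitted with
     length MIN (4, n); bytes past the NUL of the shorter string cannot
     change the result.  */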
3874 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3875 if (cmpstrn_icode != CODE_FOR_nothing)
3876 {
3877 tree len, len1, len2;
3878 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3879 rtx result;
3880 tree fndecl, fn;
3881 tree arg1 = CALL_EXPR_ARG (exp, 0);
3882 tree arg2 = CALL_EXPR_ARG (exp, 1);
3883 tree arg3 = CALL_EXPR_ARG (exp, 2);
3884
3885 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3886 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3887
3888 len1 = c_strlen (arg1, 1);
3889 len2 = c_strlen (arg2, 1);
3890
3891 if (len1)
3892 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3893 if (len2)
3894 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3895
3896 /* If we don't have a constant length for the first, use the length
3897 of the second, if we know it. We don't require a constant for
3898 this case; some cost analysis could be done if both are available
3899 but neither is constant. For now, assume they're equally cheap,
3900 unless one has side effects. If both strings have constant lengths,
3901 use the smaller. */
3902
3903 if (!len1)
3904 len = len2;
3905 else if (!len2)
3906 len = len1;
3907 else if (TREE_SIDE_EFFECTS (len1))
3908 len = len2;
3909 else if (TREE_SIDE_EFFECTS (len2))
3910 len = len1;
3911 else if (TREE_CODE (len1) != INTEGER_CST)
3912 len = len2;
3913 else if (TREE_CODE (len2) != INTEGER_CST)
3914 len = len1;
3915 else if (tree_int_cst_lt (len1, len2))
3916 len = len1;
3917 else
3918 len = len2;
3919
3920 /* If both arguments have side effects, we cannot optimize. */
3921 if (!len || TREE_SIDE_EFFECTS (len))
3922 return NULL_RTX;
3923
3924 /* The actual new length parameter is MIN(len,arg3). */
3925 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3926 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3927
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align == 0 || arg2_align == 0)
3930 return NULL_RTX;
3931
3932 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3933 arg1 = builtin_save_expr (arg1);
3934 arg2 = builtin_save_expr (arg2);
3935 len = builtin_save_expr (len);
3936
3937 arg1_rtx = get_memory_rtx (arg1, len);
3938 arg2_rtx = get_memory_rtx (arg2, len);
3939 arg3_rtx = expand_normal (len);
3940 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3941 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3942 MIN (arg1_align, arg2_align));
3943 if (result)
3944 {
3945 /* Return the value in the proper mode for this function. */
3946 mode = TYPE_MODE (TREE_TYPE (exp));
3947 if (GET_MODE (result) == mode)
3948 return result;
3949 if (target == 0)
3950 return convert_to_mode (mode, result, 0);
3951 convert_move (target, result, 0);
3952 return target;
3953 }
3954
3955 /* Expand the library call ourselves using a stabilized argument
3956 list to avoid re-evaluating the function's arguments twice. */
3957 fndecl = get_callee_fndecl (exp);
3958 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3959 arg1, arg2, len);
3960 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3961 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3962 return expand_call (fn, target, target == const0_rtx);
3963 }
3964 return NULL_RTX;
3965 }
3966
3967 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3968 if that's convenient. */
3969
3970 rtx
3971 expand_builtin_saveregs (void)
3972 {
3973 rtx val;
3974 rtx_insn *seq;
3975
3976 /* Don't do __builtin_saveregs more than once in a function.
3977 Save the result of the first call and reuse it. */
3978 if (saveregs_value != 0)
3979 return saveregs_value;
3980
3981 /* When this function is called, it means that registers must be
3982 saved on entry to this function. So we migrate the call to the
3983 first insn of this function. */
3984
3985 start_sequence ();
3986
3987 /* Do whatever the machine needs done in this case. */
3988 val = targetm.calls.expand_builtin_saveregs ();
3989
3990 seq = get_insns ();
3991 end_sequence ();
3992
3993 saveregs_value = val;
3994
3995 /* Put the insns after the NOTE that starts the function. If this
3996 is inside a start_sequence, make the outer-level insn chain current, so
3997 the code is placed at the start of the function. */
3998 push_topmost_sequence ();
3999 emit_insn_after (seq, entry_of_function ());
4000 pop_topmost_sequence ();
4001
4002 return val;
4003 }
4004
4005 /* Expand a call to __builtin_next_arg. */
4006
4007 static rtx
4008 expand_builtin_next_arg (void)
4009 {
4010 /* Checking arguments is already done in fold_builtin_next_arg
4011 that must be called before this function. */
4012 return expand_binop (ptr_mode, add_optab,
4013 crtl->args.internal_arg_pointer,
4014 crtl->args.arg_offset_rtx,
4015 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4016 }
4017
4018 /* Make it easier for the backends by protecting the valist argument
4019 from multiple evaluations. */
4020
4021 static tree
4022 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4023 {
4024 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4025
4026 /* The current way of determining the type of valist is completely
4027 bogus. We should have the information on the va builtin instead. */
4028 if (!vatype)
4029 vatype = targetm.fn_abi_va_list (cfun->decl);
4030
4031 if (TREE_CODE (vatype) == ARRAY_TYPE)
4032 {
4033 if (TREE_SIDE_EFFECTS (valist))
4034 valist = save_expr (valist);
4035
4036 /* For this case, the backends will be expecting a pointer to
4037 vatype, but it's possible we've actually been given an array
4038 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4039 So fix it. */
4040 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4041 {
4042 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4043 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4044 }
4045 }
4046 else
4047 {
4048 tree pt = build_pointer_type (vatype);
4049
4050 if (! needs_lvalue)
4051 {
4052 if (! TREE_SIDE_EFFECTS (valist))
4053 return valist;
4054
4055 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4056 TREE_SIDE_EFFECTS (valist) = 1;
4057 }
4058
4059 if (TREE_SIDE_EFFECTS (valist))
4060 valist = save_expr (valist);
4061 valist = fold_build2_loc (loc, MEM_REF,
4062 vatype, valist, build_int_cst (pt, 0));
4063 }
4064
4065 return valist;
4066 }
4067
4068 /* The "standard" definition of va_list is void*. */
4069
4070 tree
4071 std_build_builtin_va_list (void)
4072 {
4073 return ptr_type_node;
4074 }
4075
4076 /* The "standard" abi va_list is va_list_type_node. */
4077
4078 tree
4079 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4080 {
4081 return va_list_type_node;
4082 }
4083
4084 /* The "standard" type of va_list is va_list_type_node. */
4085
4086 tree
4087 std_canonical_va_list_type (tree type)
4088 {
4089 tree wtype, htype;
4090
4091 wtype = va_list_type_node;
4092 htype = type;
4093 /* Treat structure va_list types. */
4094 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4095 htype = TREE_TYPE (htype);
4096 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4097 {
4098 /* If va_list is an array type, the argument may have decayed
4099 to a pointer type, e.g. by being passed to another function.
4100 In that case, unwrap both types so that we can compare the
4101 underlying records. */
4102 if (TREE_CODE (htype) == ARRAY_TYPE
4103 || POINTER_TYPE_P (htype))
4104 {
4105 wtype = TREE_TYPE (wtype);
4106 htype = TREE_TYPE (htype);
4107 }
4108 }
4109 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4110 return va_list_type_node;
4111
4112 return NULL_TREE;
4113 }
4114
4115 /* The "standard" implementation of va_start: just assign `nextarg' to
4116 the variable. */
4117
4118 void
4119 std_expand_builtin_va_start (tree valist, rtx nextarg)
4120 {
4121 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4122 convert_move (va_r, nextarg, 0);
4123
4124 /* We do not have any valid bounds for the pointer, so
4125 just store zero bounds for it. */
4126 if (chkp_function_instrumented_p (current_function_decl))
4127 chkp_expand_bounds_reset_for_mem (valist,
4128 make_tree (TREE_TYPE (valist),
4129 nextarg));
4130 }
4131
4132 /* Expand EXP, a call to __builtin_va_start. */
4133
4134 static rtx
4135 expand_builtin_va_start (tree exp)
4136 {
4137 rtx nextarg;
4138 tree valist;
4139 location_t loc = EXPR_LOCATION (exp);
4140
4141 if (call_expr_nargs (exp) < 2)
4142 {
4143 error_at (loc, "too few arguments to function %<va_start%>");
4144 return const0_rtx;
4145 }
4146
4147 if (fold_builtin_next_arg (exp, true))
4148 return const0_rtx;
4149
4150 nextarg = expand_builtin_next_arg ();
4151 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4152
4153 if (targetm.expand_builtin_va_start)
4154 targetm.expand_builtin_va_start (valist, nextarg);
4155 else
4156 std_expand_builtin_va_start (valist, nextarg);
4157
4158 return const0_rtx;
4159 }
4160
4161 /* Expand EXP, a call to __builtin_va_end. */
4162
4163 static rtx
4164 expand_builtin_va_end (tree exp)
4165 {
4166 tree valist = CALL_EXPR_ARG (exp, 0);
4167
4168 /* Evaluate for side effects, if needed. I hate macros that don't
4169 do that. */
4170 if (TREE_SIDE_EFFECTS (valist))
4171 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4172
4173 return const0_rtx;
4174 }
4175
4176 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4177 builtin rather than just as an assignment in stdarg.h because of the
4178 nastiness of array-type va_list types. */
4179
4180 static rtx
4181 expand_builtin_va_copy (tree exp)
4182 {
4183 tree dst, src, t;
4184 location_t loc = EXPR_LOCATION (exp);
4185
4186 dst = CALL_EXPR_ARG (exp, 0);
4187 src = CALL_EXPR_ARG (exp, 1);
4188
4189 dst = stabilize_va_list_loc (loc, dst, 1);
4190 src = stabilize_va_list_loc (loc, src, 0);
4191
4192 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4193
4194 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4195 {
4196 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4197 TREE_SIDE_EFFECTS (t) = 1;
4198 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4199 }
4200 else
4201 {
4202 rtx dstb, srcb, size;
4203
4204 /* Evaluate to pointers. */
4205 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4206 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4207 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4208 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4209
4210 dstb = convert_memory_address (Pmode, dstb);
4211 srcb = convert_memory_address (Pmode, srcb);
4212
4213 /* "Dereference" to BLKmode memories. */
4214 dstb = gen_rtx_MEM (BLKmode, dstb);
4215 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4216 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4217 srcb = gen_rtx_MEM (BLKmode, srcb);
4218 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4219 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4220
4221 /* Copy. */
4222 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4223 }
4224
4225 return const0_rtx;
4226 }
4227
4228 /* Expand a call to one of the builtin functions __builtin_frame_address or
4229 __builtin_return_address. */
4230
4231 static rtx
4232 expand_builtin_frame_address (tree fndecl, tree exp)
4233 {
4234 /* The argument must be a nonnegative integer constant.
4235 It counts the number of frames to scan up the stack.
4236 The value is either the frame pointer value or the return
4237 address saved in that frame. */
4238 if (call_expr_nargs (exp) == 0)
4239 /* Warning about missing arg was already issued. */
4240 return const0_rtx;
4241 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4242 {
4243 error ("invalid argument to %qD", fndecl);
4244 return const0_rtx;
4245 }
4246 else
4247 {
4248 /* Number of frames to scan up the stack. */
4249 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4250
4251 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4252
4253 /* Some ports cannot access arbitrary stack frames. */
4254 if (tem == NULL)
4255 {
4256 warning (0, "unsupported argument to %qD", fndecl);
4257 return const0_rtx;
4258 }
4259
4260 if (count)
4261 {
4262 /* Warn since no effort is made to ensure that any frame
4263 beyond the current one exists or can be safely reached. */
4264 warning (OPT_Wframe_address, "calling %qD with "
4265 "a nonzero argument is unsafe", fndecl);
4266 }
4267
4268 /* For __builtin_frame_address, return what we've got. */
4269 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4270 return tem;
4271
4272 if (!REG_P (tem)
4273 && ! CONSTANT_P (tem))
4274 tem = copy_addr_to_reg (tem);
4275 return tem;
4276 }
4277 }
4278
4279 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4280 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4281 is the same as for allocate_dynamic_stack_space. */
4282
4283 static rtx
4284 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4285 {
4286 rtx op0;
4287 rtx result;
4288 bool valid_arglist;
4289 unsigned int align;
4290 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4291 == BUILT_IN_ALLOCA_WITH_ALIGN);
4292
4293 valid_arglist
4294 = (alloca_with_align
4295 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4296 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4297
4298 if (!valid_arglist)
4299 return NULL_RTX;
4300
4301 /* Compute the argument. */
4302 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4303
4304 /* Compute the alignment. */
4305 align = (alloca_with_align
4306 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4307 : BIGGEST_ALIGNMENT);
4308
4309 /* Allocate the desired space. */
4310 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4311 result = convert_memory_address (ptr_mode, result);
4312
4313 return result;
4314 }
4315
4316 /* Expand a call to bswap builtin in EXP.
4317 Return NULL_RTX if a normal call should be emitted rather than expanding the
4318 function in-line. If convenient, the result should be placed in TARGET.
4319 SUBTARGET may be used as the target for computing one of EXP's operands. */
4320
4321 static rtx
4322 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4323 rtx subtarget)
4324 {
4325 tree arg;
4326 rtx op0;
4327
4328 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4329 return NULL_RTX;
4330
4331 arg = CALL_EXPR_ARG (exp, 0);
4332 op0 = expand_expr (arg,
4333 subtarget && GET_MODE (subtarget) == target_mode
4334 ? subtarget : NULL_RTX,
4335 target_mode, EXPAND_NORMAL);
4336 if (GET_MODE (op0) != target_mode)
4337 op0 = convert_to_mode (target_mode, op0, 1);
4338
4339 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4340
4341 gcc_assert (target);
4342
4343 return convert_to_mode (target_mode, target, 1);
4344 }
4345
4346 /* Expand a call to a unary builtin in EXP.
4347 Return NULL_RTX if a normal call should be emitted rather than expanding the
4348 function in-line. If convenient, the result should be placed in TARGET.
4349 SUBTARGET may be used as the target for computing one of EXP's operands. */
4350
4351 static rtx
4352 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4353 rtx subtarget, optab op_optab)
4354 {
4355 rtx op0;
4356
4357 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4358 return NULL_RTX;
4359
4360 /* Compute the argument. */
4361 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4362 (subtarget
4363 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4364 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4365 VOIDmode, EXPAND_NORMAL);
4366 /* Compute op, into TARGET if possible.
4367 Set TARGET to wherever the result comes back. */
4368 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4369 op_optab, op0, target, op_optab != clrsb_optab);
4370 gcc_assert (target);
4371
4372 return convert_to_mode (target_mode, target, 0);
4373 }
4374
4375 /* Expand a call to __builtin_expect. We just return our argument
4376 as the builtin_expect semantics should already have been handled by the
4377 tree branch prediction pass. */
4378
4379 static rtx
4380 expand_builtin_expect (tree exp, rtx target)
4381 {
4382 tree arg;
4383
4384 if (call_expr_nargs (exp) < 2)
4385 return const0_rtx;
4386 arg = CALL_EXPR_ARG (exp, 0);
4387
4388 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4389 /* When guessing was done, the hints should be already stripped away. */
4390 gcc_assert (!flag_guess_branch_prob
4391 || optimize == 0 || seen_error ());
4392 return target;
4393 }
4394
4395 /* Expand a call to __builtin_assume_aligned. We just return our first
4396 argument, as the builtin_assume_aligned semantics should already have
4397 been handled by CCP. */
4398
4399 static rtx
4400 expand_builtin_assume_aligned (tree exp, rtx target)
4401 {
4402 if (call_expr_nargs (exp) < 2)
4403 return const0_rtx;
4404 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4405 EXPAND_NORMAL);
4406 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4407 && (call_expr_nargs (exp) < 3
4408 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4409 return target;
4410 }
4411
4412 void
4413 expand_builtin_trap (void)
4414 {
4415 if (targetm.have_trap ())
4416 {
4417 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4418 /* For trap insns when not accumulating outgoing args force
4419 REG_ARGS_SIZE note to prevent crossjumping of calls with
4420 different args sizes. */
4421 if (!ACCUMULATE_OUTGOING_ARGS)
4422 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4423 }
4424 else
4425 {
4426 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4427 tree call_expr = build_call_expr (fn, 0);
4428 expand_call (call_expr, NULL_RTX, false);
4429 }
4430
4431 emit_barrier ();
4432 }
4433
4434 /* Expand a call to __builtin_unreachable. We do nothing except emit
4435 a barrier saying that control flow will not pass here.
4436
4437 It is the responsibility of the program being compiled to ensure
4438 that control flow never reaches __builtin_unreachable. */
4439 static void
4440 expand_builtin_unreachable (void)
4441 {
4442 emit_barrier ();
4443 }
4444
4445 /* Expand EXP, a call to fabs, fabsf or fabsl.
4446 Return NULL_RTX if a normal call should be emitted rather than expanding
4447 the function inline. If convenient, the result should be placed
4448 in TARGET. SUBTARGET may be used as the target for computing
4449 the operand. */
4450
4451 static rtx
4452 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4453 {
4454 machine_mode mode;
4455 tree arg;
4456 rtx op0;
4457
4458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4459 return NULL_RTX;
4460
4461 arg = CALL_EXPR_ARG (exp, 0);
4462 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4463 mode = TYPE_MODE (TREE_TYPE (arg));
4464 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4465 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4466 }
4467
4468 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4469 Return NULL if a normal call should be emitted rather than expanding the
4470 function inline. If convenient, the result should be placed in TARGET.
4471 SUBTARGET may be used as the target for computing the operand. */
4472
4473 static rtx
4474 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4475 {
4476 rtx op0, op1;
4477 tree arg;
4478
4479 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4480 return NULL_RTX;
4481
4482 arg = CALL_EXPR_ARG (exp, 0);
4483 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4484
4485 arg = CALL_EXPR_ARG (exp, 1);
4486 op1 = expand_normal (arg);
4487
4488 return expand_copysign (op0, op1, target);
4489 }
4490
4491 /* Expand a call to __builtin___clear_cache. */
4492
4493 static rtx
4494 expand_builtin___clear_cache (tree exp)
4495 {
4496 if (!targetm.code_for_clear_cache)
4497 {
4498 #ifdef CLEAR_INSN_CACHE
4499 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4500 does something. Just do the default expansion to a call to
4501 __clear_cache(). */
4502 return NULL_RTX;
4503 #else
4504 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4505 does nothing. There is no need to call it. Do nothing. */
4506 return const0_rtx;
4507 #endif /* CLEAR_INSN_CACHE */
4508 }
4509
4510 /* We have a "clear_cache" insn, and it will handle everything. */
4511 tree begin, end;
4512 rtx begin_rtx, end_rtx;
4513
4514 /* We must not expand to a library call. If we did, any
4515 fallback library function in libgcc that might contain a call to
4516 __builtin___clear_cache() would recurse infinitely. */
4517 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4518 {
4519 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4520 return const0_rtx;
4521 }
4522
4523 if (targetm.have_clear_cache ())
4524 {
4525 struct expand_operand ops[2];
4526
4527 begin = CALL_EXPR_ARG (exp, 0);
4528 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4529
4530 end = CALL_EXPR_ARG (exp, 1);
4531 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4532
4533 create_address_operand (&ops[0], begin_rtx);
4534 create_address_operand (&ops[1], end_rtx);
4535 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4536 return const0_rtx;
4537 }
4538 return const0_rtx;
4539 }
4540
4541 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4542
4543 static rtx
4544 round_trampoline_addr (rtx tramp)
4545 {
4546 rtx temp, addend, mask;
4547
4548 /* If we don't need too much alignment, we'll have been guaranteed
4549 proper alignment by get_trampoline_type. */
4550 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4551 return tramp;
4552
4553 /* Round address up to desired boundary. */
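  /* Illustrative example (editorial addition, assuming a
     TRAMPOLINE_ALIGNMENT of 128 bits): the two operations below compute
         tramp = (tramp + 15) & -16;  */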
4554 temp = gen_reg_rtx (Pmode);
4555 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4556 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4557
4558 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4559 temp, 0, OPTAB_LIB_WIDEN);
4560 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4561 temp, 0, OPTAB_LIB_WIDEN);
4562
4563 return tramp;
4564 }
4565
4566 static rtx
4567 expand_builtin_init_trampoline (tree exp, bool onstack)
4568 {
4569 tree t_tramp, t_func, t_chain;
4570 rtx m_tramp, r_tramp, r_chain, tmp;
4571
4572 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4573 POINTER_TYPE, VOID_TYPE))
4574 return NULL_RTX;
4575
4576 t_tramp = CALL_EXPR_ARG (exp, 0);
4577 t_func = CALL_EXPR_ARG (exp, 1);
4578 t_chain = CALL_EXPR_ARG (exp, 2);
4579
4580 r_tramp = expand_normal (t_tramp);
4581 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4582 MEM_NOTRAP_P (m_tramp) = 1;
4583
4584 /* If ONSTACK, the TRAMP argument should be the address of a field
4585 within the local function's FRAME decl. Either way, let's see if
4586 we can fill in the MEM_ATTRs for this memory. */
4587 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4588 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4589
4590 /* Creator of a heap trampoline is responsible for making sure the
4591 address is aligned to at least STACK_BOUNDARY. Normally malloc
4592 will ensure this anyhow. */
4593 tmp = round_trampoline_addr (r_tramp);
4594 if (tmp != r_tramp)
4595 {
4596 m_tramp = change_address (m_tramp, BLKmode, tmp);
4597 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4598 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4599 }
4600
4601 /* The FUNC argument should be the address of the nested function.
4602 Extract the actual function decl to pass to the hook. */
4603 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4604 t_func = TREE_OPERAND (t_func, 0);
4605 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4606
4607 r_chain = expand_normal (t_chain);
4608
4609 /* Generate insns to initialize the trampoline. */
4610 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4611
4612 if (onstack)
4613 {
4614 trampolines_created = 1;
4615
4616 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4617 "trampoline generated for nested function %qD", t_func);
4618 }
4619
4620 return const0_rtx;
4621 }
4622
4623 static rtx
4624 expand_builtin_adjust_trampoline (tree exp)
4625 {
4626 rtx tramp;
4627
4628 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
4630
4631 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4632 tramp = round_trampoline_addr (tramp);
4633 if (targetm.calls.trampoline_adjust_address)
4634 tramp = targetm.calls.trampoline_adjust_address (tramp);
4635
4636 return tramp;
4637 }
4638
4639 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4640 function. The function first checks whether the back end provides
4641 an insn to implement signbit for the respective mode. If not, it
4642 checks whether the floating point format of the value is such that
4643 the sign bit can be extracted. If that is not the case, error out.
4644 EXP is the expression that is a call to the builtin function; if
4645 convenient, the result should be placed in TARGET. */
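/* Illustrative example (editorial addition, assuming IEEE formats and a
   32-bit int result type): for SFmode the sign bit is bit 31, which fits
   in the result mode, so the fallback below emits roughly
       bits_of (x) & 0x80000000
   (any nonzero result means the sign was set).  For DFmode the sign bit
   is bit 63, so the word containing it is shifted right and masked with
   1 instead.  */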
4646 static rtx
4647 expand_builtin_signbit (tree exp, rtx target)
4648 {
4649 const struct real_format *fmt;
4650 machine_mode fmode, imode, rmode;
4651 tree arg;
4652 int word, bitpos;
4653 enum insn_code icode;
4654 rtx temp;
4655 location_t loc = EXPR_LOCATION (exp);
4656
4657 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4659
4660 arg = CALL_EXPR_ARG (exp, 0);
4661 fmode = TYPE_MODE (TREE_TYPE (arg));
4662 rmode = TYPE_MODE (TREE_TYPE (exp));
4663 fmt = REAL_MODE_FORMAT (fmode);
4664
4665 arg = builtin_save_expr (arg);
4666
4667 /* Expand the argument yielding a RTX expression. */
4668 temp = expand_normal (arg);
4669
4670 /* Check if the back end provides an insn that handles signbit for the
4671 argument's mode. */
4672 icode = optab_handler (signbit_optab, fmode);
4673 if (icode != CODE_FOR_nothing)
4674 {
4675 rtx_insn *last = get_last_insn ();
4676 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4677 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4678 return target;
4679 delete_insns_since (last);
4680 }
4681
4682 /* For floating point formats without a sign bit, implement signbit
4683 as "ARG < 0.0". */
4684 bitpos = fmt->signbit_ro;
4685 if (bitpos < 0)
4686 {
4687 /* But we can't do this if the format supports signed zero. */
4688 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4689
4690 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4691 build_real (TREE_TYPE (arg), dconst0));
4692 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4693 }
4694
4695 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4696 {
4697 imode = int_mode_for_mode (fmode);
4698 gcc_assert (imode != BLKmode);
4699 temp = gen_lowpart (imode, temp);
4700 }
4701 else
4702 {
4703 imode = word_mode;
4704 /* Handle targets with different FP word orders. */
4705 if (FLOAT_WORDS_BIG_ENDIAN)
4706 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4707 else
4708 word = bitpos / BITS_PER_WORD;
4709 temp = operand_subword_force (temp, word, fmode);
4710 bitpos = bitpos % BITS_PER_WORD;
4711 }
4712
4713 /* Force the intermediate word_mode (or narrower) result into a
4714 register. This avoids attempting to create paradoxical SUBREGs
4715 of floating point modes below. */
4716 temp = force_reg (imode, temp);
4717
4718 /* If the bitpos is within the "result mode" lowpart, the operation
4719 can be implemented with a single bitwise AND. Otherwise, we need
4720 a right shift and an AND. */
4721
4722 if (bitpos < GET_MODE_BITSIZE (rmode))
4723 {
4724 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4725
4726 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4727 temp = gen_lowpart (rmode, temp);
4728 temp = expand_binop (rmode, and_optab, temp,
4729 immed_wide_int_const (mask, rmode),
4730 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4731 }
4732 else
4733 {
4734 /* Perform a logical right shift to place the signbit in the least
4735 significant bit, then truncate the result to the desired mode
4736 and mask just this bit. */
4737 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4738 temp = gen_lowpart (rmode, temp);
4739 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4740 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4741 }
4742
4743 return temp;
4744 }
4745
4746 /* Expand fork or exec calls. TARGET is the desired target of the
4747 call. EXP is the call. FN is the declaration of the
4748 actual function. IGNORE is nonzero if the
4749 value is to be ignored. */
4750
4751 static rtx
4752 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4753 {
4754 tree id, decl;
4755 tree call;
4756
4757 /* If we are not profiling, just call the function. */
4758 if (!profile_arc_flag)
4759 return NULL_RTX;
4760
4761 /* Otherwise call the wrapper. This should be equivalent for the rest of
4762 the compiler, so the code does not diverge, and the wrapper may run the
4763 code necessary to keep the profiling sane. */
4764
4765 switch (DECL_FUNCTION_CODE (fn))
4766 {
4767 case BUILT_IN_FORK:
4768 id = get_identifier ("__gcov_fork");
4769 break;
4770
4771 case BUILT_IN_EXECL:
4772 id = get_identifier ("__gcov_execl");
4773 break;
4774
4775 case BUILT_IN_EXECV:
4776 id = get_identifier ("__gcov_execv");
4777 break;
4778
4779 case BUILT_IN_EXECLP:
4780 id = get_identifier ("__gcov_execlp");
4781 break;
4782
4783 case BUILT_IN_EXECLE:
4784 id = get_identifier ("__gcov_execle");
4785 break;
4786
4787 case BUILT_IN_EXECVP:
4788 id = get_identifier ("__gcov_execvp");
4789 break;
4790
4791 case BUILT_IN_EXECVE:
4792 id = get_identifier ("__gcov_execve");
4793 break;
4794
4795 default:
4796 gcc_unreachable ();
4797 }
4798
4799 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4800 FUNCTION_DECL, id, TREE_TYPE (fn));
4801 DECL_EXTERNAL (decl) = 1;
4802 TREE_PUBLIC (decl) = 1;
4803 DECL_ARTIFICIAL (decl) = 1;
4804 TREE_NOTHROW (decl) = 1;
4805 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4806 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4807 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4808 return expand_call (call, target, ignore);
4809 }
4810
4811
4812 \f
4813 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4814 the pointer in these functions is void*, the tree optimizers may remove
4815 casts. The mode computed in expand_builtin isn't reliable either, due
4816 to __sync_bool_compare_and_swap.
4817
4818 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4819 group of builtins. This gives us log2 of the mode size. */
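/* Illustrative example (editorial addition): the _1/_2/_4/_8/_16 variants
   of each __sync builtin are numbered consecutively, so an FCODE_DIFF of
   0, 1, 2, 3 or 4 selects the integer mode of 1, 2, 4, 8 or 16 bytes
   (BITS_PER_UNIT << FCODE_DIFF bits) respectively.  */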
4820
4821 static inline machine_mode
4822 get_builtin_sync_mode (int fcode_diff)
4823 {
4824 /* The size is not negotiable, so ask not to get BLKmode in return
4825 if the target indicates that a smaller size would be better. */
4826 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4827 }
4828
4829 /* Expand the memory expression LOC and return the appropriate memory operand
4830 for the builtin_sync operations. */
4831
4832 static rtx
4833 get_builtin_sync_mem (tree loc, machine_mode mode)
4834 {
4835 rtx addr, mem;
4836
4837 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4838 addr = convert_memory_address (Pmode, addr);
4839
4840 /* Note that we explicitly do not want any alias information for this
4841 memory, so that we kill all other live memories. Otherwise we don't
4842 satisfy the full barrier semantics of the intrinsic. */
4843 mem = validize_mem (gen_rtx_MEM (mode, addr));
4844
4845 /* The alignment needs to be at least according to that of the mode. */
4846 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4847 get_pointer_alignment (loc)));
4848 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4849 MEM_VOLATILE_P (mem) = 1;
4850
4851 return mem;
4852 }
4853
4854 /* Make sure an argument is in the right mode.
4855 EXP is the tree argument.
4856 MODE is the mode it should be in. */
4857
4858 static rtx
4859 expand_expr_force_mode (tree exp, machine_mode mode)
4860 {
4861 rtx val;
4862 machine_mode old_mode;
4863
4864 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4865 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4866 of CONST_INTs, where we know the old_mode only from the call argument. */
4867
4868 old_mode = GET_MODE (val);
4869 if (old_mode == VOIDmode)
4870 old_mode = TYPE_MODE (TREE_TYPE (exp));
4871 val = convert_modes (mode, old_mode, val, 1);
4872 return val;
4873 }
4874
4875
4876 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4877 EXP is the CALL_EXPR. CODE is the rtx code
4878 that corresponds to the arithmetic or logical operation from the name;
4879 an exception here is that NOT actually means NAND. TARGET is an optional
4880 place for us to store the results; AFTER is true if this is the
4881 fetch_and_xxx form. */
4882
4883 static rtx
4884 expand_builtin_sync_operation (machine_mode mode, tree exp,
4885 enum rtx_code code, bool after,
4886 rtx target)
4887 {
4888 rtx val, mem;
4889 location_t loc = EXPR_LOCATION (exp);
4890
4891 if (code == NOT && warn_sync_nand)
4892 {
4893 tree fndecl = get_callee_fndecl (exp);
4894 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4895
4896 static bool warned_f_a_n, warned_n_a_f;
4897
4898 switch (fcode)
4899 {
4900 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4901 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4902 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4903 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4904 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4905 if (warned_f_a_n)
4906 break;
4907
4908 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4909 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4910 warned_f_a_n = true;
4911 break;
4912
4913 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4914 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4915 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4916 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4917 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4918 if (warned_n_a_f)
4919 break;
4920
4921 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4922 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4923 warned_n_a_f = true;
4924 break;
4925
4926 default:
4927 gcc_unreachable ();
4928 }
4929 }
4930
4931 /* Expand the operands. */
4932 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4933 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4934
4935 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4936 after);
4937 }
4938
4939 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4940 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4941 true if this is the boolean form. TARGET is a place for us to store the
4942 results; this is NOT optional if IS_BOOL is true. */
4943
4944 static rtx
4945 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4946 bool is_bool, rtx target)
4947 {
4948 rtx old_val, new_val, mem;
4949 rtx *pbool, *poval;
4950
4951 /* Expand the operands. */
4952 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4953 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4954 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4955
4956 pbool = poval = NULL;
4957 if (target != const0_rtx)
4958 {
4959 if (is_bool)
4960 pbool = &target;
4961 else
4962 poval = &target;
4963 }
4964 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4965 false, MEMMODEL_SYNC_SEQ_CST,
4966 MEMMODEL_SYNC_SEQ_CST))
4967 return NULL_RTX;
4968
4969 return target;
4970 }
4971
4972 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4973 general form is actually an atomic exchange, and some targets only
4974 support a reduced form with the second argument being a constant 1.
4975 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4976 the results. */
4977
4978 static rtx
4979 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4980 rtx target)
4981 {
4982 rtx val, mem;
4983
4984 /* Expand the operands. */
4985 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4986 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4987
4988 return expand_sync_lock_test_and_set (target, mem, val);
4989 }
4990
4991 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4992
4993 static void
4994 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
4995 {
4996 rtx mem;
4997
4998 /* Expand the operands. */
4999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5000
5001 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5002 }
5003
5004 /* Given an integer representing an ``enum memmodel'', verify its
5005 correctness and return the memory model enum. */
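/* Illustrative example (editorial addition): the argument is normally one
   of the __ATOMIC_* constants of the C11/C++11 mapping, __ATOMIC_RELAXED
   (0) through __ATOMIC_SEQ_CST (5).  A call such as
       __atomic_load_n (p, __ATOMIC_CONSUME)
   reaches this function with the INTEGER_CST 1, which is promoted to
   MEMMODEL_ACQUIRE below.  */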
5006
5007 static enum memmodel
5008 get_memmodel (tree exp)
5009 {
5010 rtx op;
5011 unsigned HOST_WIDE_INT val;
5012 source_location loc
5013 = expansion_point_location_if_in_system_header (input_location);
5014
5015 /* If the parameter is not a constant, it's a run time value so we'll just
5016 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5017 if (TREE_CODE (exp) != INTEGER_CST)
5018 return MEMMODEL_SEQ_CST;
5019
5020 op = expand_normal (exp);
5021
5022 val = INTVAL (op);
5023 if (targetm.memmodel_check)
5024 val = targetm.memmodel_check (val);
5025 else if (val & ~MEMMODEL_MASK)
5026 {
5027 warning_at (loc, OPT_Winvalid_memory_model,
5028 "unknown architecture specifier in memory model to builtin");
5029 return MEMMODEL_SEQ_CST;
5030 }
5031
5032 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5033 if (memmodel_base (val) >= MEMMODEL_LAST)
5034 {
5035 warning_at (loc, OPT_Winvalid_memory_model,
5036 "invalid memory model argument to builtin");
5037 return MEMMODEL_SEQ_CST;
5038 }
5039
5040 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5041 be conservative and promote consume to acquire. */
5042 if (val == MEMMODEL_CONSUME)
5043 val = MEMMODEL_ACQUIRE;
5044
5045 return (enum memmodel) val;
5046 }
5047
5048 /* Expand the __atomic_exchange intrinsic:
5049 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5050 EXP is the CALL_EXPR.
5051 TARGET is an optional place for us to store the results. */
5052
5053 static rtx
5054 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5055 {
5056 rtx val, mem;
5057 enum memmodel model;
5058
5059 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5060
5061 if (!flag_inline_atomics)
5062 return NULL_RTX;
5063
5064 /* Expand the operands. */
5065 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5066 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5067
5068 return expand_atomic_exchange (target, mem, val, model);
5069 }
5070
5071 /* Expand the __atomic_compare_exchange intrinsic:
5072 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5073 TYPE desired, BOOL weak,
5074 enum memmodel success,
5075 enum memmodel failure)
5076 EXP is the CALL_EXPR.
5077 TARGET is an optional place for us to store the results. */
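/* Illustrative sketch (editorial addition) of the semantics being
   expanded, performed atomically:
       if (*object == *expect)
         { *object = desired; return true; }
       else
         { *expect = *object; return false; }
   The conditional store back to *EXPECT on failure is what the
   compare-and-jump sequence at the end of this function implements.  */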
5078
5079 static rtx
5080 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5081 rtx target)
5082 {
5083 rtx expect, desired, mem, oldval;
5084 rtx_code_label *label;
5085 enum memmodel success, failure;
5086 tree weak;
5087 bool is_weak;
5088 source_location loc
5089 = expansion_point_location_if_in_system_header (input_location);
5090
5091 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5092 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5093
5094 if (failure > success)
5095 {
5096 warning_at (loc, OPT_Winvalid_memory_model,
5097 "failure memory model cannot be stronger than success "
5098 "memory model for %<__atomic_compare_exchange%>");
5099 success = MEMMODEL_SEQ_CST;
5100 }
5101
5102 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5103 {
5104 warning_at (loc, OPT_Winvalid_memory_model,
5105 "invalid failure memory model for "
5106 "%<__atomic_compare_exchange%>");
5107 failure = MEMMODEL_SEQ_CST;
5108 success = MEMMODEL_SEQ_CST;
5109 }
5110
5111
5112 if (!flag_inline_atomics)
5113 return NULL_RTX;
5114
5115 /* Expand the operands. */
5116 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5117
5118 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5119 expect = convert_memory_address (Pmode, expect);
5120 expect = gen_rtx_MEM (mode, expect);
5121 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5122
5123 weak = CALL_EXPR_ARG (exp, 3);
5124 is_weak = false;
5125 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5126 is_weak = true;
5127
5128 if (target == const0_rtx)
5129 target = NULL;
5130
5131 /* Lest the rtl backend create a race condition with an improper store
5132 to memory, always create a new pseudo for OLDVAL. */
5133 oldval = NULL;
5134
5135 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5136 is_weak, success, failure))
5137 return NULL_RTX;
5138
5139 /* Conditionally store back to EXPECT, lest we create a race condition
5140 with an improper store to memory. */
5141 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5142 the normal case where EXPECT is totally private, i.e. a register. At
5143 which point the store can be unconditional. */
5144 label = gen_label_rtx ();
5145 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5146 GET_MODE (target), 1, label);
5147 emit_move_insn (expect, oldval);
5148 emit_label (label);
5149
5150 return target;
5151 }
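/* A minimal usage sketch for the builtin handled above, showing the
   constraints checked on the two memory model arguments (the failure model
   may be neither stronger than the success model nor RELEASE/ACQ_REL):

     extern int obj;
     int expected = 0;
     _Bool ok = __atomic_compare_exchange_n (&obj, &expected, 1, 0,
                                             __ATOMIC_ACQ_REL,
                                             __ATOMIC_ACQUIRE);

   The fourth argument is WEAK.  On failure the current value of OBJ is
   written back into EXPECTED, which is why the conditional store to EXPECT
   is emitted above.  */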
5152
5153 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5154 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5155 call. The weak parameter must be dropped to match the expected parameter
5156 list, and the expected argument changed from a value to a pointer to a
5157 memory slot. */
5158
5159 static void
5160 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5161 {
5162 unsigned int z;
5163 vec<tree, va_gc> *vec;
5164
5165 vec_alloc (vec, 5);
5166 vec->quick_push (gimple_call_arg (call, 0));
5167 tree expected = gimple_call_arg (call, 1);
5168 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5169 TREE_TYPE (expected));
5170 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5171 if (expd != x)
5172 emit_move_insn (x, expd);
5173 tree v = make_tree (TREE_TYPE (expected), x);
5174 vec->quick_push (build1 (ADDR_EXPR,
5175 build_pointer_type (TREE_TYPE (expected)), v));
5176 vec->quick_push (gimple_call_arg (call, 2));
5177 /* Skip the boolean weak parameter. */
5178 for (z = 4; z < 6; z++)
5179 vec->quick_push (gimple_call_arg (call, z));
5180 built_in_function fncode
5181 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5182 + exact_log2 (GET_MODE_SIZE (mode)));
5183 tree fndecl = builtin_decl_explicit (fncode);
5184 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5185 fndecl);
5186 tree exp = build_call_vec (boolean_type_node, fn, vec);
5187 tree lhs = gimple_call_lhs (call);
5188 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5189 if (lhs)
5190 {
5191 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5192 if (GET_MODE (boolret) != mode)
5193 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5194 x = force_reg (mode, x);
5195 write_complex_part (target, boolret, true);
5196 write_complex_part (target, x, false);
5197 }
5198 }
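/* For illustration, assuming a 4-byte access: an internal call of the form

     lhs = ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, 4 | weak << 8,
                                    success, failure);

   is rewritten here into roughly

     tmp = expected;
     __atomic_compare_exchange_4 (ptr, &tmp, desired, success, failure);

   i.e. the WEAK flag is dropped and EXPECTED is spilled to a stack slot so
   it can be passed by address, as the library interface requires; the
   boolean result and the updated TMP are then packed back into the complex
   LHS.  */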
5199
5200 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5201
5202 void
5203 expand_ifn_atomic_compare_exchange (gcall *call)
5204 {
5205 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5206 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5207 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5208 rtx expect, desired, mem, oldval, boolret;
5209 enum memmodel success, failure;
5210 tree lhs;
5211 bool is_weak;
5212 source_location loc
5213 = expansion_point_location_if_in_system_header (gimple_location (call));
5214
5215 success = get_memmodel (gimple_call_arg (call, 4));
5216 failure = get_memmodel (gimple_call_arg (call, 5));
5217
5218 if (failure > success)
5219 {
5220 warning_at (loc, OPT_Winvalid_memory_model,
5221 "failure memory model cannot be stronger than success "
5222 "memory model for %<__atomic_compare_exchange%>");
5223 success = MEMMODEL_SEQ_CST;
5224 }
5225
5226 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5227 {
5228 warning_at (loc, OPT_Winvalid_memory_model,
5229 "invalid failure memory model for "
5230 "%<__atomic_compare_exchange%>");
5231 failure = MEMMODEL_SEQ_CST;
5232 success = MEMMODEL_SEQ_CST;
5233 }
5234
5235 if (!flag_inline_atomics)
5236 {
5237 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5238 return;
5239 }
5240
5241 /* Expand the operands. */
5242 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5243
5244 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5245 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5246
5247 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5248
5249 boolret = NULL;
5250 oldval = NULL;
5251
5252 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5253 is_weak, success, failure))
5254 {
5255 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5256 return;
5257 }
5258
5259 lhs = gimple_call_lhs (call);
5260 if (lhs)
5261 {
5262 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5263 if (GET_MODE (boolret) != mode)
5264 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5265 write_complex_part (target, boolret, true);
5266 write_complex_part (target, oldval, false);
5267 }
5268 }
5269
5270 /* Expand the __atomic_load intrinsic:
5271 TYPE __atomic_load (TYPE *object, enum memmodel)
5272 EXP is the CALL_EXPR.
5273 TARGET is an optional place for us to store the results. */
5274
5275 static rtx
5276 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5277 {
5278 rtx mem;
5279 enum memmodel model;
5280
5281 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5282 if (is_mm_release (model) || is_mm_acq_rel (model))
5283 {
5284 source_location loc
5285 = expansion_point_location_if_in_system_header (input_location);
5286 warning_at (loc, OPT_Winvalid_memory_model,
5287 "invalid memory model for %<__atomic_load%>");
5288 model = MEMMODEL_SEQ_CST;
5289 }
5290
5291 if (!flag_inline_atomics)
5292 return NULL_RTX;
5293
5294 /* Expand the operand. */
5295 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5296
5297 return expand_atomic_load (target, mem, model);
5298 }
5299
5300
5301 /* Expand the __atomic_store intrinsic:
5302 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5303 EXP is the CALL_EXPR.
5304 TARGET is an optional place for us to store the results. */
5305
5306 static rtx
5307 expand_builtin_atomic_store (machine_mode mode, tree exp)
5308 {
5309 rtx mem, val;
5310 enum memmodel model;
5311
5312 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5313 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5314 || is_mm_release (model)))
5315 {
5316 source_location loc
5317 = expansion_point_location_if_in_system_header (input_location);
5318 warning_at (loc, OPT_Winvalid_memory_model,
5319 "invalid memory model for %<__atomic_store%>");
5320 model = MEMMODEL_SEQ_CST;
5321 }
5322
5323 if (!flag_inline_atomics)
5324 return NULL_RTX;
5325
5326 /* Expand the operands. */
5327 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5328 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5329
5330 return expand_atomic_store (mem, val, model, false);
5331 }
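/* A minimal usage sketch for the builtin handled above; only RELAXED,
   RELEASE and SEQ_CST are valid memory models for a store, which is what
   the check above enforces:

     extern int flag;
     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);  */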
5332
5333 /* Expand the __atomic_fetch_XXX intrinsic:
5334 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5335 EXP is the CALL_EXPR.
5336 TARGET is an optional place for us to store the results.
5337 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
5338 FETCH_AFTER is true if returning the result of the operation.
5339 FETCH_AFTER is false if returning the value before the operation.
5340 IGNORE is true if the result is not used.
5341 EXT_CALL is the correct builtin for an external call if this cannot be
5342 resolved to an instruction sequence. */
5343
5344 static rtx
5345 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5346 enum rtx_code code, bool fetch_after,
5347 bool ignore, enum built_in_function ext_call)
5348 {
5349 rtx val, mem, ret;
5350 enum memmodel model;
5351 tree fndecl;
5352 tree addr;
5353
5354 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5355
5356 /* Expand the operands. */
5357 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5358 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5359
5360 /* Only try generating instructions if inlining is turned on. */
5361 if (flag_inline_atomics)
5362 {
5363 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5364 if (ret)
5365 return ret;
5366 }
5367
5368 /* Return if a different routine isn't needed for the library call. */
5369 if (ext_call == BUILT_IN_NONE)
5370 return NULL_RTX;
5371
5372 /* Change the call to the specified function. */
5373 fndecl = get_callee_fndecl (exp);
5374 addr = CALL_EXPR_FN (exp);
5375 STRIP_NOPS (addr);
5376
5377 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5378 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5379
5380 /* Expand the call here so we can emit trailing code. */
5381 ret = expand_call (exp, target, ignore);
5382
5383 /* Restore the original function in case it matters later. */
5384 TREE_OPERAND (addr, 0) = fndecl;
5385
5386 /* Then issue the arithmetic correction to return the right result. */
5387 if (!ignore)
5388 {
5389 if (code == NOT)
5390 {
5391 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5392 OPTAB_LIB_WIDEN);
5393 ret = expand_simple_unop (mode, NOT, ret, target, true);
5394 }
5395 else
5396 ret = expand_simple_binop (mode, code, ret, val, target, true,
5397 OPTAB_LIB_WIDEN);
5398 }
5399 return ret;
5400 }
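/* Worked example of the trailing correction above, assuming the fetch-after
   form __atomic_add_fetch_4 could not be expanded inline: the call is
   redirected to the library routine __atomic_fetch_add_4, which returns the
   old value, and the new value is recovered by re-applying the operation:

     new = __atomic_fetch_add_4 (ptr, val, model) + val;

   For the NAND forms the correction is the two-step sequence emitted above,
   i.e. new = ~(old & val).  */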
5401
5402 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5403
5404 void
5405 expand_ifn_atomic_bit_test_and (gcall *call)
5406 {
5407 tree ptr = gimple_call_arg (call, 0);
5408 tree bit = gimple_call_arg (call, 1);
5409 tree flag = gimple_call_arg (call, 2);
5410 tree lhs = gimple_call_lhs (call);
5411 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5412 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5413 enum rtx_code code;
5414 optab optab;
5415 struct expand_operand ops[5];
5416
5417 gcc_assert (flag_inline_atomics);
5418
5419 if (gimple_call_num_args (call) == 4)
5420 model = get_memmodel (gimple_call_arg (call, 3));
5421
5422 rtx mem = get_builtin_sync_mem (ptr, mode);
5423 rtx val = expand_expr_force_mode (bit, mode);
5424
5425 switch (gimple_call_internal_fn (call))
5426 {
5427 case IFN_ATOMIC_BIT_TEST_AND_SET:
5428 code = IOR;
5429 optab = atomic_bit_test_and_set_optab;
5430 break;
5431 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5432 code = XOR;
5433 optab = atomic_bit_test_and_complement_optab;
5434 break;
5435 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5436 code = AND;
5437 optab = atomic_bit_test_and_reset_optab;
5438 break;
5439 default:
5440 gcc_unreachable ();
5441 }
5442
5443 if (lhs == NULL_TREE)
5444 {
5445 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5446 val, NULL_RTX, true, OPTAB_DIRECT);
5447 if (code == AND)
5448 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5449 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5450 return;
5451 }
5452
5453 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5454 enum insn_code icode = direct_optab_handler (optab, mode);
5455 gcc_assert (icode != CODE_FOR_nothing);
5456 create_output_operand (&ops[0], target, mode);
5457 create_fixed_operand (&ops[1], mem);
5458 create_convert_operand_to (&ops[2], val, mode, true);
5459 create_integer_operand (&ops[3], model);
5460 create_integer_operand (&ops[4], integer_onep (flag));
5461 if (maybe_expand_insn (icode, 5, ops))
5462 return;
5463
5464 rtx bitval = val;
5465 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5466 val, NULL_RTX, true, OPTAB_DIRECT);
5467 rtx maskval = val;
5468 if (code == AND)
5469 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5470 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5471 code, model, false);
5472 if (integer_onep (flag))
5473 {
5474 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5475 NULL_RTX, true, OPTAB_DIRECT);
5476 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5477 true, OPTAB_DIRECT);
5478 }
5479 else
5480 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5481 OPTAB_DIRECT);
5482 if (result != target)
5483 emit_move_insn (target, result);
5484 }
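/* For illustration: these internal functions are typically created by the
   tree optimizers from source patterns such as

     extern unsigned int word;
     extern unsigned int bit;
     int was_set = (__atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
                    >> bit) & 1;

   When the target provides an atomic_bit_test_and_set pattern the whole
   sequence maps to one instruction; otherwise the fallback above re-expands
   it as an atomic fetch op followed by a shift and mask.  */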
5485
5486 /* Expand an atomic clear operation.
5487 void __atomic_clear (BOOL *obj, enum memmodel)
5488 EXP is the call expression. */
5489
5490 static rtx
5491 expand_builtin_atomic_clear (tree exp)
5492 {
5493 machine_mode mode;
5494 rtx mem, ret;
5495 enum memmodel model;
5496
5497 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5498 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5499 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5500
5501 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5502 {
5503 source_location loc
5504 = expansion_point_location_if_in_system_header (input_location);
5505 warning_at (loc, OPT_Winvalid_memory_model,
5506 "invalid memory model for %<__atomic_store%>");
5507 model = MEMMODEL_SEQ_CST;
5508 }
5509
5510 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release.
5511 The only way this can fail is if the bool type is larger than a word size.
5512 Unlikely, but handle it anyway for completeness by emitting a plain store;
5513 assume a single threaded model since there is no atomic support in this
5514 case, and no barriers are required. */
5515 ret = expand_atomic_store (mem, const0_rtx, model, true);
5516 if (!ret)
5517 emit_move_insn (mem, const0_rtx);
5518 return const0_rtx;
5519 }
5520
5521 /* Expand an atomic test_and_set operation.
5522 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5523 EXP is the call expression. */
5524
5525 static rtx
5526 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5527 {
5528 rtx mem;
5529 enum memmodel model;
5530 machine_mode mode;
5531
5532 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5533 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5534 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5535
5536 return expand_atomic_test_and_set (target, mem, model);
5537 }
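/* A minimal usage sketch (a crude spin lock) for the two builtins above,
   purely illustrative:

     extern volatile unsigned char busy;

     void
     take_lock (void)
     {
       while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
         ;
     }

     void
     drop_lock (void)
     {
       __atomic_clear (&busy, __ATOMIC_RELEASE);
     }
*/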
5538
5539
5540 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5541 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5542
5543 static tree
5544 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5545 {
5546 int size;
5547 machine_mode mode;
5548 unsigned int mode_align, type_align;
5549
5550 if (TREE_CODE (arg0) != INTEGER_CST)
5551 return NULL_TREE;
5552
5553 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5554 mode = mode_for_size (size, MODE_INT, 0);
5555 mode_align = GET_MODE_ALIGNMENT (mode);
5556
5557 if (TREE_CODE (arg1) == INTEGER_CST)
5558 {
5559 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5560
5561 /* Either this argument is null, or it's a fake pointer encoding
5562 the alignment of the object. */
5563 val = least_bit_hwi (val);
5564 val *= BITS_PER_UNIT;
5565
5566 if (val == 0 || mode_align < val)
5567 type_align = mode_align;
5568 else
5569 type_align = val;
5570 }
5571 else
5572 {
5573 tree ttype = TREE_TYPE (arg1);
5574
5575 /* This function is usually invoked and folded immediately by the front
5576 end before anything else has a chance to look at it. The pointer
5577 parameter at this point is usually cast to a void *, so check for that
5578 and look past the cast. */
5579 if (CONVERT_EXPR_P (arg1)
5580 && POINTER_TYPE_P (ttype)
5581 && VOID_TYPE_P (TREE_TYPE (ttype))
5582 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5583 arg1 = TREE_OPERAND (arg1, 0);
5584
5585 ttype = TREE_TYPE (arg1);
5586 gcc_assert (POINTER_TYPE_P (ttype));
5587
5588 /* Get the underlying type of the object. */
5589 ttype = TREE_TYPE (ttype);
5590 type_align = TYPE_ALIGN (ttype);
5591 }
5592
5593 /* If the object has smaller alignment, the lock free routines cannot
5594 be used. */
5595 if (type_align < mode_align)
5596 return boolean_false_node;
5597
5598 /* Check if a compare_and_swap pattern exists for the mode which represents
5599 the required size. The pattern is not allowed to fail, so the existence
5600 of the pattern indicates support is present. */
5601 if (can_compare_and_swap_p (mode, true))
5602 return boolean_true_node;
5603 else
5604 return boolean_false_node;
5605 }
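/* For illustration: with typical alignments,

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true here whenever the target has a never-failing 4-byte
   compare-and-swap pattern; passing a pointer to an under-aligned object as
   the second argument instead makes it fold to false via the TYPE_ALIGN
   check above.  */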
5606
5607 /* Return true if the parameters to call EXP represent an object which will
5608 always generate lock free instructions. The first argument represents the
5609 size of the object, and the second parameter is a pointer to the object
5610 itself. If NULL is passed for the object, then the result is based on
5611 typical alignment for an object of the specified size. Otherwise return
5612 false. */
5613
5614 static rtx
5615 expand_builtin_atomic_always_lock_free (tree exp)
5616 {
5617 tree size;
5618 tree arg0 = CALL_EXPR_ARG (exp, 0);
5619 tree arg1 = CALL_EXPR_ARG (exp, 1);
5620
5621 if (TREE_CODE (arg0) != INTEGER_CST)
5622 {
5623 error ("non-constant argument 1 to __atomic_always_lock_free");
5624 return const0_rtx;
5625 }
5626
5627 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5628 if (size == boolean_true_node)
5629 return const1_rtx;
5630 return const0_rtx;
5631 }
5632
5633 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5634 is lock free on this architecture. */
5635
5636 static tree
5637 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5638 {
5639 if (!flag_inline_atomics)
5640 return NULL_TREE;
5641
5642 /* If it isn't always lock free, don't generate a result. */
5643 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5644 return boolean_true_node;
5645
5646 return NULL_TREE;
5647 }
5648
5649 /* Return const1_rtx if it can be determined that the object described by
5650 call EXP uses lock free instructions. The first argument is the size of the
5651 object, and the second parameter is a pointer to the object itself. If
5652 NULL is passed for the object, then the result is based on typical
5653 alignment for an object of the specified size. Otherwise return
5654 NULL_RTX. */
5655
5656 static rtx
5657 expand_builtin_atomic_is_lock_free (tree exp)
5658 {
5659 tree size;
5660 tree arg0 = CALL_EXPR_ARG (exp, 0);
5661 tree arg1 = CALL_EXPR_ARG (exp, 1);
5662
5663 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5664 {
5665 error ("non-integer argument 1 to __atomic_is_lock_free");
5666 return NULL_RTX;
5667 }
5668
5669 if (!flag_inline_atomics)
5670 return NULL_RTX;
5671
5672 /* If the value is known at compile time, return the RTX for it. */
5673 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5674 if (size == boolean_true_node)
5675 return const1_rtx;
5676
5677 return NULL_RTX;
5678 }
5679
5680 /* Expand the __atomic_thread_fence intrinsic:
5681 void __atomic_thread_fence (enum memmodel)
5682 EXP is the CALL_EXPR. */
5683
5684 static void
5685 expand_builtin_atomic_thread_fence (tree exp)
5686 {
5687 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5688 expand_mem_thread_fence (model);
5689 }
5690
5691 /* Expand the __atomic_signal_fence intrinsic:
5692 void __atomic_signal_fence (enum memmodel)
5693 EXP is the CALL_EXPR. */
5694
5695 static void
5696 expand_builtin_atomic_signal_fence (tree exp)
5697 {
5698 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5699 expand_mem_signal_fence (model);
5700 }
5701
5702 /* Expand the __sync_synchronize intrinsic. */
5703
5704 static void
5705 expand_builtin_sync_synchronize (void)
5706 {
5707 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5708 }
5709
5710 static rtx
5711 expand_builtin_thread_pointer (tree exp, rtx target)
5712 {
5713 enum insn_code icode;
5714 if (!validate_arglist (exp, VOID_TYPE))
5715 return const0_rtx;
5716 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5717 if (icode != CODE_FOR_nothing)
5718 {
5719 struct expand_operand op;
5720 /* If the target is not suitable then create a new target. */
5721 if (target == NULL_RTX
5722 || !REG_P (target)
5723 || GET_MODE (target) != Pmode)
5724 target = gen_reg_rtx (Pmode);
5725 create_output_operand (&op, target, Pmode);
5726 expand_insn (icode, 1, &op);
5727 return target;
5728 }
5729 error ("__builtin_thread_pointer is not supported on this target");
5730 return const0_rtx;
5731 }
5732
5733 static void
5734 expand_builtin_set_thread_pointer (tree exp)
5735 {
5736 enum insn_code icode;
5737 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5738 return;
5739 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5740 if (icode != CODE_FOR_nothing)
5741 {
5742 struct expand_operand op;
5743 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5744 Pmode, EXPAND_NORMAL);
5745 create_input_operand (&op, val, Pmode);
5746 expand_insn (icode, 1, &op);
5747 return;
5748 }
5749 error ("__builtin_set_thread_pointer is not supported on this target");
5750 }
5751
5752 \f
5753 /* Emit code to restore the current value of the stack. */
5754
5755 static void
5756 expand_stack_restore (tree var)
5757 {
5758 rtx_insn *prev;
5759 rtx sa = expand_normal (var);
5760
5761 sa = convert_memory_address (Pmode, sa);
5762
5763 prev = get_last_insn ();
5764 emit_stack_restore (SAVE_BLOCK, sa);
5765
5766 record_new_stack_level ();
5767
5768 fixup_args_size_notes (prev, get_last_insn (), 0);
5769 }
5770
5771 /* Emit code to save the current value of the stack. */
5772
5773 static rtx
5774 expand_stack_save (void)
5775 {
5776 rtx ret = NULL_RTX;
5777
5778 emit_stack_save (SAVE_BLOCK, &ret);
5779 return ret;
5780 }
5781
5782
5783 /* Expand an expression EXP that calls a built-in function,
5784 with result going to TARGET if that's convenient
5785 (and in mode MODE if that's convenient).
5786 SUBTARGET may be used as the target for computing one of EXP's operands.
5787 IGNORE is nonzero if the value is to be ignored. */
5788
5789 rtx
5790 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5791 int ignore)
5792 {
5793 tree fndecl = get_callee_fndecl (exp);
5794 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5795 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5796 int flags;
5797
5798 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5799 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5800
5801 /* When ASan is enabled, we don't want to expand some memory/string
5802 builtins and instead rely on libsanitizer's hooks. This allows us to
5803 avoid redundant checks and be sure that possible overflow will be
5804 detected by ASan. */
5805
5806 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5807 return expand_call (exp, target, ignore);
5808
5809 /* When not optimizing, generate calls to library functions for a certain
5810 set of builtins. */
5811 if (!optimize
5812 && !called_as_built_in (fndecl)
5813 && fcode != BUILT_IN_FORK
5814 && fcode != BUILT_IN_EXECL
5815 && fcode != BUILT_IN_EXECV
5816 && fcode != BUILT_IN_EXECLP
5817 && fcode != BUILT_IN_EXECLE
5818 && fcode != BUILT_IN_EXECVP
5819 && fcode != BUILT_IN_EXECVE
5820 && fcode != BUILT_IN_ALLOCA
5821 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5822 && fcode != BUILT_IN_FREE
5823 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5824 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5825 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5826 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5829 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5830 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5831 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5832 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5833 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5834 && fcode != BUILT_IN_CHKP_BNDRET)
5835 return expand_call (exp, target, ignore);
5836
5837 /* The built-in function expanders test for target == const0_rtx
5838 to determine whether the function's result will be ignored. */
5839 if (ignore)
5840 target = const0_rtx;
5841
5842 /* If the result of a pure or const built-in function is ignored, and
5843 none of its arguments are volatile, we can avoid expanding the
5844 built-in call and just evaluate the arguments for side-effects. */
5845 if (target == const0_rtx
5846 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5847 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5848 {
5849 bool volatilep = false;
5850 tree arg;
5851 call_expr_arg_iterator iter;
5852
5853 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5854 if (TREE_THIS_VOLATILE (arg))
5855 {
5856 volatilep = true;
5857 break;
5858 }
5859
5860 if (! volatilep)
5861 {
5862 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5863 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5864 return const0_rtx;
5865 }
5866 }
5867
5868 /* expand_builtin_with_bounds is supposed to be used for
5869 instrumented builtin calls. */
5870 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5871
5872 switch (fcode)
5873 {
5874 CASE_FLT_FN (BUILT_IN_FABS):
5875 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5876 case BUILT_IN_FABSD32:
5877 case BUILT_IN_FABSD64:
5878 case BUILT_IN_FABSD128:
5879 target = expand_builtin_fabs (exp, target, subtarget);
5880 if (target)
5881 return target;
5882 break;
5883
5884 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5885 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5886 target = expand_builtin_copysign (exp, target, subtarget);
5887 if (target)
5888 return target;
5889 break;
5890
5891 /* Just do a normal library call if we were unable to fold
5892 the values. */
5893 CASE_FLT_FN (BUILT_IN_CABS):
5894 break;
5895
5896 CASE_FLT_FN (BUILT_IN_FMA):
5897 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5898 if (target)
5899 return target;
5900 break;
5901
5902 CASE_FLT_FN (BUILT_IN_ILOGB):
5903 if (! flag_unsafe_math_optimizations)
5904 break;
5905 gcc_fallthrough ();
5906 CASE_FLT_FN (BUILT_IN_ISINF):
5907 CASE_FLT_FN (BUILT_IN_FINITE):
5908 case BUILT_IN_ISFINITE:
5909 case BUILT_IN_ISNORMAL:
5910 target = expand_builtin_interclass_mathfn (exp, target);
5911 if (target)
5912 return target;
5913 break;
5914
5915 CASE_FLT_FN (BUILT_IN_ICEIL):
5916 CASE_FLT_FN (BUILT_IN_LCEIL):
5917 CASE_FLT_FN (BUILT_IN_LLCEIL):
5918 CASE_FLT_FN (BUILT_IN_LFLOOR):
5919 CASE_FLT_FN (BUILT_IN_IFLOOR):
5920 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5921 target = expand_builtin_int_roundingfn (exp, target);
5922 if (target)
5923 return target;
5924 break;
5925
5926 CASE_FLT_FN (BUILT_IN_IRINT):
5927 CASE_FLT_FN (BUILT_IN_LRINT):
5928 CASE_FLT_FN (BUILT_IN_LLRINT):
5929 CASE_FLT_FN (BUILT_IN_IROUND):
5930 CASE_FLT_FN (BUILT_IN_LROUND):
5931 CASE_FLT_FN (BUILT_IN_LLROUND):
5932 target = expand_builtin_int_roundingfn_2 (exp, target);
5933 if (target)
5934 return target;
5935 break;
5936
5937 CASE_FLT_FN (BUILT_IN_POWI):
5938 target = expand_builtin_powi (exp, target);
5939 if (target)
5940 return target;
5941 break;
5942
5943 CASE_FLT_FN (BUILT_IN_CEXPI):
5944 target = expand_builtin_cexpi (exp, target);
5945 gcc_assert (target);
5946 return target;
5947
5948 CASE_FLT_FN (BUILT_IN_SIN):
5949 CASE_FLT_FN (BUILT_IN_COS):
5950 if (! flag_unsafe_math_optimizations)
5951 break;
5952 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5956
5957 CASE_FLT_FN (BUILT_IN_SINCOS):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 target = expand_builtin_sincos (exp);
5961 if (target)
5962 return target;
5963 break;
5964
5965 case BUILT_IN_APPLY_ARGS:
5966 return expand_builtin_apply_args ();
5967
5968 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5969 FUNCTION with a copy of the parameters described by
5970 ARGUMENTS, and ARGSIZE. It returns a block of memory
5971 allocated on the stack into which is stored all the registers
5972 that might possibly be used for returning the result of a
5973 function. ARGUMENTS is the value returned by
5974 __builtin_apply_args. ARGSIZE is the number of bytes of
5975 arguments that must be copied. ??? How should this value be
5976 computed? We'll also need a safe worst case value for varargs
5977 functions. */
5978 case BUILT_IN_APPLY:
5979 if (!validate_arglist (exp, POINTER_TYPE,
5980 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5981 && !validate_arglist (exp, REFERENCE_TYPE,
5982 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5983 return const0_rtx;
5984 else
5985 {
5986 rtx ops[3];
5987
5988 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5989 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5990 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5991
5992 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5993 }
5994
5995 /* __builtin_return (RESULT) causes the function to return the
5996 value described by RESULT. RESULT is address of the block of
5997 memory returned by __builtin_apply. */
5998 case BUILT_IN_RETURN:
5999 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6000 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6001 return const0_rtx;
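/* A minimal forwarding-wrapper sketch using the three builtins above,
   purely illustrative; the 64 below is a guessed worst-case ARGSIZE, which
   is exactly the open question noted in the comment above:

     extern int target_func ();

     int
     wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_func, args, 64);
       __builtin_return (ret);
     }
*/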
6002
6003 case BUILT_IN_SAVEREGS:
6004 return expand_builtin_saveregs ();
6005
6006 case BUILT_IN_VA_ARG_PACK:
6007 /* All valid uses of __builtin_va_arg_pack () are removed during
6008 inlining. */
6009 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6010 return const0_rtx;
6011
6012 case BUILT_IN_VA_ARG_PACK_LEN:
6013 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6014 inlining. */
6015 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6016 return const0_rtx;
6017
6018 /* Return the address of the first anonymous stack arg. */
6019 case BUILT_IN_NEXT_ARG:
6020 if (fold_builtin_next_arg (exp, false))
6021 return const0_rtx;
6022 return expand_builtin_next_arg ();
6023
6024 case BUILT_IN_CLEAR_CACHE:
6025 target = expand_builtin___clear_cache (exp);
6026 if (target)
6027 return target;
6028 break;
6029
6030 case BUILT_IN_CLASSIFY_TYPE:
6031 return expand_builtin_classify_type (exp);
6032
6033 case BUILT_IN_CONSTANT_P:
6034 return const0_rtx;
6035
6036 case BUILT_IN_FRAME_ADDRESS:
6037 case BUILT_IN_RETURN_ADDRESS:
6038 return expand_builtin_frame_address (fndecl, exp);
6039
6040 /* Returns the address of the area where the structure is returned.
6041 0 otherwise. */
6042 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6043 if (call_expr_nargs (exp) != 0
6044 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6045 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6046 return const0_rtx;
6047 else
6048 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6049
6050 case BUILT_IN_ALLOCA:
6051 case BUILT_IN_ALLOCA_WITH_ALIGN:
6052 /* If the allocation stems from the declaration of a variable-sized
6053 object, it cannot accumulate. */
6054 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6055 if (target)
6056 return target;
6057 break;
6058
6059 case BUILT_IN_STACK_SAVE:
6060 return expand_stack_save ();
6061
6062 case BUILT_IN_STACK_RESTORE:
6063 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6064 return const0_rtx;
6065
6066 case BUILT_IN_BSWAP16:
6067 case BUILT_IN_BSWAP32:
6068 case BUILT_IN_BSWAP64:
6069 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6070 if (target)
6071 return target;
6072 break;
6073
6074 CASE_INT_FN (BUILT_IN_FFS):
6075 target = expand_builtin_unop (target_mode, exp, target,
6076 subtarget, ffs_optab);
6077 if (target)
6078 return target;
6079 break;
6080
6081 CASE_INT_FN (BUILT_IN_CLZ):
6082 target = expand_builtin_unop (target_mode, exp, target,
6083 subtarget, clz_optab);
6084 if (target)
6085 return target;
6086 break;
6087
6088 CASE_INT_FN (BUILT_IN_CTZ):
6089 target = expand_builtin_unop (target_mode, exp, target,
6090 subtarget, ctz_optab);
6091 if (target)
6092 return target;
6093 break;
6094
6095 CASE_INT_FN (BUILT_IN_CLRSB):
6096 target = expand_builtin_unop (target_mode, exp, target,
6097 subtarget, clrsb_optab);
6098 if (target)
6099 return target;
6100 break;
6101
6102 CASE_INT_FN (BUILT_IN_POPCOUNT):
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, popcount_optab);
6105 if (target)
6106 return target;
6107 break;
6108
6109 CASE_INT_FN (BUILT_IN_PARITY):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, parity_optab);
6112 if (target)
6113 return target;
6114 break;
6115
6116 case BUILT_IN_STRLEN:
6117 target = expand_builtin_strlen (exp, target, target_mode);
6118 if (target)
6119 return target;
6120 break;
6121
6122 case BUILT_IN_STRCPY:
6123 target = expand_builtin_strcpy (exp, target);
6124 if (target)
6125 return target;
6126 break;
6127
6128 case BUILT_IN_STRNCPY:
6129 target = expand_builtin_strncpy (exp, target);
6130 if (target)
6131 return target;
6132 break;
6133
6134 case BUILT_IN_STPCPY:
6135 target = expand_builtin_stpcpy (exp, target, mode);
6136 if (target)
6137 return target;
6138 break;
6139
6140 case BUILT_IN_MEMCPY:
6141 target = expand_builtin_memcpy (exp, target);
6142 if (target)
6143 return target;
6144 break;
6145
6146 case BUILT_IN_MEMPCPY:
6147 target = expand_builtin_mempcpy (exp, target, mode);
6148 if (target)
6149 return target;
6150 break;
6151
6152 case BUILT_IN_MEMSET:
6153 target = expand_builtin_memset (exp, target, mode);
6154 if (target)
6155 return target;
6156 break;
6157
6158 case BUILT_IN_BZERO:
6159 target = expand_builtin_bzero (exp);
6160 if (target)
6161 return target;
6162 break;
6163
6164 case BUILT_IN_STRCMP:
6165 target = expand_builtin_strcmp (exp, target);
6166 if (target)
6167 return target;
6168 break;
6169
6170 case BUILT_IN_STRNCMP:
6171 target = expand_builtin_strncmp (exp, target, mode);
6172 if (target)
6173 return target;
6174 break;
6175
6176 case BUILT_IN_BCMP:
6177 case BUILT_IN_MEMCMP:
6178 case BUILT_IN_MEMCMP_EQ:
6179 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6180 if (target)
6181 return target;
6182 if (fcode == BUILT_IN_MEMCMP_EQ)
6183 {
6184 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6185 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6186 }
6187 break;
6188
6189 case BUILT_IN_SETJMP:
6190 /* This should have been lowered to the builtins below. */
6191 gcc_unreachable ();
6192
6193 case BUILT_IN_SETJMP_SETUP:
6194 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6195 and the receiver label. */
6196 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6197 {
6198 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6199 VOIDmode, EXPAND_NORMAL);
6200 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6201 rtx_insn *label_r = label_rtx (label);
6202
6203 /* This is copied from the handling of non-local gotos. */
6204 expand_builtin_setjmp_setup (buf_addr, label_r);
6205 nonlocal_goto_handler_labels
6206 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6207 nonlocal_goto_handler_labels);
6208 /* ??? Do not let expand_label treat us as such since we would
6209 not want to be both on the list of non-local labels and on
6210 the list of forced labels. */
6211 FORCED_LABEL (label) = 0;
6212 return const0_rtx;
6213 }
6214 break;
6215
6216 case BUILT_IN_SETJMP_RECEIVER:
6217 /* __builtin_setjmp_receiver is passed the receiver label. */
6218 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6219 {
6220 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6221 rtx_insn *label_r = label_rtx (label);
6222
6223 expand_builtin_setjmp_receiver (label_r);
6224 return const0_rtx;
6225 }
6226 break;
6227
6228 /* __builtin_longjmp is passed a pointer to an array of five words.
6229 It's similar to the C library longjmp function but works with
6230 __builtin_setjmp above. */
6231 case BUILT_IN_LONGJMP:
6232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6233 {
6234 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6235 VOIDmode, EXPAND_NORMAL);
6236 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6237
6238 if (value != const1_rtx)
6239 {
6240 error ("%<__builtin_longjmp%> second argument must be 1");
6241 return const0_rtx;
6242 }
6243
6244 expand_builtin_longjmp (buf_addr, value);
6245 return const0_rtx;
6246 }
6247 break;
6248
6249 case BUILT_IN_NONLOCAL_GOTO:
6250 target = expand_builtin_nonlocal_goto (exp);
6251 if (target)
6252 return target;
6253 break;
6254
6255 /* This updates the setjmp buffer that is its argument with the value
6256 of the current stack pointer. */
6257 case BUILT_IN_UPDATE_SETJMP_BUF:
6258 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6259 {
6260 rtx buf_addr
6261 = expand_normal (CALL_EXPR_ARG (exp, 0));
6262
6263 expand_builtin_update_setjmp_buf (buf_addr);
6264 return const0_rtx;
6265 }
6266 break;
6267
6268 case BUILT_IN_TRAP:
6269 expand_builtin_trap ();
6270 return const0_rtx;
6271
6272 case BUILT_IN_UNREACHABLE:
6273 expand_builtin_unreachable ();
6274 return const0_rtx;
6275
6276 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6277 case BUILT_IN_SIGNBITD32:
6278 case BUILT_IN_SIGNBITD64:
6279 case BUILT_IN_SIGNBITD128:
6280 target = expand_builtin_signbit (exp, target);
6281 if (target)
6282 return target;
6283 break;
6284
6285 /* Various hooks for the DWARF 2 __throw routine. */
6286 case BUILT_IN_UNWIND_INIT:
6287 expand_builtin_unwind_init ();
6288 return const0_rtx;
6289 case BUILT_IN_DWARF_CFA:
6290 return virtual_cfa_rtx;
6291 #ifdef DWARF2_UNWIND_INFO
6292 case BUILT_IN_DWARF_SP_COLUMN:
6293 return expand_builtin_dwarf_sp_column ();
6294 case BUILT_IN_INIT_DWARF_REG_SIZES:
6295 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6296 return const0_rtx;
6297 #endif
6298 case BUILT_IN_FROB_RETURN_ADDR:
6299 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6300 case BUILT_IN_EXTRACT_RETURN_ADDR:
6301 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6302 case BUILT_IN_EH_RETURN:
6303 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6304 CALL_EXPR_ARG (exp, 1));
6305 return const0_rtx;
6306 case BUILT_IN_EH_RETURN_DATA_REGNO:
6307 return expand_builtin_eh_return_data_regno (exp);
6308 case BUILT_IN_EXTEND_POINTER:
6309 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6310 case BUILT_IN_EH_POINTER:
6311 return expand_builtin_eh_pointer (exp);
6312 case BUILT_IN_EH_FILTER:
6313 return expand_builtin_eh_filter (exp);
6314 case BUILT_IN_EH_COPY_VALUES:
6315 return expand_builtin_eh_copy_values (exp);
6316
6317 case BUILT_IN_VA_START:
6318 return expand_builtin_va_start (exp);
6319 case BUILT_IN_VA_END:
6320 return expand_builtin_va_end (exp);
6321 case BUILT_IN_VA_COPY:
6322 return expand_builtin_va_copy (exp);
6323 case BUILT_IN_EXPECT:
6324 return expand_builtin_expect (exp, target);
6325 case BUILT_IN_ASSUME_ALIGNED:
6326 return expand_builtin_assume_aligned (exp, target);
6327 case BUILT_IN_PREFETCH:
6328 expand_builtin_prefetch (exp);
6329 return const0_rtx;
6330
6331 case BUILT_IN_INIT_TRAMPOLINE:
6332 return expand_builtin_init_trampoline (exp, true);
6333 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6334 return expand_builtin_init_trampoline (exp, false);
6335 case BUILT_IN_ADJUST_TRAMPOLINE:
6336 return expand_builtin_adjust_trampoline (exp);
6337
6338 case BUILT_IN_FORK:
6339 case BUILT_IN_EXECL:
6340 case BUILT_IN_EXECV:
6341 case BUILT_IN_EXECLP:
6342 case BUILT_IN_EXECLE:
6343 case BUILT_IN_EXECVP:
6344 case BUILT_IN_EXECVE:
6345 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6346 if (target)
6347 return target;
6348 break;
6349
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6353 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6354 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6355 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6356 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6357 if (target)
6358 return target;
6359 break;
6360
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6364 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6365 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6366 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6367 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6368 if (target)
6369 return target;
6370 break;
6371
6372 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6374 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6375 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6376 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6378 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6379 if (target)
6380 return target;
6381 break;
6382
6383 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6385 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6386 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6387 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6389 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6390 if (target)
6391 return target;
6392 break;
6393
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6397 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6398 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6399 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6400 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6401 if (target)
6402 return target;
6403 break;
6404
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6408 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6409 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6410 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6411 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6412 if (target)
6413 return target;
6414 break;
6415
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6419 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6420 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6421 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6422 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6423 if (target)
6424 return target;
6425 break;
6426
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6430 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6431 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6432 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6433 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6434 if (target)
6435 return target;
6436 break;
6437
6438 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6440 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6441 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6442 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6443 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6444 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6445 if (target)
6446 return target;
6447 break;
6448
6449 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6451 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6452 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6453 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6454 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6455 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6456 if (target)
6457 return target;
6458 break;
6459
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6463 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6464 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6465 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6466 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6467 if (target)
6468 return target;
6469 break;
6470
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6474 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6475 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6477 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6485 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6486 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6487 if (mode == VOIDmode)
6488 mode = TYPE_MODE (boolean_type_node);
6489 if (!target || !register_operand (target, mode))
6490 target = gen_reg_rtx (mode);
6491
6492 mode = get_builtin_sync_mode
6493 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6494 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6495 if (target)
6496 return target;
6497 break;
6498
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6502 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6503 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6504 mode = get_builtin_sync_mode
6505 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6506 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6507 if (target)
6508 return target;
6509 break;
6510
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6514 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6515 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6517 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6518 if (target)
6519 return target;
6520 break;
6521
6522 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6524 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6525 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6526 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6528 expand_builtin_sync_lock_release (mode, exp);
6529 return const0_rtx;
6530
6531 case BUILT_IN_SYNC_SYNCHRONIZE:
6532 expand_builtin_sync_synchronize ();
6533 return const0_rtx;
6534
6535 case BUILT_IN_ATOMIC_EXCHANGE_1:
6536 case BUILT_IN_ATOMIC_EXCHANGE_2:
6537 case BUILT_IN_ATOMIC_EXCHANGE_4:
6538 case BUILT_IN_ATOMIC_EXCHANGE_8:
6539 case BUILT_IN_ATOMIC_EXCHANGE_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6541 target = expand_builtin_atomic_exchange (mode, exp, target);
6542 if (target)
6543 return target;
6544 break;
6545
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6550 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6551 {
6552 unsigned int nargs, z;
6553 vec<tree, va_gc> *vec;
6554
6555 mode =
6556 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6557 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6558 if (target)
6559 return target;
6560
6561 /* If this is turned into an external library call, the weak parameter
6562 must be dropped to match the expected parameter list. */
6563 nargs = call_expr_nargs (exp);
6564 vec_alloc (vec, nargs - 1);
6565 for (z = 0; z < 3; z++)
6566 vec->quick_push (CALL_EXPR_ARG (exp, z));
6567 /* Skip the boolean weak parameter. */
6568 for (z = 4; z < 6; z++)
6569 vec->quick_push (CALL_EXPR_ARG (exp, z));
6570 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6571 break;
6572 }
6573
6574 case BUILT_IN_ATOMIC_LOAD_1:
6575 case BUILT_IN_ATOMIC_LOAD_2:
6576 case BUILT_IN_ATOMIC_LOAD_4:
6577 case BUILT_IN_ATOMIC_LOAD_8:
6578 case BUILT_IN_ATOMIC_LOAD_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6580 target = expand_builtin_atomic_load (mode, exp, target);
6581 if (target)
6582 return target;
6583 break;
6584
6585 case BUILT_IN_ATOMIC_STORE_1:
6586 case BUILT_IN_ATOMIC_STORE_2:
6587 case BUILT_IN_ATOMIC_STORE_4:
6588 case BUILT_IN_ATOMIC_STORE_8:
6589 case BUILT_IN_ATOMIC_STORE_16:
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6591 target = expand_builtin_atomic_store (mode, exp);
6592 if (target)
6593 return const0_rtx;
6594 break;
6595
6596 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6598 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6599 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6600 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6601 {
6602 enum built_in_function lib;
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6604 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6605 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6606 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6607 ignore, lib);
6608 if (target)
6609 return target;
6610 break;
6611 }
6612 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6614 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6615 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6616 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6617 {
6618 enum built_in_function lib;
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6620 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6621 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6622 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6623 ignore, lib);
6624 if (target)
6625 return target;
6626 break;
6627 }
6628 case BUILT_IN_ATOMIC_AND_FETCH_1:
6629 case BUILT_IN_ATOMIC_AND_FETCH_2:
6630 case BUILT_IN_ATOMIC_AND_FETCH_4:
6631 case BUILT_IN_ATOMIC_AND_FETCH_8:
6632 case BUILT_IN_ATOMIC_AND_FETCH_16:
6633 {
6634 enum built_in_function lib;
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6636 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6637 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6638 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6639 ignore, lib);
6640 if (target)
6641 return target;
6642 break;
6643 }
6644 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6646 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6647 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6648 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6649 {
6650 enum built_in_function lib;
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6652 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6653 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6654 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6655 ignore, lib);
6656 if (target)
6657 return target;
6658 break;
6659 }
6660 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6662 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6663 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6664 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6665 {
6666 enum built_in_function lib;
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6668 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6669 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6670 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6671 ignore, lib);
6672 if (target)
6673 return target;
6674 break;
6675 }
6676 case BUILT_IN_ATOMIC_OR_FETCH_1:
6677 case BUILT_IN_ATOMIC_OR_FETCH_2:
6678 case BUILT_IN_ATOMIC_OR_FETCH_4:
6679 case BUILT_IN_ATOMIC_OR_FETCH_8:
6680 case BUILT_IN_ATOMIC_OR_FETCH_16:
6681 {
6682 enum built_in_function lib;
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6685 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6686 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6687 ignore, lib);
6688 if (target)
6689 return target;
6690 break;
6691 }
6692 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6694 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6695 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6696 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6698 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6699 ignore, BUILT_IN_NONE);
6700 if (target)
6701 return target;
6702 break;
6703
6704 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6706 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6707 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6708 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6710 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6711 ignore, BUILT_IN_NONE);
6712 if (target)
6713 return target;
6714 break;
6715
6716 case BUILT_IN_ATOMIC_FETCH_AND_1:
6717 case BUILT_IN_ATOMIC_FETCH_AND_2:
6718 case BUILT_IN_ATOMIC_FETCH_AND_4:
6719 case BUILT_IN_ATOMIC_FETCH_AND_8:
6720 case BUILT_IN_ATOMIC_FETCH_AND_16:
6721 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6722 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6723 ignore, BUILT_IN_NONE);
6724 if (target)
6725 return target;
6726 break;
6727
6728 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6730 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6731 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6732 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6734 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6735 ignore, BUILT_IN_NONE);
6736 if (target)
6737 return target;
6738 break;
6739
6740 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6742 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6743 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6744 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6745 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6746 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6747 ignore, BUILT_IN_NONE);
6748 if (target)
6749 return target;
6750 break;
6751
6752 case BUILT_IN_ATOMIC_FETCH_OR_1:
6753 case BUILT_IN_ATOMIC_FETCH_OR_2:
6754 case BUILT_IN_ATOMIC_FETCH_OR_4:
6755 case BUILT_IN_ATOMIC_FETCH_OR_8:
6756 case BUILT_IN_ATOMIC_FETCH_OR_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6758 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6759 ignore, BUILT_IN_NONE);
6760 if (target)
6761 return target;
6762 break;
6763
6764 case BUILT_IN_ATOMIC_TEST_AND_SET:
6765 return expand_builtin_atomic_test_and_set (exp, target);
6766
6767 case BUILT_IN_ATOMIC_CLEAR:
6768 return expand_builtin_atomic_clear (exp);
6769
6770 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6771 return expand_builtin_atomic_always_lock_free (exp);
6772
6773 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6774 target = expand_builtin_atomic_is_lock_free (exp);
6775 if (target)
6776 return target;
6777 break;
6778
6779 case BUILT_IN_ATOMIC_THREAD_FENCE:
6780 expand_builtin_atomic_thread_fence (exp);
6781 return const0_rtx;
6782
6783 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6784 expand_builtin_atomic_signal_fence (exp);
6785 return const0_rtx;
6786
6787 case BUILT_IN_OBJECT_SIZE:
6788 return expand_builtin_object_size (exp);
6789
6790 case BUILT_IN_MEMCPY_CHK:
6791 case BUILT_IN_MEMPCPY_CHK:
6792 case BUILT_IN_MEMMOVE_CHK:
6793 case BUILT_IN_MEMSET_CHK:
6794 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6795 if (target)
6796 return target;
6797 break;
6798
6799 case BUILT_IN_STRCPY_CHK:
6800 case BUILT_IN_STPCPY_CHK:
6801 case BUILT_IN_STRNCPY_CHK:
6802 case BUILT_IN_STPNCPY_CHK:
6803 case BUILT_IN_STRCAT_CHK:
6804 case BUILT_IN_STRNCAT_CHK:
6805 case BUILT_IN_SNPRINTF_CHK:
6806 case BUILT_IN_VSNPRINTF_CHK:
6807 maybe_emit_chk_warning (exp, fcode);
6808 break;
6809
6810 case BUILT_IN_SPRINTF_CHK:
6811 case BUILT_IN_VSPRINTF_CHK:
6812 maybe_emit_sprintf_chk_warning (exp, fcode);
6813 break;
6814
6815 case BUILT_IN_FREE:
6816 if (warn_free_nonheap_object)
6817 maybe_emit_free_warning (exp);
6818 break;
6819
6820 case BUILT_IN_THREAD_POINTER:
6821 return expand_builtin_thread_pointer (exp, target);
6822
6823 case BUILT_IN_SET_THREAD_POINTER:
6824 expand_builtin_set_thread_pointer (exp);
6825 return const0_rtx;
6826
6827 case BUILT_IN_CILK_DETACH:
6828 expand_builtin_cilk_detach (exp);
6829 return const0_rtx;
6830
6831 case BUILT_IN_CILK_POP_FRAME:
6832 expand_builtin_cilk_pop_frame (exp);
6833 return const0_rtx;
6834
6835 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6836 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6839 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6840 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6841 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6842 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6843 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6844 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6845 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6846 /* We allow user CHKP builtins if Pointer Bounds
6847 Checker is off. */
6848 if (!chkp_function_instrumented_p (current_function_decl))
6849 {
6850 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6851 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6852 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6853 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6854 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6855 return expand_normal (CALL_EXPR_ARG (exp, 0));
6856 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6857 return expand_normal (size_zero_node);
6858 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6859 return expand_normal (size_int (-1));
6860 else
6861 return const0_rtx;
6862 }
6863 /* FALLTHROUGH */
6864
6865 case BUILT_IN_CHKP_BNDMK:
6866 case BUILT_IN_CHKP_BNDSTX:
6867 case BUILT_IN_CHKP_BNDCL:
6868 case BUILT_IN_CHKP_BNDCU:
6869 case BUILT_IN_CHKP_BNDLDX:
6870 case BUILT_IN_CHKP_BNDRET:
6871 case BUILT_IN_CHKP_INTERSECT:
6872 case BUILT_IN_CHKP_NARROW:
6873 case BUILT_IN_CHKP_EXTRACT_LOWER:
6874 case BUILT_IN_CHKP_EXTRACT_UPPER:
6875 /* Software implementation of Pointer Bounds Checker is NYI.
6876 Target support is required. */
6877 error ("Your target platform does not support -fcheck-pointer-bounds");
6878 break;
6879
6880 case BUILT_IN_ACC_ON_DEVICE:
6881 /* Fall back to a library call if we failed to expand the builtin
6882 when folding. */
6883 break;
6884
6885 default: /* just do library call, if unknown builtin */
6886 break;
6887 }
6888
6889 /* The switch statement above can drop through to cause the function
6890 to be called normally. */
6891 return expand_call (exp, target, ignore);
6892 }
6893
6894 /* Similar to expand_builtin but is used for instrumented calls. */
6895
6896 rtx
6897 expand_builtin_with_bounds (tree exp, rtx target,
6898 rtx subtarget ATTRIBUTE_UNUSED,
6899 machine_mode mode, int ignore)
6900 {
6901 tree fndecl = get_callee_fndecl (exp);
6902 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6903
6904 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6905
6906 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6907 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6908
6909 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6910 && fcode < END_CHKP_BUILTINS);
6911
6912 switch (fcode)
6913 {
6914 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6915 target = expand_builtin_memcpy_with_bounds (exp, target);
6916 if (target)
6917 return target;
6918 break;
6919
6920 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6921 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6922 if (target)
6923 return target;
6924 break;
6925
6926 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6927 target = expand_builtin_memset_with_bounds (exp, target, mode);
6928 if (target)
6929 return target;
6930 break;
6931
6932 default:
6933 break;
6934 }
6935
6936 /* The switch statement above can drop through to cause the function
6937 to be called normally. */
6938 return expand_call (exp, target, ignore);
6939 }
6940
6941 /* Determine whether a tree node represents a call to a built-in
6942 function. If the tree T is a call to a built-in function with
6943 the right number of arguments of the appropriate types, return
6944 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6945 Otherwise the return value is END_BUILTINS. */
6946
6947 enum built_in_function
6948 builtin_mathfn_code (const_tree t)
6949 {
6950 const_tree fndecl, arg, parmlist;
6951 const_tree argtype, parmtype;
6952 const_call_expr_arg_iterator iter;
6953
6954 if (TREE_CODE (t) != CALL_EXPR
6955 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6956 return END_BUILTINS;
6957
6958 fndecl = get_callee_fndecl (t);
6959 if (fndecl == NULL_TREE
6960 || TREE_CODE (fndecl) != FUNCTION_DECL
6961 || ! DECL_BUILT_IN (fndecl)
6962 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6963 return END_BUILTINS;
6964
6965 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6966 init_const_call_expr_arg_iterator (t, &iter);
6967 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6968 {
6969 /* If a function doesn't take a variable number of arguments,
6970 the last element in the list will have type `void'. */
6971 parmtype = TREE_VALUE (parmlist);
6972 if (VOID_TYPE_P (parmtype))
6973 {
6974 if (more_const_call_expr_args_p (&iter))
6975 return END_BUILTINS;
6976 return DECL_FUNCTION_CODE (fndecl);
6977 }
6978
6979 if (! more_const_call_expr_args_p (&iter))
6980 return END_BUILTINS;
6981
6982 arg = next_const_call_expr_arg (&iter);
6983 argtype = TREE_TYPE (arg);
6984
6985 if (SCALAR_FLOAT_TYPE_P (parmtype))
6986 {
6987 if (! SCALAR_FLOAT_TYPE_P (argtype))
6988 return END_BUILTINS;
6989 }
6990 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6991 {
6992 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6993 return END_BUILTINS;
6994 }
6995 else if (POINTER_TYPE_P (parmtype))
6996 {
6997 if (! POINTER_TYPE_P (argtype))
6998 return END_BUILTINS;
6999 }
7000 else if (INTEGRAL_TYPE_P (parmtype))
7001 {
7002 if (! INTEGRAL_TYPE_P (argtype))
7003 return END_BUILTINS;
7004 }
7005 else
7006 return END_BUILTINS;
7007 }
7008
7009 /* Variable-length argument list. */
7010 return DECL_FUNCTION_CODE (fndecl);
7011 }
7012
7013 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7014 evaluate to a constant. */
7015
7016 static tree
7017 fold_builtin_constant_p (tree arg)
7018 {
7019 /* We return 1 for a numeric type that's known to be a constant
7020 value at compile-time or for an aggregate type that's a
7021 literal constant. */
7022 STRIP_NOPS (arg);
7023
7024 /* If we know this is a constant, return the constant one. */
7025 if (CONSTANT_CLASS_P (arg)
7026 || (TREE_CODE (arg) == CONSTRUCTOR
7027 && TREE_CONSTANT (arg)))
7028 return integer_one_node;
7029 if (TREE_CODE (arg) == ADDR_EXPR)
7030 {
7031 tree op = TREE_OPERAND (arg, 0);
7032 if (TREE_CODE (op) == STRING_CST
7033 || (TREE_CODE (op) == ARRAY_REF
7034 && integer_zerop (TREE_OPERAND (op, 1))
7035 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7036 return integer_one_node;
7037 }
7038
7039 /* If this expression has side effects, show we don't know it to be a
7040 constant. Likewise if it's a pointer or aggregate type since in
7041 those cases we only want literals, since those are only optimized
7042 when generating RTL, not later.
7043 And finally, if we are compiling an initializer, not code, we
7044 need to return a definite result now; there's not going to be any
7045 more optimization done. */
7046 if (TREE_SIDE_EFFECTS (arg)
7047 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7048 || POINTER_TYPE_P (TREE_TYPE (arg))
7049 || cfun == 0
7050 || folding_initializer
7051 || force_folding_builtin_constant_p)
7052 return integer_zero_node;
7053
7054 return NULL_TREE;
7055 }
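
/* For instance, the folding above turns __builtin_constant_p (42) and
   __builtin_constant_p ("abc") into 1 (a constant and the address of a
   string literal) and __builtin_constant_p (x++) into 0 (the argument
   has side effects), while a plain variable argument is left unfolded
   here so that later optimizations still get a chance to prove it
   constant.  */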
7056
7057 /* Create a call to builtin_expect with PRED, EXPECTED and PREDICTOR as
7058 its arguments and return it as a truthvalue. */
7059
7060 static tree
7061 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7062 tree predictor)
7063 {
7064 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7065
7066 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7067 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7068 ret_type = TREE_TYPE (TREE_TYPE (fn));
7069 pred_type = TREE_VALUE (arg_types);
7070 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7071
7072 pred = fold_convert_loc (loc, pred_type, pred);
7073 expected = fold_convert_loc (loc, expected_type, expected);
7074 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7075 predictor);
7076
7077 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7078 build_int_cst (ret_type, 0));
7079 }
7080
7081 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7082 NULL_TREE if no simplification is possible. */
7083
7084 tree
7085 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7086 {
7087 tree inner, fndecl, inner_arg0;
7088 enum tree_code code;
7089
7090 /* Distribute the expected value over short-circuiting operators.
7091 See through the cast from truthvalue_type_node to long. */
7092 inner_arg0 = arg0;
7093 while (CONVERT_EXPR_P (inner_arg0)
7094 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7095 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7096 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7097
7098 /* If this is a builtin_expect within a builtin_expect, keep the
7099 inner one. See through a comparison against a constant. It
7100 might have been added to create a truthvalue. */
7101 inner = inner_arg0;
7102
7103 if (COMPARISON_CLASS_P (inner)
7104 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7105 inner = TREE_OPERAND (inner, 0);
7106
7107 if (TREE_CODE (inner) == CALL_EXPR
7108 && (fndecl = get_callee_fndecl (inner))
7109 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7110 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7111 return arg0;
7112
7113 inner = inner_arg0;
7114 code = TREE_CODE (inner);
7115 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7116 {
7117 tree op0 = TREE_OPERAND (inner, 0);
7118 tree op1 = TREE_OPERAND (inner, 1);
7119
7120 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7121 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7122 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7123
7124 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7125 }
7126
7127 /* If the argument isn't invariant then there's nothing else we can do. */
7128 if (!TREE_CONSTANT (inner_arg0))
7129 return NULL_TREE;
7130
7131 /* If we expect that a comparison against the argument will fold to
7132 a constant return the constant. In practice, this means a true
7133 constant or the address of a non-weak symbol. */
7134 inner = inner_arg0;
7135 STRIP_NOPS (inner);
7136 if (TREE_CODE (inner) == ADDR_EXPR)
7137 {
7138 do
7139 {
7140 inner = TREE_OPERAND (inner, 0);
7141 }
7142 while (TREE_CODE (inner) == COMPONENT_REF
7143 || TREE_CODE (inner) == ARRAY_REF);
7144 if ((TREE_CODE (inner) == VAR_DECL
7145 || TREE_CODE (inner) == FUNCTION_DECL)
7146 && DECL_WEAK (inner))
7147 return NULL_TREE;
7148 }
7149
7150 /* Otherwise, ARG0 already has the proper type for the return value. */
7151 return arg0;
7152 }
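
/* For example, with the distribution above a call such as
   __builtin_expect (a && b, 0) becomes, roughly,

       (__builtin_expect ((long) (a), 0) != 0)
       && (__builtin_expect ((long) (b), 0) != 0)

   so that each arm of the short-circuit carries its own expectation.  */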
7153
7154 /* Fold a call to __builtin_classify_type with argument ARG. */
7155
7156 static tree
7157 fold_builtin_classify_type (tree arg)
7158 {
7159 if (arg == 0)
7160 return build_int_cst (integer_type_node, no_type_class);
7161
7162 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7163 }
7164
7165 /* Fold a call to __builtin_strlen with argument ARG. */
7166
7167 static tree
7168 fold_builtin_strlen (location_t loc, tree type, tree arg)
7169 {
7170 if (!validate_arg (arg, POINTER_TYPE))
7171 return NULL_TREE;
7172 else
7173 {
7174 tree len = c_strlen (arg, 0);
7175
7176 if (len)
7177 return fold_convert_loc (loc, type, len);
7178
7179 return NULL_TREE;
7180 }
7181 }
7182
7183 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7184
7185 static tree
7186 fold_builtin_inf (location_t loc, tree type, int warn)
7187 {
7188 REAL_VALUE_TYPE real;
7189
7190 /* __builtin_inff is intended to be usable to define INFINITY on all
7191 targets. If an infinity is not available, INFINITY expands "to a
7192 positive constant of type float that overflows at translation
7193 time", footnote "In this case, using INFINITY will violate the
7194 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7195 Thus we pedwarn to ensure this constraint violation is
7196 diagnosed. */
7197 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7198 pedwarn (loc, 0, "target format does not support infinity");
7199
7200 real_inf (&real);
7201 return build_real (type, real);
7202 }
7203
7204 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7205 NULL_TREE if no simplification can be made. */
7206
7207 static tree
7208 fold_builtin_sincos (location_t loc,
7209 tree arg0, tree arg1, tree arg2)
7210 {
7211 tree type;
7212 tree fndecl, call = NULL_TREE;
7213
7214 if (!validate_arg (arg0, REAL_TYPE)
7215 || !validate_arg (arg1, POINTER_TYPE)
7216 || !validate_arg (arg2, POINTER_TYPE))
7217 return NULL_TREE;
7218
7219 type = TREE_TYPE (arg0);
7220
7221 /* Canonicalize sincos to cexpi. */
7222 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7223 if (fn == END_BUILTINS)
7224 return NULL_TREE;
7225
7226 /* Calculate the result when the argument is a constant. */
7227 if (TREE_CODE (arg0) == REAL_CST)
7228 {
7229 tree complex_type = build_complex_type (type);
7230 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7231 }
7232 if (!call)
7233 {
7234 if (!targetm.libc_has_function (function_c99_math_complex)
7235 || !builtin_decl_implicit_p (fn))
7236 return NULL_TREE;
7237 fndecl = builtin_decl_explicit (fn);
7238 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7239 call = builtin_save_expr (call);
7240 }
7241
7242 return build2 (COMPOUND_EXPR, void_type_node,
7243 build2 (MODIFY_EXPR, void_type_node,
7244 build_fold_indirect_ref_loc (loc, arg1),
7245 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7246 build2 (MODIFY_EXPR, void_type_node,
7247 build_fold_indirect_ref_loc (loc, arg2),
7248 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7249 }
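
/* As a source-level sketch, sincos (x, &s, &c) is rewritten here into
   the equivalent of

       tmp = cexpi (x);       (or a constant-folded complex value)
       s = __imag__ tmp;
       c = __real__ tmp;

   provided the argument is a real constant or the target libc provides
   the C99 complex math functions.  */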
7250
7251 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7252 arguments to the call, and TYPE is its return type.
7253 Return NULL_TREE if no simplification can be made. */
7254
7255 static tree
7256 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7257 {
7258 if (!validate_arg (arg1, POINTER_TYPE)
7259 || !validate_arg (arg2, INTEGER_TYPE)
7260 || !validate_arg (len, INTEGER_TYPE))
7261 return NULL_TREE;
7262 else
7263 {
7264 const char *p1;
7265
7266 if (TREE_CODE (arg2) != INTEGER_CST
7267 || !tree_fits_uhwi_p (len))
7268 return NULL_TREE;
7269
7270 p1 = c_getstr (arg1);
7271 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7272 {
7273 char c;
7274 const char *r;
7275 tree tem;
7276
7277 if (target_char_cast (arg2, &c))
7278 return NULL_TREE;
7279
7280 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7281
7282 if (r == NULL)
7283 return build_int_cst (TREE_TYPE (arg1), 0);
7284
7285 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7286 return fold_convert_loc (loc, type, tem);
7287 }
7288 return NULL_TREE;
7289 }
7290 }
7291
7292 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7293 Return NULL_TREE if no simplification can be made. */
7294
7295 static tree
7296 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7297 {
7298 if (!validate_arg (arg1, POINTER_TYPE)
7299 || !validate_arg (arg2, POINTER_TYPE)
7300 || !validate_arg (len, INTEGER_TYPE))
7301 return NULL_TREE;
7302
7303 /* If the LEN parameter is zero, return zero. */
7304 if (integer_zerop (len))
7305 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7306 arg1, arg2);
7307
7308 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7309 if (operand_equal_p (arg1, arg2, 0))
7310 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7311
7312 /* If the LEN parameter is one, return an expression corresponding to
7313 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7314 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7315 {
7316 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7317 tree cst_uchar_ptr_node
7318 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7319
7320 tree ind1
7321 = fold_convert_loc (loc, integer_type_node,
7322 build1 (INDIRECT_REF, cst_uchar_node,
7323 fold_convert_loc (loc,
7324 cst_uchar_ptr_node,
7325 arg1)));
7326 tree ind2
7327 = fold_convert_loc (loc, integer_type_node,
7328 build1 (INDIRECT_REF, cst_uchar_node,
7329 fold_convert_loc (loc,
7330 cst_uchar_ptr_node,
7331 arg2)));
7332 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7333 }
7334
7335 return NULL_TREE;
7336 }
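
/* For example, with the folds above

       memcmp (p, q, 0)   becomes 0 (still evaluating p and q),
       memcmp (p, p, n)   becomes 0 (still evaluating n), and
       memcmp (p, q, 1)   becomes *(const unsigned char*) p
                                  - *(const unsigned char*) q.  */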
7337
7338 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7339 Return NULL_TREE if no simplification can be made. */
7340
7341 static tree
7342 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7343 {
7344 if (!validate_arg (arg1, POINTER_TYPE)
7345 || !validate_arg (arg2, POINTER_TYPE))
7346 return NULL_TREE;
7347
7348 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7349 if (operand_equal_p (arg1, arg2, 0))
7350 return integer_zero_node;
7351
7352 /* If the second arg is "", return *(const unsigned char*)arg1. */
7353 const char *p2 = c_getstr (arg2);
7354 if (p2 && *p2 == '\0')
7355 {
7356 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7357 tree cst_uchar_ptr_node
7358 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7359
7360 return fold_convert_loc (loc, integer_type_node,
7361 build1 (INDIRECT_REF, cst_uchar_node,
7362 fold_convert_loc (loc,
7363 cst_uchar_ptr_node,
7364 arg1)));
7365 }
7366
7367 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7368 const char *p1 = c_getstr (arg1);
7369 if (p1 && *p1 == '\0')
7370 {
7371 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7372 tree cst_uchar_ptr_node
7373 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7374
7375 tree temp
7376 = fold_convert_loc (loc, integer_type_node,
7377 build1 (INDIRECT_REF, cst_uchar_node,
7378 fold_convert_loc (loc,
7379 cst_uchar_ptr_node,
7380 arg2)));
7381 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7382 }
7383
7384 return NULL_TREE;
7385 }
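
/* For example, the folds above turn

       strcmp (s, s)    into 0 (for identical, non-volatile operands),
       strcmp (s, "")   into *(const unsigned char*) s, and
       strcmp ("", s)   into -*(const unsigned char*) s.  */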
7386
7387 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7388 Return NULL_TREE if no simplification can be made. */
7389
7390 static tree
7391 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7392 {
7393 if (!validate_arg (arg1, POINTER_TYPE)
7394 || !validate_arg (arg2, POINTER_TYPE)
7395 || !validate_arg (len, INTEGER_TYPE))
7396 return NULL_TREE;
7397
7398 /* If the LEN parameter is zero, return zero. */
7399 if (integer_zerop (len))
7400 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7401 arg1, arg2);
7402
7403 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7404 if (operand_equal_p (arg1, arg2, 0))
7405 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7406
7407 /* If the second arg is "", and the length is greater than zero,
7408 return *(const unsigned char*)arg1. */
7409 const char *p2 = c_getstr (arg2);
7410 if (p2 && *p2 == '\0'
7411 && TREE_CODE (len) == INTEGER_CST
7412 && tree_int_cst_sgn (len) == 1)
7413 {
7414 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7415 tree cst_uchar_ptr_node
7416 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7417
7418 return fold_convert_loc (loc, integer_type_node,
7419 build1 (INDIRECT_REF, cst_uchar_node,
7420 fold_convert_loc (loc,
7421 cst_uchar_ptr_node,
7422 arg1)));
7423 }
7424
7425 /* If the first arg is "", and the length is greater than zero,
7426 return -*(const unsigned char*)arg2. */
7427 const char *p1 = c_getstr (arg1);
7428 if (p1 && *p1 == '\0'
7429 && TREE_CODE (len) == INTEGER_CST
7430 && tree_int_cst_sgn (len) == 1)
7431 {
7432 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7433 tree cst_uchar_ptr_node
7434 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7435
7436 tree temp = fold_convert_loc (loc, integer_type_node,
7437 build1 (INDIRECT_REF, cst_uchar_node,
7438 fold_convert_loc (loc,
7439 cst_uchar_ptr_node,
7440 arg2)));
7441 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7442 }
7443
7444 /* If the LEN parameter is one, return an expression corresponding to
7445 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7446 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7447 {
7448 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7449 tree cst_uchar_ptr_node
7450 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7451
7452 tree ind1 = fold_convert_loc (loc, integer_type_node,
7453 build1 (INDIRECT_REF, cst_uchar_node,
7454 fold_convert_loc (loc,
7455 cst_uchar_ptr_node,
7456 arg1)));
7457 tree ind2 = fold_convert_loc (loc, integer_type_node,
7458 build1 (INDIRECT_REF, cst_uchar_node,
7459 fold_convert_loc (loc,
7460 cst_uchar_ptr_node,
7461 arg2)));
7462 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7463 }
7464
7465 return NULL_TREE;
7466 }
7467
7468 /* Fold a call to builtin isascii with argument ARG. */
7469
7470 static tree
7471 fold_builtin_isascii (location_t loc, tree arg)
7472 {
7473 if (!validate_arg (arg, INTEGER_TYPE))
7474 return NULL_TREE;
7475 else
7476 {
7477 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7478 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7479 build_int_cst (integer_type_node,
7480 ~ (unsigned HOST_WIDE_INT) 0x7f));
7481 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7482 arg, integer_zero_node);
7483 }
7484 }
7485
7486 /* Fold a call to builtin toascii with argument ARG. */
7487
7488 static tree
7489 fold_builtin_toascii (location_t loc, tree arg)
7490 {
7491 if (!validate_arg (arg, INTEGER_TYPE))
7492 return NULL_TREE;
7493
7494 /* Transform toascii(c) -> (c & 0x7f). */
7495 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7496 build_int_cst (integer_type_node, 0x7f));
7497 }
7498
7499 /* Fold a call to builtin isdigit with argument ARG. */
7500
7501 static tree
7502 fold_builtin_isdigit (location_t loc, tree arg)
7503 {
7504 if (!validate_arg (arg, INTEGER_TYPE))
7505 return NULL_TREE;
7506 else
7507 {
7508 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7509 /* According to the C standard, isdigit is unaffected by locale.
7510 However, it definitely is affected by the target character set. */
7511 unsigned HOST_WIDE_INT target_digit0
7512 = lang_hooks.to_target_charset ('0');
7513
7514 if (target_digit0 == 0)
7515 return NULL_TREE;
7516
7517 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7518 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7519 build_int_cst (unsigned_type_node, target_digit0));
7520 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7521 build_int_cst (unsigned_type_node, 9));
7522 }
7523 }
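
/* Taken together, the three character-classification folds above rewrite

       isascii (c)   as ((c & ~0x7f) == 0),
       toascii (c)   as (c & 0x7f), and
       isdigit (c)   as ((unsigned) c - '0' <= 9),

   the last one only when '0' maps to a known non-zero value in the
   target character set.  */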
7524
7525 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7526
7527 static tree
7528 fold_builtin_fabs (location_t loc, tree arg, tree type)
7529 {
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7532
7533 arg = fold_convert_loc (loc, type, arg);
7534 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7535 }
7536
7537 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7538
7539 static tree
7540 fold_builtin_abs (location_t loc, tree arg, tree type)
7541 {
7542 if (!validate_arg (arg, INTEGER_TYPE))
7543 return NULL_TREE;
7544
7545 arg = fold_convert_loc (loc, type, arg);
7546 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7547 }
7548
7549 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7550
7551 static tree
7552 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7553 {
7554 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7555 if (validate_arg (arg0, REAL_TYPE)
7556 && validate_arg (arg1, REAL_TYPE)
7557 && validate_arg (arg2, REAL_TYPE)
7558 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7559 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7560
7561 return NULL_TREE;
7562 }
7563
7564 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7565
7566 static tree
7567 fold_builtin_carg (location_t loc, tree arg, tree type)
7568 {
7569 if (validate_arg (arg, COMPLEX_TYPE)
7570 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7571 {
7572 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7573
7574 if (atan2_fn)
7575 {
7576 tree new_arg = builtin_save_expr (arg);
7577 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7578 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7579 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7580 }
7581 }
7582
7583 return NULL_TREE;
7584 }
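
/* E.g. for a complex double Z, carg (Z) is folded here into
   atan2 (__imag__ Z, __real__ Z), with the argument wrapped in a
   SAVE_EXPR so it is evaluated only once.  */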
7585
7586 /* Fold a call to builtin frexp. We can assume the base is 2. */
7587
7588 static tree
7589 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7590 {
7591 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7592 return NULL_TREE;
7593
7594 STRIP_NOPS (arg0);
7595
7596 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7597 return NULL_TREE;
7598
7599 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7600
7601 /* Proceed if a valid pointer type was passed in. */
7602 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7603 {
7604 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7605 tree frac, exp;
7606
7607 switch (value->cl)
7608 {
7609 case rvc_zero:
7610 /* For +-0, return (*exp = 0, +-0). */
7611 exp = integer_zero_node;
7612 frac = arg0;
7613 break;
7614 case rvc_nan:
7615 case rvc_inf:
7616 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7617 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7618 case rvc_normal:
7619 {
7620 /* Since the frexp function always expects base 2, and in
7621 GCC normalized significands are already in the range
7622 [0.5, 1.0), we have exactly what frexp wants. */
7623 REAL_VALUE_TYPE frac_rvt = *value;
7624 SET_REAL_EXP (&frac_rvt, 0);
7625 frac = build_real (rettype, frac_rvt);
7626 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7627 }
7628 break;
7629 default:
7630 gcc_unreachable ();
7631 }
7632
7633 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7634 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7635 TREE_SIDE_EFFECTS (arg1) = 1;
7636 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7637 }
7638
7639 return NULL_TREE;
7640 }
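
/* A worked example of the constant folding above: frexp (8.0, &e)
   becomes the equivalent of (e = 4, 0.5), since 8.0 is represented as
   0.5 * 2**4 with the significand normalized to [0.5, 1.0).  */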
7641
7642 /* Fold a call to builtin modf. */
7643
7644 static tree
7645 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7646 {
7647 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7648 return NULL_TREE;
7649
7650 STRIP_NOPS (arg0);
7651
7652 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7653 return NULL_TREE;
7654
7655 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7656
7657 /* Proceed if a valid pointer type was passed in. */
7658 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7659 {
7660 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7661 REAL_VALUE_TYPE trunc, frac;
7662
7663 switch (value->cl)
7664 {
7665 case rvc_nan:
7666 case rvc_zero:
7667 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7668 trunc = frac = *value;
7669 break;
7670 case rvc_inf:
7671 /* For +-Inf, return (*arg1 = arg0, +-0). */
7672 frac = dconst0;
7673 frac.sign = value->sign;
7674 trunc = *value;
7675 break;
7676 case rvc_normal:
7677 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7678 real_trunc (&trunc, VOIDmode, value);
7679 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7680 /* If the original number was negative and already
7681 integral, then the fractional part is -0.0. */
7682 if (value->sign && frac.cl == rvc_zero)
7683 frac.sign = value->sign;
7684 break;
7685 }
7686
7687 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7688 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7689 build_real (rettype, trunc));
7690 TREE_SIDE_EFFECTS (arg1) = 1;
7691 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7692 build_real (rettype, frac));
7693 }
7694
7695 return NULL_TREE;
7696 }
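
/* A worked example of the constant folding above: modf (-3.5, &ip)
   becomes the equivalent of (ip = -3.0, -0.5); for a negative value
   that is already integral the fractional part is -0.0.  */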
7697
7698 /* Given a location LOC, an interclass builtin function decl FNDECL
7699 and its single argument ARG, return an folded expression computing
7700 and its single argument ARG, return a folded expression computing
7701 the same, or NULL_TREE if we either couldn't or didn't want to fold
7702 (the latter happens if there's an RTL instruction available). */
7703 static tree
7704 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7705 {
7706 machine_mode mode;
7707
7708 if (!validate_arg (arg, REAL_TYPE))
7709 return NULL_TREE;
7710
7711 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7712 return NULL_TREE;
7713
7714 mode = TYPE_MODE (TREE_TYPE (arg));
7715
7716 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7717
7718 /* If there is no optab, try generic code. */
7719 switch (DECL_FUNCTION_CODE (fndecl))
7720 {
7721 tree result;
7722
7723 CASE_FLT_FN (BUILT_IN_ISINF):
7724 {
7725 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7726 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7727 tree type = TREE_TYPE (arg);
7728 REAL_VALUE_TYPE r;
7729 char buf[128];
7730
7731 if (is_ibm_extended)
7732 {
7733 /* NaN and Inf are encoded in the high-order double value
7734 only. The low-order value is not significant. */
7735 type = double_type_node;
7736 mode = DFmode;
7737 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7738 }
7739 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7740 real_from_string (&r, buf);
7741 result = build_call_expr (isgr_fn, 2,
7742 fold_build1_loc (loc, ABS_EXPR, type, arg),
7743 build_real (type, r));
7744 return result;
7745 }
7746 CASE_FLT_FN (BUILT_IN_FINITE):
7747 case BUILT_IN_ISFINITE:
7748 {
7749 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7750 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7751 tree type = TREE_TYPE (arg);
7752 REAL_VALUE_TYPE r;
7753 char buf[128];
7754
7755 if (is_ibm_extended)
7756 {
7757 /* NaN and Inf are encoded in the high-order double value
7758 only. The low-order value is not significant. */
7759 type = double_type_node;
7760 mode = DFmode;
7761 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7762 }
7763 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7764 real_from_string (&r, buf);
7765 result = build_call_expr (isle_fn, 2,
7766 fold_build1_loc (loc, ABS_EXPR, type, arg),
7767 build_real (type, r));
7768 /*result = fold_build2_loc (loc, UNGT_EXPR,
7769 TREE_TYPE (TREE_TYPE (fndecl)),
7770 fold_build1_loc (loc, ABS_EXPR, type, arg),
7771 build_real (type, r));
7772 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7773 TREE_TYPE (TREE_TYPE (fndecl)),
7774 result);*/
7775 return result;
7776 }
7777 case BUILT_IN_ISNORMAL:
7778 {
7779 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7780 islessequal(fabs(x),DBL_MAX). */
7781 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7782 tree type = TREE_TYPE (arg);
7783 tree orig_arg, max_exp, min_exp;
7784 machine_mode orig_mode = mode;
7785 REAL_VALUE_TYPE rmax, rmin;
7786 char buf[128];
7787
7788 orig_arg = arg = builtin_save_expr (arg);
7789 if (is_ibm_extended)
7790 {
7791 /* Use double to test the normal range of IBM extended
7792 precision. Emin for IBM extended precision is
7793 different to emin for IEEE double, being 53 higher
7794 since the low double exponent is at least 53 lower
7795 than the high double exponent. */
7796 type = double_type_node;
7797 mode = DFmode;
7798 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7799 }
7800 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7801
7802 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7803 real_from_string (&rmax, buf);
7804 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7805 real_from_string (&rmin, buf);
7806 max_exp = build_real (type, rmax);
7807 min_exp = build_real (type, rmin);
7808
7809 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7810 if (is_ibm_extended)
7811 {
7812 /* Testing the high end of the range is done just using
7813 the high double, using the same test as isfinite().
7814 For the subnormal end of the range we first test the
7815 high double, then if its magnitude is equal to the
7816 limit of 0x1p-969, we test whether the low double is
7817 non-zero and opposite sign to the high double. */
7818 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7819 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7820 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7821 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7822 arg, min_exp);
7823 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7824 complex_double_type_node, orig_arg);
7825 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7826 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7827 tree zero = build_real (type, dconst0);
7828 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7829 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7830 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7831 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7832 fold_build3 (COND_EXPR,
7833 integer_type_node,
7834 hilt, logt, lolt));
7835 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7836 eq_min, ok_lo);
7837 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7838 gt_min, eq_min);
7839 }
7840 else
7841 {
7842 tree const isge_fn
7843 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7844 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7845 }
7846 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7847 max_exp, min_exp);
7848 return result;
7849 }
7850 default:
7851 break;
7852 }
7853
7854 return NULL_TREE;
7855 }
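
/* As a source-level sketch, for a double argument X the folds above
   expand

       isinf (X)      to isgreater (fabs (X), DBL_MAX),
       isfinite (X)   to islessequal (fabs (X), DBL_MAX), and
       isnormal (X)   to isgreaterequal (fabs (X), DBL_MIN)
                         & islessequal (fabs (X), DBL_MAX),

   where DBL_MAX and DBL_MIN stand for the largest finite and smallest
   normal value of the argument's mode, with the extra handling above
   for IBM extended precision.  */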
7856
7857 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7858 ARG is the argument for the call; BUILTIN_INDEX selects the builtin. */
7859
7860 static tree
7861 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7862 {
7863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7864
7865 if (!validate_arg (arg, REAL_TYPE))
7866 return NULL_TREE;
7867
7868 switch (builtin_index)
7869 {
7870 case BUILT_IN_ISINF:
7871 if (!HONOR_INFINITIES (arg))
7872 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7873
7874 return NULL_TREE;
7875
7876 case BUILT_IN_ISINF_SIGN:
7877 {
7878 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7879 /* In a boolean context, GCC will fold the inner COND_EXPR to
7880 1. So e.g. "if (isinf_sign(x))" would be folded to just
7881 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7882 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7883 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7884 tree tmp = NULL_TREE;
7885
7886 arg = builtin_save_expr (arg);
7887
7888 if (signbit_fn && isinf_fn)
7889 {
7890 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7891 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7892
7893 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7894 signbit_call, integer_zero_node);
7895 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7896 isinf_call, integer_zero_node);
7897
7898 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7899 integer_minus_one_node, integer_one_node);
7900 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7901 isinf_call, tmp,
7902 integer_zero_node);
7903 }
7904
7905 return tmp;
7906 }
7907
7908 case BUILT_IN_ISFINITE:
7909 if (!HONOR_NANS (arg)
7910 && !HONOR_INFINITIES (arg))
7911 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7912
7913 return NULL_TREE;
7914
7915 case BUILT_IN_ISNAN:
7916 if (!HONOR_NANS (arg))
7917 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7918
7919 {
7920 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7921 if (is_ibm_extended)
7922 {
7923 /* NaN and Inf are encoded in the high-order double value
7924 only. The low-order value is not significant. */
7925 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7926 }
7927 }
7928 arg = builtin_save_expr (arg);
7929 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7930
7931 default:
7932 gcc_unreachable ();
7933 }
7934 }
7935
7936 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7937 This builtin will generate code to return the appropriate floating
7938 point classification depending on the value of the floating point
7939 number passed in. The possible return values must be supplied as
7940 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7941 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7942 one floating-point argument, which is "type generic". */
7943
7944 static tree
7945 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7946 {
7947 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7948 arg, type, res, tmp;
7949 machine_mode mode;
7950 REAL_VALUE_TYPE r;
7951 char buf[128];
7952
7953 /* Verify the required arguments in the original call. */
7954 if (nargs != 6
7955 || !validate_arg (args[0], INTEGER_TYPE)
7956 || !validate_arg (args[1], INTEGER_TYPE)
7957 || !validate_arg (args[2], INTEGER_TYPE)
7958 || !validate_arg (args[3], INTEGER_TYPE)
7959 || !validate_arg (args[4], INTEGER_TYPE)
7960 || !validate_arg (args[5], REAL_TYPE))
7961 return NULL_TREE;
7962
7963 fp_nan = args[0];
7964 fp_infinite = args[1];
7965 fp_normal = args[2];
7966 fp_subnormal = args[3];
7967 fp_zero = args[4];
7968 arg = args[5];
7969 type = TREE_TYPE (arg);
7970 mode = TYPE_MODE (type);
7971 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7972
7973 /* fpclassify(x) ->
7974 isnan(x) ? FP_NAN :
7975 (fabs(x) == Inf ? FP_INFINITE :
7976 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7977 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7978
7979 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7980 build_real (type, dconst0));
7981 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7982 tmp, fp_zero, fp_subnormal);
7983
7984 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7985 real_from_string (&r, buf);
7986 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7987 arg, build_real (type, r));
7988 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7989
7990 if (HONOR_INFINITIES (mode))
7991 {
7992 real_inf (&r);
7993 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7994 build_real (type, r));
7995 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7996 fp_infinite, res);
7997 }
7998
7999 if (HONOR_NANS (mode))
8000 {
8001 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8002 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8003 }
8004
8005 return res;
8006 }
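
/* So a call like

       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                             FP_SUBNORMAL, FP_ZERO, x)

   is folded into the nested conditional sketched in the comment above,

       isnan (x) ? FP_NAN
       : fabs (x) == Inf ? FP_INFINITE
       : fabs (x) >= DBL_MIN ? FP_NORMAL
       : x == 0 ? FP_ZERO : FP_SUBNORMAL,

   with the NaN and infinity tests dropped when the mode cannot
   represent them.  */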
8007
8008 /* Fold a call to an unordered comparison function such as
8009 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8010 being called and ARG0 and ARG1 are the arguments for the call.
8011 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8012 the opposite of the desired result. UNORDERED_CODE is used
8013 for modes that can hold NaNs and ORDERED_CODE is used for
8014 the rest. */
8015
8016 static tree
8017 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8018 enum tree_code unordered_code,
8019 enum tree_code ordered_code)
8020 {
8021 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8022 enum tree_code code;
8023 tree type0, type1;
8024 enum tree_code code0, code1;
8025 tree cmp_type = NULL_TREE;
8026
8027 type0 = TREE_TYPE (arg0);
8028 type1 = TREE_TYPE (arg1);
8029
8030 code0 = TREE_CODE (type0);
8031 code1 = TREE_CODE (type1);
8032
8033 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8034 /* Choose the wider of two real types. */
8035 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8036 ? type0 : type1;
8037 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8038 cmp_type = type0;
8039 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8040 cmp_type = type1;
8041
8042 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8043 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8044
8045 if (unordered_code == UNORDERED_EXPR)
8046 {
8047 if (!HONOR_NANS (arg0))
8048 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8049 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8050 }
8051
8052 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8053 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8054 fold_build2_loc (loc, code, type, arg0, arg1));
8055 }
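
/* For example, isgreater (x, y) folds here to the negation of an
   "unordered or less-equal" comparison when the operands can be NaN,
   and to !(x <= y) when they cannot, so under -ffinite-math-only the
   call collapses to an ordinary comparison; isunordered (x, y) folds
   to 0 outright when NaNs cannot occur (still evaluating x and y).  */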
8056
8057 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8058 arithmetic if it can never overflow, or into internal functions that
8059 return both the result of the arithmetic and an overflow flag in
8060 a complex integer result, or into some other check for overflow.
8061 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8062 checking part of that. */
8063
8064 static tree
8065 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8066 tree arg0, tree arg1, tree arg2)
8067 {
8068 enum internal_fn ifn = IFN_LAST;
8069 /* The code of the expression corresponding to the type-generic
8070 built-in, or ERROR_MARK for the type-specific ones. */
8071 enum tree_code opcode = ERROR_MARK;
8072 bool ovf_only = false;
8073
8074 switch (fcode)
8075 {
8076 case BUILT_IN_ADD_OVERFLOW_P:
8077 ovf_only = true;
8078 /* FALLTHRU */
8079 case BUILT_IN_ADD_OVERFLOW:
8080 opcode = PLUS_EXPR;
8081 /* FALLTHRU */
8082 case BUILT_IN_SADD_OVERFLOW:
8083 case BUILT_IN_SADDL_OVERFLOW:
8084 case BUILT_IN_SADDLL_OVERFLOW:
8085 case BUILT_IN_UADD_OVERFLOW:
8086 case BUILT_IN_UADDL_OVERFLOW:
8087 case BUILT_IN_UADDLL_OVERFLOW:
8088 ifn = IFN_ADD_OVERFLOW;
8089 break;
8090 case BUILT_IN_SUB_OVERFLOW_P:
8091 ovf_only = true;
8092 /* FALLTHRU */
8093 case BUILT_IN_SUB_OVERFLOW:
8094 opcode = MINUS_EXPR;
8095 /* FALLTHRU */
8096 case BUILT_IN_SSUB_OVERFLOW:
8097 case BUILT_IN_SSUBL_OVERFLOW:
8098 case BUILT_IN_SSUBLL_OVERFLOW:
8099 case BUILT_IN_USUB_OVERFLOW:
8100 case BUILT_IN_USUBL_OVERFLOW:
8101 case BUILT_IN_USUBLL_OVERFLOW:
8102 ifn = IFN_SUB_OVERFLOW;
8103 break;
8104 case BUILT_IN_MUL_OVERFLOW_P:
8105 ovf_only = true;
8106 /* FALLTHRU */
8107 case BUILT_IN_MUL_OVERFLOW:
8108 opcode = MULT_EXPR;
8109 /* FALLTHRU */
8110 case BUILT_IN_SMUL_OVERFLOW:
8111 case BUILT_IN_SMULL_OVERFLOW:
8112 case BUILT_IN_SMULLL_OVERFLOW:
8113 case BUILT_IN_UMUL_OVERFLOW:
8114 case BUILT_IN_UMULL_OVERFLOW:
8115 case BUILT_IN_UMULLL_OVERFLOW:
8116 ifn = IFN_MUL_OVERFLOW;
8117 break;
8118 default:
8119 gcc_unreachable ();
8120 }
8121
8122 /* For the "generic" overloads, the first two arguments can have different
8123 types and the last argument determines the target type to use to check
8124 for overflow. The arguments of the other overloads all have the same
8125 type. */
8126 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8127
8128 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8129 arguments are constant, attempt to fold the built-in call into a constant
8130 expression indicating whether or not it detected an overflow. */
8131 if (ovf_only
8132 && TREE_CODE (arg0) == INTEGER_CST
8133 && TREE_CODE (arg1) == INTEGER_CST)
8134 /* Perform the computation in the target type and check for overflow. */
8135 return omit_one_operand_loc (loc, boolean_type_node,
8136 arith_overflowed_p (opcode, type, arg0, arg1)
8137 ? boolean_true_node : boolean_false_node,
8138 arg2);
8139
8140 tree ctype = build_complex_type (type);
8141 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8142 2, arg0, arg1);
8143 tree tgt = save_expr (call);
8144 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8145 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8146 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8147
8148 if (ovf_only)
8149 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8150
8151 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8152 tree store
8153 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8154 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8155 }
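
/* As a sketch of the result, for non-constant operands a call such as
   __builtin_add_overflow (a, b, &r) is lowered to roughly

       tmp = .ADD_OVERFLOW (a, b);      (a complex integer value)
       r = __real__ tmp;
       (_Bool) __imag__ tmp             (the value of the whole call)

   while __builtin_add_overflow_p with two constant operands folds to a
   constant true/false answer, checking for overflow in the type of its
   third argument.  */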
8156
8157 /* Fold a call to __builtin_FILE to a constant string. */
8158
8159 static inline tree
8160 fold_builtin_FILE (location_t loc)
8161 {
8162 if (const char *fname = LOCATION_FILE (loc))
8163 return build_string_literal (strlen (fname) + 1, fname);
8164
8165 return build_string_literal (1, "");
8166 }
8167
8168 /* Fold a call to __builtin_FUNCTION to a constant string. */
8169
8170 static inline tree
8171 fold_builtin_FUNCTION ()
8172 {
8173 if (current_function_decl)
8174 {
8175 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8176 return build_string_literal (strlen (name) + 1, name);
8177 }
8178
8179 return build_string_literal (1, "");
8180 }
8181
8182 /* Fold a call to __builtin_LINE to an integer constant. */
8183
8184 static inline tree
8185 fold_builtin_LINE (location_t loc, tree type)
8186 {
8187 return build_int_cst (type, LOCATION_LINE (loc));
8188 }
8189
8190 /* Fold a call to built-in function FNDECL with 0 arguments.
8191 This function returns NULL_TREE if no simplification was possible. */
8192
8193 static tree
8194 fold_builtin_0 (location_t loc, tree fndecl)
8195 {
8196 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8197 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8198 switch (fcode)
8199 {
8200 case BUILT_IN_FILE:
8201 return fold_builtin_FILE (loc);
8202
8203 case BUILT_IN_FUNCTION:
8204 return fold_builtin_FUNCTION ();
8205
8206 case BUILT_IN_LINE:
8207 return fold_builtin_LINE (loc, type);
8208
8209 CASE_FLT_FN (BUILT_IN_INF):
8210 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8211 case BUILT_IN_INFD32:
8212 case BUILT_IN_INFD64:
8213 case BUILT_IN_INFD128:
8214 return fold_builtin_inf (loc, type, true);
8215
8216 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8217 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8218 return fold_builtin_inf (loc, type, false);
8219
8220 case BUILT_IN_CLASSIFY_TYPE:
8221 return fold_builtin_classify_type (NULL_TREE);
8222
8223 default:
8224 break;
8225 }
8226 return NULL_TREE;
8227 }
8228
8229 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8230 This function returns NULL_TREE if no simplification was possible. */
8231
8232 static tree
8233 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8234 {
8235 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8236 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8237
8238 if (TREE_CODE (arg0) == ERROR_MARK)
8239 return NULL_TREE;
8240
8241 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8242 return ret;
8243
8244 switch (fcode)
8245 {
8246 case BUILT_IN_CONSTANT_P:
8247 {
8248 tree val = fold_builtin_constant_p (arg0);
8249
8250 /* Gimplification will pull the CALL_EXPR for the builtin out of
8251 an if condition. When not optimizing, we'll not CSE it back.
8252 To avoid regressions such as link errors, return false now. */
8253 if (!val && !optimize)
8254 val = integer_zero_node;
8255
8256 return val;
8257 }
8258
8259 case BUILT_IN_CLASSIFY_TYPE:
8260 return fold_builtin_classify_type (arg0);
8261
8262 case BUILT_IN_STRLEN:
8263 return fold_builtin_strlen (loc, type, arg0);
8264
8265 CASE_FLT_FN (BUILT_IN_FABS):
8266 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8267 case BUILT_IN_FABSD32:
8268 case BUILT_IN_FABSD64:
8269 case BUILT_IN_FABSD128:
8270 return fold_builtin_fabs (loc, arg0, type);
8271
8272 case BUILT_IN_ABS:
8273 case BUILT_IN_LABS:
8274 case BUILT_IN_LLABS:
8275 case BUILT_IN_IMAXABS:
8276 return fold_builtin_abs (loc, arg0, type);
8277
8278 CASE_FLT_FN (BUILT_IN_CONJ):
8279 if (validate_arg (arg0, COMPLEX_TYPE)
8280 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8281 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8282 break;
8283
8284 CASE_FLT_FN (BUILT_IN_CREAL):
8285 if (validate_arg (arg0, COMPLEX_TYPE)
8286 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8287 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8288 break;
8289
8290 CASE_FLT_FN (BUILT_IN_CIMAG):
8291 if (validate_arg (arg0, COMPLEX_TYPE)
8292 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8293 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8294 break;
8295
8296 CASE_FLT_FN (BUILT_IN_CARG):
8297 return fold_builtin_carg (loc, arg0, type);
8298
8299 case BUILT_IN_ISASCII:
8300 return fold_builtin_isascii (loc, arg0);
8301
8302 case BUILT_IN_TOASCII:
8303 return fold_builtin_toascii (loc, arg0);
8304
8305 case BUILT_IN_ISDIGIT:
8306 return fold_builtin_isdigit (loc, arg0);
8307
8308 CASE_FLT_FN (BUILT_IN_FINITE):
8309 case BUILT_IN_FINITED32:
8310 case BUILT_IN_FINITED64:
8311 case BUILT_IN_FINITED128:
8312 case BUILT_IN_ISFINITE:
8313 {
8314 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8315 if (ret)
8316 return ret;
8317 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8318 }
8319
8320 CASE_FLT_FN (BUILT_IN_ISINF):
8321 case BUILT_IN_ISINFD32:
8322 case BUILT_IN_ISINFD64:
8323 case BUILT_IN_ISINFD128:
8324 {
8325 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8326 if (ret)
8327 return ret;
8328 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8329 }
8330
8331 case BUILT_IN_ISNORMAL:
8332 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8333
8334 case BUILT_IN_ISINF_SIGN:
8335 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8336
8337 CASE_FLT_FN (BUILT_IN_ISNAN):
8338 case BUILT_IN_ISNAND32:
8339 case BUILT_IN_ISNAND64:
8340 case BUILT_IN_ISNAND128:
8341 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8342
8343 case BUILT_IN_FREE:
8344 if (integer_zerop (arg0))
8345 return build_empty_stmt (loc);
8346 break;
8347
8348 default:
8349 break;
8350 }
8351
8352 return NULL_TREE;
8353
8354 }
8355
8356 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8357 This function returns NULL_TREE if no simplification was possible. */
8358
8359 static tree
8360 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8361 {
8362 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8363 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8364
8365 if (TREE_CODE (arg0) == ERROR_MARK
8366 || TREE_CODE (arg1) == ERROR_MARK)
8367 return NULL_TREE;
8368
8369 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8370 return ret;
8371
8372 switch (fcode)
8373 {
8374 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8375 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8376 if (validate_arg (arg0, REAL_TYPE)
8377 && validate_arg (arg1, POINTER_TYPE))
8378 return do_mpfr_lgamma_r (arg0, arg1, type);
8379 break;
8380
8381 CASE_FLT_FN (BUILT_IN_FREXP):
8382 return fold_builtin_frexp (loc, arg0, arg1, type);
8383
8384 CASE_FLT_FN (BUILT_IN_MODF):
8385 return fold_builtin_modf (loc, arg0, arg1, type);
8386
8387 case BUILT_IN_STRSTR:
8388 return fold_builtin_strstr (loc, arg0, arg1, type);
8389
8390 case BUILT_IN_STRSPN:
8391 return fold_builtin_strspn (loc, arg0, arg1);
8392
8393 case BUILT_IN_STRCSPN:
8394 return fold_builtin_strcspn (loc, arg0, arg1);
8395
8396 case BUILT_IN_STRCHR:
8397 case BUILT_IN_INDEX:
8398 return fold_builtin_strchr (loc, arg0, arg1, type);
8399
8400 case BUILT_IN_STRRCHR:
8401 case BUILT_IN_RINDEX:
8402 return fold_builtin_strrchr (loc, arg0, arg1, type);
8403
8404 case BUILT_IN_STRCMP:
8405 return fold_builtin_strcmp (loc, arg0, arg1);
8406
8407 case BUILT_IN_STRPBRK:
8408 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8409
8410 case BUILT_IN_EXPECT:
8411 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8412
8413 case BUILT_IN_ISGREATER:
8414 return fold_builtin_unordered_cmp (loc, fndecl,
8415 arg0, arg1, UNLE_EXPR, LE_EXPR);
8416 case BUILT_IN_ISGREATEREQUAL:
8417 return fold_builtin_unordered_cmp (loc, fndecl,
8418 arg0, arg1, UNLT_EXPR, LT_EXPR);
8419 case BUILT_IN_ISLESS:
8420 return fold_builtin_unordered_cmp (loc, fndecl,
8421 arg0, arg1, UNGE_EXPR, GE_EXPR);
8422 case BUILT_IN_ISLESSEQUAL:
8423 return fold_builtin_unordered_cmp (loc, fndecl,
8424 arg0, arg1, UNGT_EXPR, GT_EXPR);
8425 case BUILT_IN_ISLESSGREATER:
8426 return fold_builtin_unordered_cmp (loc, fndecl,
8427 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8428 case BUILT_IN_ISUNORDERED:
8429 return fold_builtin_unordered_cmp (loc, fndecl,
8430 arg0, arg1, UNORDERED_EXPR,
8431 NOP_EXPR);
8432
8433 /* We do the folding for va_start in the expander. */
8434 case BUILT_IN_VA_START:
8435 break;
8436
8437 case BUILT_IN_OBJECT_SIZE:
8438 return fold_builtin_object_size (arg0, arg1);
8439
8440 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8441 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8442
8443 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8444 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8445
8446 default:
8447 break;
8448 }
8449 return NULL_TREE;
8450 }
8451
8452 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8453 and ARG2.
8454 This function returns NULL_TREE if no simplification was possible. */
8455
8456 static tree
8457 fold_builtin_3 (location_t loc, tree fndecl,
8458 tree arg0, tree arg1, tree arg2)
8459 {
8460 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8461 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8462
8463 if (TREE_CODE (arg0) == ERROR_MARK
8464 || TREE_CODE (arg1) == ERROR_MARK
8465 || TREE_CODE (arg2) == ERROR_MARK)
8466 return NULL_TREE;
8467
8468 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8469 arg0, arg1, arg2))
8470 return ret;
8471
8472 switch (fcode)
8473 {
8474
8475 CASE_FLT_FN (BUILT_IN_SINCOS):
8476 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8477
8478 CASE_FLT_FN (BUILT_IN_FMA):
8479 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8480
8481 CASE_FLT_FN (BUILT_IN_REMQUO):
8482 if (validate_arg (arg0, REAL_TYPE)
8483 && validate_arg (arg1, REAL_TYPE)
8484 && validate_arg (arg2, POINTER_TYPE))
8485 return do_mpfr_remquo (arg0, arg1, arg2);
8486 break;
8487
8488 case BUILT_IN_STRNCMP:
8489 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8490
8491 case BUILT_IN_MEMCHR:
8492 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8493
8494 case BUILT_IN_BCMP:
8495 case BUILT_IN_MEMCMP:
8496 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8497
8498 case BUILT_IN_EXPECT:
8499 return fold_builtin_expect (loc, arg0, arg1, arg2);
8500
8501 case BUILT_IN_ADD_OVERFLOW:
8502 case BUILT_IN_SUB_OVERFLOW:
8503 case BUILT_IN_MUL_OVERFLOW:
8504 case BUILT_IN_ADD_OVERFLOW_P:
8505 case BUILT_IN_SUB_OVERFLOW_P:
8506 case BUILT_IN_MUL_OVERFLOW_P:
8507 case BUILT_IN_SADD_OVERFLOW:
8508 case BUILT_IN_SADDL_OVERFLOW:
8509 case BUILT_IN_SADDLL_OVERFLOW:
8510 case BUILT_IN_SSUB_OVERFLOW:
8511 case BUILT_IN_SSUBL_OVERFLOW:
8512 case BUILT_IN_SSUBLL_OVERFLOW:
8513 case BUILT_IN_SMUL_OVERFLOW:
8514 case BUILT_IN_SMULL_OVERFLOW:
8515 case BUILT_IN_SMULLL_OVERFLOW:
8516 case BUILT_IN_UADD_OVERFLOW:
8517 case BUILT_IN_UADDL_OVERFLOW:
8518 case BUILT_IN_UADDLL_OVERFLOW:
8519 case BUILT_IN_USUB_OVERFLOW:
8520 case BUILT_IN_USUBL_OVERFLOW:
8521 case BUILT_IN_USUBLL_OVERFLOW:
8522 case BUILT_IN_UMUL_OVERFLOW:
8523 case BUILT_IN_UMULL_OVERFLOW:
8524 case BUILT_IN_UMULLL_OVERFLOW:
8525 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8526
8527 default:
8528 break;
8529 }
8530 return NULL_TREE;
8531 }
8532
8533 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8534 arguments. IGNORE is true if the result of the
8535 function call is ignored. This function returns NULL_TREE if no
8536 simplification was possible. */
8537
8538 tree
8539 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8540 {
8541 tree ret = NULL_TREE;
8542
8543 switch (nargs)
8544 {
8545 case 0:
8546 ret = fold_builtin_0 (loc, fndecl);
8547 break;
8548 case 1:
8549 ret = fold_builtin_1 (loc, fndecl, args[0]);
8550 break;
8551 case 2:
8552 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8553 break;
8554 case 3:
8555 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8556 break;
8557 default:
8558 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8559 break;
8560 }
8561 if (ret)
8562 {
8563 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8564 SET_EXPR_LOCATION (ret, loc);
8565 TREE_NO_WARNING (ret) = 1;
8566 return ret;
8567 }
8568 return NULL_TREE;
8569 }
8570
8571 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8572 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8573 of arguments in ARGS to be omitted. OLDNARGS is the number of
8574 elements in ARGS. */
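/* Illustrative sketch of the argument layout below (editorial example,
   not from the original sources): with OLDNARGS == 3, SKIP == 1 and
   N == 2 the rewritten call receives

     buffer[0] = newargs[0]
     buffer[1] = newargs[1]
     buffer[2] = args[1]
     buffer[3] = args[2]

   i.e. the N new arguments come first, followed by the old arguments
   with the first SKIP of them dropped.  */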
8575
8576 static tree
8577 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8578 int skip, tree fndecl, int n, va_list newargs)
8579 {
8580 int nargs = oldnargs - skip + n;
8581 tree *buffer;
8582
8583 if (n > 0)
8584 {
8585 int i, j;
8586
8587 buffer = XALLOCAVEC (tree, nargs);
8588 for (i = 0; i < n; i++)
8589 buffer[i] = va_arg (newargs, tree);
8590 for (j = skip; j < oldnargs; j++, i++)
8591 buffer[i] = args[j];
8592 }
8593 else
8594 buffer = args + skip;
8595
8596 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8597 }
8598
8599 /* Return true if FNDECL shouldn't be folded right now.
8600 If a built-in function has an always_inline inline wrapper,
8601 defer folding it until after always_inline functions have
8602 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8603 might not be performed. */
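/* Editorial example (hedged, not from the original sources): with
   -D_FORTIFY_SOURCE=2 the C library typically provides an always_inline
   wrapper roughly like

     extern __inline __attribute__ ((always_inline)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
     }

   Folding the memcpy builtin before this wrapper has been inlined would
   bypass the object-size check, hence the deferral below.  */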
8604
8605 bool
8606 avoid_folding_inline_builtin (tree fndecl)
8607 {
8608 return (DECL_DECLARED_INLINE_P (fndecl)
8609 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8610 && cfun
8611 && !cfun->always_inline_functions_inlined
8612 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8613 }
8614
8615 /* A wrapper function for builtin folding that prevents warnings for
8616 "statement without effect" and the like, caused by removing the
8617 call node before the warning is generated. */
8618
8619 tree
8620 fold_call_expr (location_t loc, tree exp, bool ignore)
8621 {
8622 tree ret = NULL_TREE;
8623 tree fndecl = get_callee_fndecl (exp);
8624 if (fndecl
8625 && TREE_CODE (fndecl) == FUNCTION_DECL
8626 && DECL_BUILT_IN (fndecl)
8627 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8628 yet. Defer folding until we see all the arguments
8629 (after inlining). */
8630 && !CALL_EXPR_VA_ARG_PACK (exp))
8631 {
8632 int nargs = call_expr_nargs (exp);
8633
8634 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8635 instead the last argument is __builtin_va_arg_pack (). Defer folding
8636 even in that case, until arguments are finalized. */
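/* Editorial illustration (assumed typical usage, not from the original
   sources): such calls come from always_inline forwarding wrappers like

     extern __inline __attribute__ ((always_inline, gnu_inline)) int
     my_printf (const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   Until the wrapper is inlined into its caller, the fprintf call's real
   argument list is unknown, so folding must wait.  */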
8637 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8638 {
8639 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8640 if (fndecl2
8641 && TREE_CODE (fndecl2) == FUNCTION_DECL
8642 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8643 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8644 return NULL_TREE;
8645 }
8646
8647 if (avoid_folding_inline_builtin (fndecl))
8648 return NULL_TREE;
8649
8650 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8651 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8652 CALL_EXPR_ARGP (exp), ignore);
8653 else
8654 {
8655 tree *args = CALL_EXPR_ARGP (exp);
8656 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8657 if (ret)
8658 return ret;
8659 }
8660 }
8661 return NULL_TREE;
8662 }
8663
8664 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8665 N arguments are passed in the array ARGARRAY. Return a folded
8666 expression or NULL_TREE if no simplification was possible. */
8667
8668 tree
8669 fold_builtin_call_array (location_t loc, tree,
8670 tree fn,
8671 int n,
8672 tree *argarray)
8673 {
8674 if (TREE_CODE (fn) != ADDR_EXPR)
8675 return NULL_TREE;
8676
8677 tree fndecl = TREE_OPERAND (fn, 0);
8678 if (TREE_CODE (fndecl) == FUNCTION_DECL
8679 && DECL_BUILT_IN (fndecl))
8680 {
8681 /* If the last argument is __builtin_va_arg_pack (), arguments to this
8682 function are not finalized yet. Defer folding until they are. */
8683 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8684 {
8685 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8686 if (fndecl2
8687 && TREE_CODE (fndecl2) == FUNCTION_DECL
8688 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8689 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8690 return NULL_TREE;
8691 }
8692 if (avoid_folding_inline_builtin (fndecl))
8693 return NULL_TREE;
8694 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8695 return targetm.fold_builtin (fndecl, n, argarray, false);
8696 else
8697 return fold_builtin_n (loc, fndecl, argarray, n, false);
8698 }
8699
8700 return NULL_TREE;
8701 }
8702
8703 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8704 along with N new arguments specified as the "..." parameters. SKIP
8705 is the number of arguments in EXP to be omitted. This function is used
8706 to do varargs-to-varargs transformations. */
8707
8708 static tree
8709 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8710 {
8711 va_list ap;
8712 tree t;
8713
8714 va_start (ap, n);
8715 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8716 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8717 va_end (ap);
8718
8719 return t;
8720 }
8721
8722 /* Validate a single argument ARG against a tree code CODE representing
8723 a type. */
8724
8725 static bool
8726 validate_arg (const_tree arg, enum tree_code code)
8727 {
8728 if (!arg)
8729 return false;
8730 else if (code == POINTER_TYPE)
8731 return POINTER_TYPE_P (TREE_TYPE (arg));
8732 else if (code == INTEGER_TYPE)
8733 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8734 return code == TREE_CODE (TREE_TYPE (arg));
8735 }
8736
8737 /* This function validates the types of a function call argument list
8738 against a specified list of tree_codes. If the last specifier is a 0,
8739 that represents an ellipsis; otherwise the last specifier must be a
8740 VOID_TYPE.
8741
8742 This is the GIMPLE version of validate_arglist. Eventually we want to
8743 completely convert builtins.c to work from GIMPLEs and the tree based
8744 validate_arglist will then be removed. */
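/* Editorial usage sketch (mirrors the validate_arglist calls elsewhere
   in this file):

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
       -- exactly two arguments, a pointer followed by an integer;
     validate_gimple_arglist (call, POINTER_TYPE, 0)
       -- at least one pointer argument, anything may follow.  */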
8745
8746 bool
8747 validate_gimple_arglist (const gcall *call, ...)
8748 {
8749 enum tree_code code;
8750 bool res = false;
8751 va_list ap;
8752 const_tree arg;
8753 size_t i;
8754
8755 va_start (ap, call);
8756 i = 0;
8757
8758 do
8759 {
8760 code = (enum tree_code) va_arg (ap, int);
8761 switch (code)
8762 {
8763 case 0:
8764 /* This signifies an ellipsis; any further arguments are all ok. */
8765 res = true;
8766 goto end;
8767 case VOID_TYPE:
8768 /* This signifies an endlink: if no arguments remain, return
8769 true; otherwise return false. */
8770 res = (i == gimple_call_num_args (call));
8771 goto end;
8772 default:
8773 /* If no parameters remain or the parameter's code does not
8774 match the specified code, return false. Otherwise continue
8775 checking any remaining arguments. */
8776 arg = gimple_call_arg (call, i++);
8777 if (!validate_arg (arg, code))
8778 goto end;
8779 break;
8780 }
8781 }
8782 while (1);
8783
8784 /* We need gotos here since we can only have one VA_CLOSE in a
8785 function. */
8786 end: ;
8787 va_end (ap);
8788
8789 return res;
8790 }
8791
8792 /* Default target-specific builtin expander that does nothing. */
8793
8794 rtx
8795 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8796 rtx target ATTRIBUTE_UNUSED,
8797 rtx subtarget ATTRIBUTE_UNUSED,
8798 machine_mode mode ATTRIBUTE_UNUSED,
8799 int ignore ATTRIBUTE_UNUSED)
8800 {
8801 return NULL_RTX;
8802 }
8803
8804 /* Returns true if EXP represents data that would potentially reside
8805 in a readonly section. */
8806
8807 bool
8808 readonly_data_expr (tree exp)
8809 {
8810 STRIP_NOPS (exp);
8811
8812 if (TREE_CODE (exp) != ADDR_EXPR)
8813 return false;
8814
8815 exp = get_base_address (TREE_OPERAND (exp, 0));
8816 if (!exp)
8817 return false;
8818
8819 /* Make sure we call decl_readonly_section only for trees it
8820 can handle (since it returns true for everything it doesn't
8821 understand). */
8822 if (TREE_CODE (exp) == STRING_CST
8823 || TREE_CODE (exp) == CONSTRUCTOR
8824 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8825 return decl_readonly_section (exp, 0);
8826 else
8827 return false;
8828 }
8829
8830 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8831 to the call, and TYPE is its return type.
8832
8833 Return NULL_TREE if no simplification was possible, otherwise return the
8834 simplified form of the call as a tree.
8835
8836 The simplified form may be a constant or other expression which
8837 computes the same value, but in a more efficient manner (including
8838 calls to other builtin functions).
8839
8840 The call may contain arguments which need to be evaluated, but
8841 which are not useful to determine the result of the call. In
8842 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8843 COMPOUND_EXPR will be an argument which must be evaluated.
8844 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8845 COMPOUND_EXPR in the chain will contain the tree for the simplified
8846 form of the builtin function call. */
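/* Editorial examples of the folds performed below (assuming the string
   constants are visible at fold time):

     strstr (s, "")         =>  (char *) s
     strstr (s, "c")        =>  strchr (s, 'c')
     strstr ("abcd", "cd")  =>  (char *) "abcd" + 2  */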
8847
8848 static tree
8849 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8850 {
8851 if (!validate_arg (s1, POINTER_TYPE)
8852 || !validate_arg (s2, POINTER_TYPE))
8853 return NULL_TREE;
8854 else
8855 {
8856 tree fn;
8857 const char *p1, *p2;
8858
8859 p2 = c_getstr (s2);
8860 if (p2 == NULL)
8861 return NULL_TREE;
8862
8863 p1 = c_getstr (s1);
8864 if (p1 != NULL)
8865 {
8866 const char *r = strstr (p1, p2);
8867 tree tem;
8868
8869 if (r == NULL)
8870 return build_int_cst (TREE_TYPE (s1), 0);
8871
8872 /* Return an offset into the constant string argument. */
8873 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8874 return fold_convert_loc (loc, type, tem);
8875 }
8876
8877 /* The argument is const char *, and the result is char *, so we need
8878 a type conversion here to avoid a warning. */
8879 if (p2[0] == '\0')
8880 return fold_convert_loc (loc, type, s1);
8881
8882 if (p2[1] != '\0')
8883 return NULL_TREE;
8884
8885 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8886 if (!fn)
8887 return NULL_TREE;
8888
8889 /* New argument list transforming strstr(s1, s2) to
8890 strchr(s1, s2[0]). */
8891 return build_call_expr_loc (loc, fn, 2, s1,
8892 build_int_cst (integer_type_node, p2[0]));
8893 }
8894 }
8895
8896 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8897 the call, and TYPE is its return type.
8898
8899 Return NULL_TREE if no simplification was possible, otherwise return the
8900 simplified form of the call as a tree.
8901
8902 The simplified form may be a constant or other expression which
8903 computes the same value, but in a more efficient manner (including
8904 calls to other builtin functions).
8905
8906 The call may contain arguments which need to be evaluated, but
8907 which are not useful to determine the result of the call. In
8908 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8909 COMPOUND_EXPR will be an argument which must be evaluated.
8910 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8911 COMPOUND_EXPR in the chain will contain the tree for the simplified
8912 form of the builtin function call. */
8913
8914 static tree
8915 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8916 {
8917 if (!validate_arg (s1, POINTER_TYPE)
8918 || !validate_arg (s2, INTEGER_TYPE))
8919 return NULL_TREE;
8920 else
8921 {
8922 const char *p1;
8923
8924 if (TREE_CODE (s2) != INTEGER_CST)
8925 return NULL_TREE;
8926
8927 p1 = c_getstr (s1);
8928 if (p1 != NULL)
8929 {
8930 char c;
8931 const char *r;
8932 tree tem;
8933
8934 if (target_char_cast (s2, &c))
8935 return NULL_TREE;
8936
8937 r = strchr (p1, c);
8938
8939 if (r == NULL)
8940 return build_int_cst (TREE_TYPE (s1), 0);
8941
8942 /* Return an offset into the constant string argument. */
8943 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8944 return fold_convert_loc (loc, type, tem);
8945 }
8946 return NULL_TREE;
8947 }
8948 }
8949
8950 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8951 the call, and TYPE is its return type.
8952
8953 Return NULL_TREE if no simplification was possible, otherwise return the
8954 simplified form of the call as a tree.
8955
8956 The simplified form may be a constant or other expression which
8957 computes the same value, but in a more efficient manner (including
8958 calls to other builtin functions).
8959
8960 The call may contain arguments which need to be evaluated, but
8961 which are not useful to determine the result of the call. In
8962 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8963 COMPOUND_EXPR will be an argument which must be evaluated.
8964 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8965 COMPOUND_EXPR in the chain will contain the tree for the simplified
8966 form of the builtin function call. */
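/* Editorial examples of the folds performed below (assuming constant
   operands are visible at fold time):

     strrchr ("abcabc", 'b')  =>  (char *) "abcabc" + 4
     strrchr (s, '\0')        =>  strchr (s, '\0')  */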
8967
8968 static tree
8969 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8970 {
8971 if (!validate_arg (s1, POINTER_TYPE)
8972 || !validate_arg (s2, INTEGER_TYPE))
8973 return NULL_TREE;
8974 else
8975 {
8976 tree fn;
8977 const char *p1;
8978
8979 if (TREE_CODE (s2) != INTEGER_CST)
8980 return NULL_TREE;
8981
8982 p1 = c_getstr (s1);
8983 if (p1 != NULL)
8984 {
8985 char c;
8986 const char *r;
8987 tree tem;
8988
8989 if (target_char_cast (s2, &c))
8990 return NULL_TREE;
8991
8992 r = strrchr (p1, c);
8993
8994 if (r == NULL)
8995 return build_int_cst (TREE_TYPE (s1), 0);
8996
8997 /* Return an offset into the constant string argument. */
8998 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8999 return fold_convert_loc (loc, type, tem);
9000 }
9001
9002 if (! integer_zerop (s2))
9003 return NULL_TREE;
9004
9005 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9006 if (!fn)
9007 return NULL_TREE;
9008
9009 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9010 return build_call_expr_loc (loc, fn, 2, s1, s2);
9011 }
9012 }
9013
9014 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9015 to the call, and TYPE is its return type.
9016
9017 Return NULL_TREE if no simplification was possible, otherwise return the
9018 simplified form of the call as a tree.
9019
9020 The simplified form may be a constant or other expression which
9021 computes the same value, but in a more efficient manner (including
9022 calls to other builtin functions).
9023
9024 The call may contain arguments which need to be evaluated, but
9025 which are not useful to determine the result of the call. In
9026 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9027 COMPOUND_EXPR will be an argument which must be evaluated.
9028 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9029 COMPOUND_EXPR in the chain will contain the tree for the simplified
9030 form of the builtin function call. */
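/* Editorial examples of the folds performed below (assuming constant
   operands are visible at fold time):

     strpbrk (s, "")        =>  (char *) 0  (s still evaluated for side-effects)
     strpbrk (s, "c")       =>  strchr (s, 'c')
     strpbrk ("abc", "xb")  =>  (char *) "abc" + 1  */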
9031
9032 static tree
9033 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9034 {
9035 if (!validate_arg (s1, POINTER_TYPE)
9036 || !validate_arg (s2, POINTER_TYPE))
9037 return NULL_TREE;
9038 else
9039 {
9040 tree fn;
9041 const char *p1, *p2;
9042
9043 p2 = c_getstr (s2);
9044 if (p2 == NULL)
9045 return NULL_TREE;
9046
9047 p1 = c_getstr (s1);
9048 if (p1 != NULL)
9049 {
9050 const char *r = strpbrk (p1, p2);
9051 tree tem;
9052
9053 if (r == NULL)
9054 return build_int_cst (TREE_TYPE (s1), 0);
9055
9056 /* Return an offset into the constant string argument. */
9057 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9058 return fold_convert_loc (loc, type, tem);
9059 }
9060
9061 if (p2[0] == '\0')
9062 /* strpbrk(x, "") == NULL.
9063 Evaluate and ignore s1 in case it had side-effects. */
9064 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9065
9066 if (p2[1] != '\0')
9067 return NULL_TREE; /* Really call strpbrk. */
9068
9069 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9070 if (!fn)
9071 return NULL_TREE;
9072
9073 /* New argument list transforming strpbrk(s1, s2) to
9074 strchr(s1, s2[0]). */
9075 return build_call_expr_loc (loc, fn, 2, s1,
9076 build_int_cst (integer_type_node, p2[0]));
9077 }
9078 }
9079
9080 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9081 to the call.
9082
9083 Return NULL_TREE if no simplification was possible, otherwise return the
9084 simplified form of the call as a tree.
9085
9086 The simplified form may be a constant or other expression which
9087 computes the same value, but in a more efficient manner (including
9088 calls to other builtin functions).
9089
9090 The call may contain arguments which need to be evaluated, but
9091 which are not useful to determine the result of the call. In
9092 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9093 COMPOUND_EXPR will be an argument which must be evaluated.
9094 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9095 COMPOUND_EXPR in the chain will contain the tree for the simplified
9096 form of the builtin function call. */
9097
9098 static tree
9099 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9100 {
9101 if (!validate_arg (s1, POINTER_TYPE)
9102 || !validate_arg (s2, POINTER_TYPE))
9103 return NULL_TREE;
9104 else
9105 {
9106 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9107
9108 /* If either argument is "", the result is zero. */
9109 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9110 /* Evaluate and ignore both arguments in case either one has
9111 side-effects. */
9112 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9113 s1, s2);
9114 return NULL_TREE;
9115 }
9116 }
9117
9118 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9119 to the call.
9120
9121 Return NULL_TREE if no simplification was possible, otherwise return the
9122 simplified form of the call as a tree.
9123
9124 The simplified form may be a constant or other expression which
9125 computes the same value, but in a more efficient manner (including
9126 calls to other builtin functions).
9127
9128 The call may contain arguments which need to be evaluated, but
9129 which are not useful to determine the result of the call. In
9130 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9131 COMPOUND_EXPR will be an argument which must be evaluated.
9132 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9133 COMPOUND_EXPR in the chain will contain the tree for the simplified
9134 form of the builtin function call. */
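/* Editorial examples of the folds performed below:

     strcspn ("", s)  =>  0            (s still evaluated for side-effects)
     strcspn (s, "")  =>  strlen (s)   */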
9135
9136 static tree
9137 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9138 {
9139 if (!validate_arg (s1, POINTER_TYPE)
9140 || !validate_arg (s2, POINTER_TYPE))
9141 return NULL_TREE;
9142 else
9143 {
9144 /* If the first argument is "", the result is zero. */
9145 const char *p1 = c_getstr (s1);
9146 if (p1 && *p1 == '\0')
9147 {
9148 /* Evaluate and ignore argument s2 in case it has
9149 side-effects. */
9150 return omit_one_operand_loc (loc, size_type_node,
9151 size_zero_node, s2);
9152 }
9153
9154 /* If the second argument is "", return __builtin_strlen(s1). */
9155 const char *p2 = c_getstr (s2);
9156 if (p2 && *p2 == '\0')
9157 {
9158 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9159
9160 /* If the replacement _DECL isn't initialized, don't do the
9161 transformation. */
9162 if (!fn)
9163 return NULL_TREE;
9164
9165 return build_call_expr_loc (loc, fn, 1, s1);
9166 }
9167 return NULL_TREE;
9168 }
9169 }
9170
9171 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9172 produced, false otherwise. This is done so that we don't output the error
9173 or warning two or three times. */
9174
9175 bool
9176 fold_builtin_next_arg (tree exp, bool va_start_p)
9177 {
9178 tree fntype = TREE_TYPE (current_function_decl);
9179 int nargs = call_expr_nargs (exp);
9180 tree arg;
9181 /* There is a good chance the current input_location points inside the
9182 definition of the va_start macro (perhaps on the token for
9183 the builtin) in a system header, so warnings will not be emitted.
9184 Use the location in real source code. */
9185 source_location current_location =
9186 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9187 NULL);
9188
9189 if (!stdarg_p (fntype))
9190 {
9191 error ("%<va_start%> used in function with fixed args");
9192 return true;
9193 }
9194
9195 if (va_start_p)
9196 {
9197 if (va_start_p && (nargs != 2))
9198 {
9199 error ("wrong number of arguments to function %<va_start%>");
9200 return true;
9201 }
9202 arg = CALL_EXPR_ARG (exp, 1);
9203 }
9204 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9205 once we have checked the arguments and, if needed, issued a warning. */
9206 else
9207 {
9208 if (nargs == 0)
9209 {
9210 /* Evidently an out of date version of <stdarg.h>; can't validate
9211 va_start's second argument, but can still work as intended. */
9212 warning_at (current_location,
9213 OPT_Wvarargs,
9214 "%<__builtin_next_arg%> called without an argument");
9215 return true;
9216 }
9217 else if (nargs > 1)
9218 {
9219 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9220 return true;
9221 }
9222 arg = CALL_EXPR_ARG (exp, 0);
9223 }
9224
9225 if (TREE_CODE (arg) == SSA_NAME)
9226 arg = SSA_NAME_VAR (arg);
9227
9228 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9229 or __builtin_next_arg (0) the first time we see it, after checking
9230 the arguments and if needed issuing a warning. */
9231 if (!integer_zerop (arg))
9232 {
9233 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9234
9235 /* Strip off all nops for the sake of the comparison. This
9236 is not quite the same as STRIP_NOPS. It does more.
9237 We must also strip off INDIRECT_EXPR for C++ reference
9238 parameters. */
9239 while (CONVERT_EXPR_P (arg)
9240 || TREE_CODE (arg) == INDIRECT_REF)
9241 arg = TREE_OPERAND (arg, 0);
9242 if (arg != last_parm)
9243 {
9244 /* FIXME: Sometimes with the tree optimizers we can end up with
9245 something other than the last argument even though the user
9246 used the last argument. We just warn and set the arg to be the
9247 last argument so that we will get wrong-code because of
9248 it. */
9249 warning_at (current_location,
9250 OPT_Wvarargs,
9251 "second parameter of %<va_start%> not last named argument");
9252 }
9253
9254 /* Undefined by C99 7.15.1.4p4 (va_start):
9255 "If the parameter parmN is declared with the register storage
9256 class, with a function or array type, or with a type that is
9257 not compatible with the type that results after application of
9258 the default argument promotions, the behavior is undefined."
9259 */
9260 else if (DECL_REGISTER (arg))
9261 {
9262 warning_at (current_location,
9263 OPT_Wvarargs,
9264 "undefined behavior when second parameter of "
9265 "%<va_start%> is declared with %<register%> storage");
9266 }
9267
9268 /* We want to verify the second parameter just once before the tree
9269 optimizers are run and then avoid keeping it in the tree,
9270 as otherwise we could warn even for correct code like:
9271 void foo (int i, ...)
9272 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9273 if (va_start_p)
9274 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9275 else
9276 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9277 }
9278 return false;
9279 }
9280
9281
9282 /* Expand a call EXP to __builtin_object_size. */
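/* Editorial note (hedged summary of the code below, not from the
   original sources): by the time a call reaches the expander, earlier
   folding has already resolved every case whose size could be
   determined, e.g.

     char buf[64];
     __builtin_object_size (buf, 0)    -- folded to 64 long before this

   so whatever is still left gets the documented "unknown" answers:
   (size_t) -1 for types 0 and 1, and (size_t) 0 for types 2 and 3.  */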
9283
9284 static rtx
9285 expand_builtin_object_size (tree exp)
9286 {
9287 tree ost;
9288 int object_size_type;
9289 tree fndecl = get_callee_fndecl (exp);
9290
9291 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9292 {
9293 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9294 exp, fndecl);
9295 expand_builtin_trap ();
9296 return const0_rtx;
9297 }
9298
9299 ost = CALL_EXPR_ARG (exp, 1);
9300 STRIP_NOPS (ost);
9301
9302 if (TREE_CODE (ost) != INTEGER_CST
9303 || tree_int_cst_sgn (ost) < 0
9304 || compare_tree_int (ost, 3) > 0)
9305 {
9306 error ("%Klast argument of %D is not integer constant between 0 and 3",
9307 exp, fndecl);
9308 expand_builtin_trap ();
9309 return const0_rtx;
9310 }
9311
9312 object_size_type = tree_to_shwi (ost);
9313
9314 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9315 }
9316
9317 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9318 FCODE is the BUILT_IN_* to use.
9319 Return NULL_RTX if we failed; the caller should emit a normal call,
9320 otherwise try to get the result in TARGET, if convenient (and in
9321 mode MODE if that's convenient). */
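/* Editorial examples (hedged; the length and object size are assumed
   known at expansion time) of what the code below produces:

     __memcpy_chk (d, s, 16, 32)           =>  memcpy (d, s, 16)
     __memcpy_chk (d, s, 16, (size_t) -1)  =>  memcpy (d, s, 16)
     __memcpy_chk (d, s, 64, 32)           =>  warning, then the ordinary
                                               __memcpy_chk library call  */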
9322
9323 static rtx
9324 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9325 enum built_in_function fcode)
9326 {
9327 tree dest, src, len, size;
9328
9329 if (!validate_arglist (exp,
9330 POINTER_TYPE,
9331 fcode == BUILT_IN_MEMSET_CHK
9332 ? INTEGER_TYPE : POINTER_TYPE,
9333 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9334 return NULL_RTX;
9335
9336 dest = CALL_EXPR_ARG (exp, 0);
9337 src = CALL_EXPR_ARG (exp, 1);
9338 len = CALL_EXPR_ARG (exp, 2);
9339 size = CALL_EXPR_ARG (exp, 3);
9340
9341 if (! tree_fits_uhwi_p (size))
9342 return NULL_RTX;
9343
9344 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9345 {
9346 tree fn;
9347
9348 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9349 {
9350 warning_at (tree_nonartificial_location (exp),
9351 0, "%Kcall to %D will always overflow destination buffer",
9352 exp, get_callee_fndecl (exp));
9353 return NULL_RTX;
9354 }
9355
9356 fn = NULL_TREE;
9357 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9358 mem{cpy,pcpy,move,set} is available. */
9359 switch (fcode)
9360 {
9361 case BUILT_IN_MEMCPY_CHK:
9362 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9363 break;
9364 case BUILT_IN_MEMPCPY_CHK:
9365 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9366 break;
9367 case BUILT_IN_MEMMOVE_CHK:
9368 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9369 break;
9370 case BUILT_IN_MEMSET_CHK:
9371 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9372 break;
9373 default:
9374 break;
9375 }
9376
9377 if (! fn)
9378 return NULL_RTX;
9379
9380 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9381 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9382 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9383 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9384 }
9385 else if (fcode == BUILT_IN_MEMSET_CHK)
9386 return NULL_RTX;
9387 else
9388 {
9389 unsigned int dest_align = get_pointer_alignment (dest);
9390
9391 /* If DEST is not a pointer type, call the normal function. */
9392 if (dest_align == 0)
9393 return NULL_RTX;
9394
9395 /* If SRC and DEST are the same (and not volatile), do nothing. */
9396 if (operand_equal_p (src, dest, 0))
9397 {
9398 tree expr;
9399
9400 if (fcode != BUILT_IN_MEMPCPY_CHK)
9401 {
9402 /* Evaluate and ignore LEN in case it has side-effects. */
9403 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9404 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9405 }
9406
9407 expr = fold_build_pointer_plus (dest, len);
9408 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9409 }
9410
9411 /* __memmove_chk special case. */
9412 if (fcode == BUILT_IN_MEMMOVE_CHK)
9413 {
9414 unsigned int src_align = get_pointer_alignment (src);
9415
9416 if (src_align == 0)
9417 return NULL_RTX;
9418
9419 /* If src is categorized for a readonly section we can use
9420 normal __memcpy_chk. */
9421 if (readonly_data_expr (src))
9422 {
9423 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9424 if (!fn)
9425 return NULL_RTX;
9426 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9427 dest, src, len, size);
9428 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9429 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9430 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9431 }
9432 }
9433 return NULL_RTX;
9434 }
9435 }
9436
9437 /* Emit warning if a buffer overflow is detected at compile time. */
9438
9439 static void
9440 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9441 {
9442 int is_strlen = 0;
9443 tree len, size;
9444 location_t loc = tree_nonartificial_location (exp);
9445
9446 switch (fcode)
9447 {
9448 case BUILT_IN_STRCPY_CHK:
9449 case BUILT_IN_STPCPY_CHK:
9450 /* For __strcat_chk the warning will be emitted only if overflowing
9451 by at least strlen (dest) + 1 bytes. */
9452 case BUILT_IN_STRCAT_CHK:
9453 len = CALL_EXPR_ARG (exp, 1);
9454 size = CALL_EXPR_ARG (exp, 2);
9455 is_strlen = 1;
9456 break;
9457 case BUILT_IN_STRNCAT_CHK:
9458 case BUILT_IN_STRNCPY_CHK:
9459 case BUILT_IN_STPNCPY_CHK:
9460 len = CALL_EXPR_ARG (exp, 2);
9461 size = CALL_EXPR_ARG (exp, 3);
9462 break;
9463 case BUILT_IN_SNPRINTF_CHK:
9464 case BUILT_IN_VSNPRINTF_CHK:
9465 len = CALL_EXPR_ARG (exp, 1);
9466 size = CALL_EXPR_ARG (exp, 3);
9467 break;
9468 default:
9469 gcc_unreachable ();
9470 }
9471
9472 if (!len || !size)
9473 return;
9474
9475 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9476 return;
9477
9478 if (is_strlen)
9479 {
9480 len = c_strlen (len, 1);
9481 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9482 return;
9483 }
9484 else if (fcode == BUILT_IN_STRNCAT_CHK)
9485 {
9486 tree src = CALL_EXPR_ARG (exp, 1);
9487 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9488 return;
9489 src = c_strlen (src, 1);
9490 if (! src || ! tree_fits_uhwi_p (src))
9491 {
9492 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9493 exp, get_callee_fndecl (exp));
9494 return;
9495 }
9496 else if (tree_int_cst_lt (src, size))
9497 return;
9498 }
9499 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9500 return;
9501
9502 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9503 exp, get_callee_fndecl (exp));
9504 }
9505
9506 /* Emit warning if a buffer overflow is detected at compile time
9507 in __sprintf_chk/__vsprintf_chk calls. */
9508
9509 static void
9510 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9511 {
9512 tree size, len, fmt;
9513 const char *fmt_str;
9514 int nargs = call_expr_nargs (exp);
9515
9516 /* Verify the required arguments in the original call. */
9517
9518 if (nargs < 4)
9519 return;
9520 size = CALL_EXPR_ARG (exp, 2);
9521 fmt = CALL_EXPR_ARG (exp, 3);
9522
9523 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9524 return;
9525
9526 /* Check whether the format is a literal string constant. */
9527 fmt_str = c_getstr (fmt);
9528 if (fmt_str == NULL)
9529 return;
9530
9531 if (!init_target_chars ())
9532 return;
9533
9534 /* If the format doesn't contain % args or %%, we know its size. */
9535 if (strchr (fmt_str, target_percent) == 0)
9536 len = build_int_cstu (size_type_node, strlen (fmt_str));
9537 /* If the format is "%s" and the first ... argument is a string literal,
9538 we know it too. */
9539 else if (fcode == BUILT_IN_SPRINTF_CHK
9540 && strcmp (fmt_str, target_percent_s) == 0)
9541 {
9542 tree arg;
9543
9544 if (nargs < 5)
9545 return;
9546 arg = CALL_EXPR_ARG (exp, 4);
9547 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9548 return;
9549
9550 len = c_strlen (arg, 1);
9551 if (!len || ! tree_fits_uhwi_p (len))
9552 return;
9553 }
9554 else
9555 return;
9556
9557 if (! tree_int_cst_lt (len, size))
9558 warning_at (tree_nonartificial_location (exp),
9559 0, "%Kcall to %D will always overflow destination buffer",
9560 exp, get_callee_fndecl (exp));
9561 }
9562
9563 /* Emit warning if a free is called with address of a variable. */
9564
9565 static void
9566 maybe_emit_free_warning (tree exp)
9567 {
9568 tree arg = CALL_EXPR_ARG (exp, 0);
9569
9570 STRIP_NOPS (arg);
9571 if (TREE_CODE (arg) != ADDR_EXPR)
9572 return;
9573
9574 arg = get_base_address (TREE_OPERAND (arg, 0));
9575 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9576 return;
9577
9578 if (SSA_VAR_P (arg))
9579 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9580 "%Kattempt to free a non-heap object %qD", exp, arg);
9581 else
9582 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9583 "%Kattempt to free a non-heap object", exp);
9584 }
9585
9586 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9587 if possible. */
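/* Editorial examples of the folds below (assuming "char buf[64];" and
   an arbitrary function f with side-effects):

     __builtin_object_size (buf, 0)    =>  64
     __builtin_object_size (f (), 0)   =>  (size_t) -1
     __builtin_object_size (f (), 2)   =>  (size_t) 0  */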
9588
9589 static tree
9590 fold_builtin_object_size (tree ptr, tree ost)
9591 {
9592 unsigned HOST_WIDE_INT bytes;
9593 int object_size_type;
9594
9595 if (!validate_arg (ptr, POINTER_TYPE)
9596 || !validate_arg (ost, INTEGER_TYPE))
9597 return NULL_TREE;
9598
9599 STRIP_NOPS (ost);
9600
9601 if (TREE_CODE (ost) != INTEGER_CST
9602 || tree_int_cst_sgn (ost) < 0
9603 || compare_tree_int (ost, 3) > 0)
9604 return NULL_TREE;
9605
9606 object_size_type = tree_to_shwi (ost);
9607
9608 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9609 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9610 and (size_t) 0 for types 2 and 3. */
9611 if (TREE_SIDE_EFFECTS (ptr))
9612 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9613
9614 if (TREE_CODE (ptr) == ADDR_EXPR)
9615 {
9616 compute_builtin_object_size (ptr, object_size_type, &bytes);
9617 if (wi::fits_to_tree_p (bytes, size_type_node))
9618 return build_int_cstu (size_type_node, bytes);
9619 }
9620 else if (TREE_CODE (ptr) == SSA_NAME)
9621 {
9622 /* If object size is not known yet, delay folding until
9623 later. Maybe subsequent passes will help determining
9624 it. */
9625 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9626 && wi::fits_to_tree_p (bytes, size_type_node))
9627 return build_int_cstu (size_type_node, bytes);
9628 }
9629
9630 return NULL_TREE;
9631 }
9632
9633 /* Builtins with folding operations that operate on "..." arguments
9634 need special handling; we need to store the arguments in a convenient
9635 data structure before attempting any folding. Fortunately there are
9636 only a few builtins that fall into this category. FNDECL is the
9637 function, EXP is the CALL_EXPR for the call. */
9638
9639 static tree
9640 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9641 {
9642 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9643 tree ret = NULL_TREE;
9644
9645 switch (fcode)
9646 {
9647 case BUILT_IN_FPCLASSIFY:
9648 ret = fold_builtin_fpclassify (loc, args, nargs);
9649 break;
9650
9651 default:
9652 break;
9653 }
9654 if (ret)
9655 {
9656 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9657 SET_EXPR_LOCATION (ret, loc);
9658 TREE_NO_WARNING (ret) = 1;
9659 return ret;
9660 }
9661 return NULL_TREE;
9662 }
9663
9664 /* Initialize format string characters in the target charset. */
9665
9666 bool
9667 init_target_chars (void)
9668 {
9669 static bool init;
9670 if (!init)
9671 {
9672 target_newline = lang_hooks.to_target_charset ('\n');
9673 target_percent = lang_hooks.to_target_charset ('%');
9674 target_c = lang_hooks.to_target_charset ('c');
9675 target_s = lang_hooks.to_target_charset ('s');
9676 if (target_newline == 0 || target_percent == 0 || target_c == 0
9677 || target_s == 0)
9678 return false;
9679
9680 target_percent_c[0] = target_percent;
9681 target_percent_c[1] = target_c;
9682 target_percent_c[2] = '\0';
9683
9684 target_percent_s[0] = target_percent;
9685 target_percent_s[1] = target_s;
9686 target_percent_s[2] = '\0';
9687
9688 target_percent_s_newline[0] = target_percent;
9689 target_percent_s_newline[1] = target_s;
9690 target_percent_s_newline[2] = target_newline;
9691 target_percent_s_newline[3] = '\0';
9692
9693 init = true;
9694 }
9695 return true;
9696 }
9697
9698 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9699 and no overflow/underflow occurred. INEXACT is true if M was not
9700 exactly calculated. TYPE is the tree type for the result. This
9701 function assumes that you cleared the MPFR flags and then
9702 calculated M to see if anything subsequently set a flag prior to
9703 entering this function. Return NULL_TREE if any checks fail. */
9704
9705 static tree
9706 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9707 {
9708 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9709 overflow/underflow occurred. If -frounding-math, proceed iff the
9710 result of calling FUNC was exact. */
9711 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9712 && (!flag_rounding_math || !inexact))
9713 {
9714 REAL_VALUE_TYPE rr;
9715
9716 real_from_mpfr (&rr, m, type, GMP_RNDN);
9717 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9718 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9719 but the mpfr_t is not, then we underflowed in the
9720 conversion. */
9721 if (real_isfinite (&rr)
9722 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9723 {
9724 REAL_VALUE_TYPE rmode;
9725
9726 real_convert (&rmode, TYPE_MODE (type), &rr);
9727 /* Proceed iff the specified mode can hold the value. */
9728 if (real_identical (&rmode, &rr))
9729 return build_real (type, rmode);
9730 }
9731 }
9732 return NULL_TREE;
9733 }
9734
9735 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9736 number and no overflow/underflow occurred. INEXACT is true if M
9737 was not exactly calculated. TYPE is the tree type for the result.
9738 This function assumes that you cleared the MPFR flags and then
9739 calculated M to see if anything subsequently set a flag prior to
9740 entering this function. Return NULL_TREE if any checks fail; if
9741 FORCE_CONVERT is true, bypass the checks. */
9742
9743 static tree
9744 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9745 {
9746 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9747 overflow/underflow occurred. If -frounding-math, proceed iff the
9748 result of calling FUNC was exact. */
9749 if (force_convert
9750 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9751 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9752 && (!flag_rounding_math || !inexact)))
9753 {
9754 REAL_VALUE_TYPE re, im;
9755
9756 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9757 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9758 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9759 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9760 but the mpfr_t is not, then we underflowed in the
9761 conversion. */
9762 if (force_convert
9763 || (real_isfinite (&re) && real_isfinite (&im)
9764 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9765 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9766 {
9767 REAL_VALUE_TYPE re_mode, im_mode;
9768
9769 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9770 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9771 /* Proceed iff the specified mode can hold the value. */
9772 if (force_convert
9773 || (real_identical (&re_mode, &re)
9774 && real_identical (&im_mode, &im)))
9775 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9776 build_real (TREE_TYPE (type), im_mode));
9777 }
9778 }
9779 return NULL_TREE;
9780 }
9781
9782 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9783 the pointer *(ARG_QUO) and return the result. The type is taken
9784 from the type of ARG0 and is used for setting the precision of the
9785 calculation and results. */
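/* Editorial worked example (round-to-nearest): for
   remquo (5.0, 3.0, &q) the quotient 5/3 rounds to 2, so the folded
   remainder is 5.0 - 2*3.0 = -1.0 and *q is set to 2; the code below
   combines the *q assignment and the remainder into one COMPOUND_EXPR.  */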
9786
9787 static tree
9788 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9789 {
9790 tree const type = TREE_TYPE (arg0);
9791 tree result = NULL_TREE;
9792
9793 STRIP_NOPS (arg0);
9794 STRIP_NOPS (arg1);
9795
9796 /* To proceed, MPFR must exactly represent the target floating point
9797 format, which only happens when the target base equals two. */
9798 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9799 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9800 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9801 {
9802 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9803 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9804
9805 if (real_isfinite (ra0) && real_isfinite (ra1))
9806 {
9807 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9808 const int prec = fmt->p;
9809 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9810 tree result_rem;
9811 long integer_quo;
9812 mpfr_t m0, m1;
9813
9814 mpfr_inits2 (prec, m0, m1, NULL);
9815 mpfr_from_real (m0, ra0, GMP_RNDN);
9816 mpfr_from_real (m1, ra1, GMP_RNDN);
9817 mpfr_clear_flags ();
9818 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9819 /* Remquo is independent of the rounding mode, so pass
9820 inexact=0 to do_mpfr_ckconv(). */
9821 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9822 mpfr_clears (m0, m1, NULL);
9823 if (result_rem)
9824 {
9825 /* MPFR calculates quo in the host's long so it may
9826 return more bits in quo than the target int can hold
9827 if sizeof(host long) > sizeof(target int). This can
9828 happen even for native compilers in LP64 mode. In
9829 these cases, modulo the quo value with the largest
9830 number that the target int can hold while leaving one
9831 bit for the sign. */
9832 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9833 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9834
9835 /* Dereference the quo pointer argument. */
9836 arg_quo = build_fold_indirect_ref (arg_quo);
9837 /* Proceed iff a valid pointer type was passed in. */
9838 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9839 {
9840 /* Set the value. */
9841 tree result_quo
9842 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9843 build_int_cst (TREE_TYPE (arg_quo),
9844 integer_quo));
9845 TREE_SIDE_EFFECTS (result_quo) = 1;
9846 /* Combine the quo assignment with the rem. */
9847 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9848 result_quo, result_rem));
9849 }
9850 }
9851 }
9852 }
9853 return result;
9854 }
9855
9856 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9857 resulting value as a tree with type TYPE. The mpfr precision is
9858 set to the precision of TYPE. We assume that this mpfr function
9859 returns zero if the result could be calculated exactly within the
9860 requested precision. In addition, the integer pointer represented
9861 by ARG_SG will be dereferenced and set to the appropriate signgam
9862 (-1,1) value. */
9863
9864 static tree
9865 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9866 {
9867 tree result = NULL_TREE;
9868
9869 STRIP_NOPS (arg);
9870
9871 /* To proceed, MPFR must exactly represent the target floating point
9872 format, which only happens when the target base equals two. Also
9873 verify ARG is a constant and that ARG_SG is an int pointer. */
9874 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9875 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9876 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9877 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9878 {
9879 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9880
9881 /* In addition to NaN and Inf, the argument cannot be zero or a
9882 negative integer. */
9883 if (real_isfinite (ra)
9884 && ra->cl != rvc_zero
9885 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9886 {
9887 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9888 const int prec = fmt->p;
9889 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9890 int inexact, sg;
9891 mpfr_t m;
9892 tree result_lg;
9893
9894 mpfr_init2 (m, prec);
9895 mpfr_from_real (m, ra, GMP_RNDN);
9896 mpfr_clear_flags ();
9897 inexact = mpfr_lgamma (m, &sg, m, rnd);
9898 result_lg = do_mpfr_ckconv (m, type, inexact);
9899 mpfr_clear (m);
9900 if (result_lg)
9901 {
9902 tree result_sg;
9903
9904 /* Dereference the arg_sg pointer argument. */
9905 arg_sg = build_fold_indirect_ref (arg_sg);
9906 /* Assign the signgam value into *arg_sg. */
9907 result_sg = fold_build2 (MODIFY_EXPR,
9908 TREE_TYPE (arg_sg), arg_sg,
9909 build_int_cst (TREE_TYPE (arg_sg), sg));
9910 TREE_SIDE_EFFECTS (result_sg) = 1;
9911 /* Combine the signgam assignment with the lgamma result. */
9912 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9913 result_sg, result_lg));
9914 }
9915 }
9916 }
9917
9918 return result;
9919 }
9920
9921 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
9922 mpc function FUNC on it and return the resulting value as a tree
9923 with type TYPE. The mpfr precision is set to the precision of
9924 TYPE. We assume that function FUNC returns zero if the result
9925 could be calculated exactly within the requested precision. If
9926 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9927 in the arguments and/or results. */
9928
9929 tree
9930 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9931 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9932 {
9933 tree result = NULL_TREE;
9934
9935 STRIP_NOPS (arg0);
9936 STRIP_NOPS (arg1);
9937
9938 /* To proceed, MPFR must exactly represent the target floating point
9939 format, which only happens when the target base equals two. */
9940 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9941 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9942 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9944 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9945 {
9946 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9947 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9948 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9949 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9950
9951 if (do_nonfinite
9952 || (real_isfinite (re0) && real_isfinite (im0)
9953 && real_isfinite (re1) && real_isfinite (im1)))
9954 {
9955 const struct real_format *const fmt =
9956 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9957 const int prec = fmt->p;
9958 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9959 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9960 int inexact;
9961 mpc_t m0, m1;
9962
9963 mpc_init2 (m0, prec);
9964 mpc_init2 (m1, prec);
9965 mpfr_from_real (mpc_realref (m0), re0, rnd);
9966 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9967 mpfr_from_real (mpc_realref (m1), re1, rnd);
9968 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9969 mpfr_clear_flags ();
9970 inexact = func (m0, m0, m1, crnd);
9971 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9972 mpc_clear (m0);
9973 mpc_clear (m1);
9974 }
9975 }
9976
9977 return result;
9978 }
9979
9980 /* A wrapper function for builtin folding that prevents warnings for
9981 "statement without effect" and the like, caused by removing the
9982 call node before the warning is generated. */
9983
9984 tree
9985 fold_call_stmt (gcall *stmt, bool ignore)
9986 {
9987 tree ret = NULL_TREE;
9988 tree fndecl = gimple_call_fndecl (stmt);
9989 location_t loc = gimple_location (stmt);
9990 if (fndecl
9991 && TREE_CODE (fndecl) == FUNCTION_DECL
9992 && DECL_BUILT_IN (fndecl)
9993 && !gimple_call_va_arg_pack_p (stmt))
9994 {
9995 int nargs = gimple_call_num_args (stmt);
9996 tree *args = (nargs > 0
9997 ? gimple_call_arg_ptr (stmt, 0)
9998 : &error_mark_node);
9999
10000 if (avoid_folding_inline_builtin (fndecl))
10001 return NULL_TREE;
10002 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10003 {
10004 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10005 }
10006 else
10007 {
10008 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10009 if (ret)
10010 {
10011 /* Propagate location information from original call to
10012 expansion of builtin. Otherwise things like
10013 maybe_emit_chk_warning, that operate on the expansion
10014 of a builtin, will use the wrong location information. */
10015 if (gimple_has_location (stmt))
10016 {
10017 tree realret = ret;
10018 if (TREE_CODE (ret) == NOP_EXPR)
10019 realret = TREE_OPERAND (ret, 0);
10020 if (CAN_HAVE_LOCATION_P (realret)
10021 && !EXPR_HAS_LOCATION (realret))
10022 SET_EXPR_LOCATION (realret, loc);
10023 return realret;
10024 }
10025 return ret;
10026 }
10027 }
10028 }
10029 return NULL_TREE;
10030 }
10031
10032 /* Look up the function in builtin_decl that corresponds to DECL
10033 and set ASMSPEC as its user assembler name. DECL must be a
10034 function decl that declares a builtin. */
10035
10036 void
10037 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10038 {
10039 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10040 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10041 && asmspec != 0);
10042
10043 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10044 set_user_assembler_name (builtin, asmspec);
10045
10046 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10047 && INT_TYPE_SIZE < BITS_PER_WORD)
10048 {
10049 set_user_assembler_libfunc ("ffs", asmspec);
10050 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10051 "ffs");
10052 }
10053 }
10054
10055 /* Return true if DECL is a builtin that expands to a constant or similarly
10056 simple code. */
10057 bool
10058 is_simple_builtin (tree decl)
10059 {
10060 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10061 switch (DECL_FUNCTION_CODE (decl))
10062 {
10063 /* Builtins that expand to constants. */
10064 case BUILT_IN_CONSTANT_P:
10065 case BUILT_IN_EXPECT:
10066 case BUILT_IN_OBJECT_SIZE:
10067 case BUILT_IN_UNREACHABLE:
10068 /* Simple register moves or loads from stack. */
10069 case BUILT_IN_ASSUME_ALIGNED:
10070 case BUILT_IN_RETURN_ADDRESS:
10071 case BUILT_IN_EXTRACT_RETURN_ADDR:
10072 case BUILT_IN_FROB_RETURN_ADDR:
10073 case BUILT_IN_RETURN:
10074 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10075 case BUILT_IN_FRAME_ADDRESS:
10076 case BUILT_IN_VA_END:
10077 case BUILT_IN_STACK_SAVE:
10078 case BUILT_IN_STACK_RESTORE:
10079 /* Exception state returns or moves registers around. */
10080 case BUILT_IN_EH_FILTER:
10081 case BUILT_IN_EH_POINTER:
10082 case BUILT_IN_EH_COPY_VALUES:
10083 return true;
10084
10085 default:
10086 return false;
10087 }
10088
10089 return false;
10090 }
10091
10092 /* Return true if DECL is a builtin that is not expensive, i.e. it is
10093 most probably expanded inline into reasonably simple code. This is a
10094 superset of is_simple_builtin. */
10095 bool
10096 is_inexpensive_builtin (tree decl)
10097 {
10098 if (!decl)
10099 return false;
10100 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10101 return true;
10102 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10103 switch (DECL_FUNCTION_CODE (decl))
10104 {
10105 case BUILT_IN_ABS:
10106 case BUILT_IN_ALLOCA:
10107 case BUILT_IN_ALLOCA_WITH_ALIGN:
10108 case BUILT_IN_BSWAP16:
10109 case BUILT_IN_BSWAP32:
10110 case BUILT_IN_BSWAP64:
10111 case BUILT_IN_CLZ:
10112 case BUILT_IN_CLZIMAX:
10113 case BUILT_IN_CLZL:
10114 case BUILT_IN_CLZLL:
10115 case BUILT_IN_CTZ:
10116 case BUILT_IN_CTZIMAX:
10117 case BUILT_IN_CTZL:
10118 case BUILT_IN_CTZLL:
10119 case BUILT_IN_FFS:
10120 case BUILT_IN_FFSIMAX:
10121 case BUILT_IN_FFSL:
10122 case BUILT_IN_FFSLL:
10123 case BUILT_IN_IMAXABS:
10124 case BUILT_IN_FINITE:
10125 case BUILT_IN_FINITEF:
10126 case BUILT_IN_FINITEL:
10127 case BUILT_IN_FINITED32:
10128 case BUILT_IN_FINITED64:
10129 case BUILT_IN_FINITED128:
10130 case BUILT_IN_FPCLASSIFY:
10131 case BUILT_IN_ISFINITE:
10132 case BUILT_IN_ISINF_SIGN:
10133 case BUILT_IN_ISINF:
10134 case BUILT_IN_ISINFF:
10135 case BUILT_IN_ISINFL:
10136 case BUILT_IN_ISINFD32:
10137 case BUILT_IN_ISINFD64:
10138 case BUILT_IN_ISINFD128:
10139 case BUILT_IN_ISNAN:
10140 case BUILT_IN_ISNANF:
10141 case BUILT_IN_ISNANL:
10142 case BUILT_IN_ISNAND32:
10143 case BUILT_IN_ISNAND64:
10144 case BUILT_IN_ISNAND128:
10145 case BUILT_IN_ISNORMAL:
10146 case BUILT_IN_ISGREATER:
10147 case BUILT_IN_ISGREATEREQUAL:
10148 case BUILT_IN_ISLESS:
10149 case BUILT_IN_ISLESSEQUAL:
10150 case BUILT_IN_ISLESSGREATER:
10151 case BUILT_IN_ISUNORDERED:
10152 case BUILT_IN_VA_ARG_PACK:
10153 case BUILT_IN_VA_ARG_PACK_LEN:
10154 case BUILT_IN_VA_COPY:
10155 case BUILT_IN_TRAP:
10156 case BUILT_IN_SAVEREGS:
10157 case BUILT_IN_POPCOUNTL:
10158 case BUILT_IN_POPCOUNTLL:
10159 case BUILT_IN_POPCOUNTIMAX:
10160 case BUILT_IN_POPCOUNT:
10161 case BUILT_IN_PARITYL:
10162 case BUILT_IN_PARITYLL:
10163 case BUILT_IN_PARITYIMAX:
10164 case BUILT_IN_PARITY:
10165 case BUILT_IN_LABS:
10166 case BUILT_IN_LLABS:
10167 case BUILT_IN_PREFETCH:
10168 case BUILT_IN_ACC_ON_DEVICE:
10169 return true;
10170
10171 default:
10172 return is_simple_builtin (decl);
10173 }
10174
10175 return false;
10176 }