/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"


struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   (when Cilk Plus is enabled) names one of the Cilk runtime entry
   points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
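
/* For example, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load_n" are all recognized by is_builtin_name, whereas an
   ordinary user symbol such as "my_memcpy" is not.  */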


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
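
/* As a concrete reading of the M/N contract above: for an address known
   to equal 16 * k + 4 bytes for some k, we would compute *ALIGNP == 128
   and *BITPOSP == 32 (both in bits), so that the invariant
   ptr & (align - 1) == bitpos holds with the pointer expressed in bits.  */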

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part, extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a POINTER_TYPE, BITS_PER_UNIT is
   returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
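
/* For example, given the constant "foo\0bar", c_strlen with a known
   offset of 0 yields ssize_int (3), and with a known offset of 4 it
   yields ssize_int (3) as well (the length of "bar").  With a variable
   offset it returns NULL_TREE, because the embedded nul makes the
   result depend on where the search starts.  */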

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
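
/* E.g. on a little-endian target, c_readstr ("abcd", SImode) yields the
   constant 0x64636261 ('a' in the least significant byte), while a
   big-endian target yields 0x61626364.  */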

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
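
/* This expands both __builtin_return_address (COUNT) and
   __builtin_frame_address (COUNT); e.g. __builtin_return_address (0) is
   the return address of the current function, and
   __builtin_frame_address (1) is the frame address of its caller.  */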

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
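
/* The buffer built below is laid out, roughly, as

     buf[0]   frame pointer (targetm.builtin_setjmp_frame_value ())
     buf[1]   address of RECEIVER_LABEL
     buf[2]   start of the machine-dependent stack save area

   in units of GET_MODE_SIZE (Pmode).  */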

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
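
/* Note the contract enforced below: the second argument must be the
   constant 1, i.e. the call must look like __builtin_longjmp (buf, 1),
   because 1 is what __builtin_setjmp returns on the longjmp path.  */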

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if more arguments remain in the iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */
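
/* For example,

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer followed by an integer, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts a pointer followed by any number of further arguments.  */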

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer of the containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
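
/* For instance, __builtin_prefetch (&a[i + 8], 0, 3) prefetches for
   reading with maximal temporal locality; a one-argument call such as
   __builtin_prefetch (p) behaves as if 0 and 3 had been passed for the
   last two arguments.  */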

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* Even when EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */
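
/* A typical use of this machinery, in a function forwarding its own
   arguments, looks like (target_fn and the size bound 64 are only
   illustrative):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where the last argument of __builtin_apply is an upper bound on the
   number of bytes of stack arguments to copy.  */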
1272
1273 #define apply_args_mode \
1274 (this_target_builtins->x_apply_args_mode)
1275 #define apply_result_mode \
1276 (this_target_builtins->x_apply_result_mode)
1277
1278 /* Return the size required for the block returned by __builtin_apply_args,
1279 and initialize apply_args_mode. */
1280
1281 static int
1282 apply_args_size (void)
1283 {
1284 static int size = -1;
1285 int align;
1286 unsigned int regno;
1287 machine_mode mode;
1288
1289 /* The values computed by this function never change. */
1290 if (size < 0)
1291 {
1292 /* The first value is the incoming arg-pointer. */
1293 size = GET_MODE_SIZE (Pmode);
1294
1295 /* The second value is the structure value address unless this is
1296 passed as an "invisible" first argument. */
1297 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1298 size += GET_MODE_SIZE (Pmode);
1299
1300 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1301 if (FUNCTION_ARG_REGNO_P (regno))
1302 {
1303 mode = targetm.calls.get_raw_arg_mode (regno);
1304
1305 gcc_assert (mode != VOIDmode);
1306
1307 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1308 if (size % align != 0)
1309 size = CEIL (size, align) * align;
1310 size += GET_MODE_SIZE (mode);
1311 apply_args_mode[regno] = mode;
1312 }
1313 else
1314 {
1315 apply_args_mode[regno] = VOIDmode;
1316 }
1317 }
1318 return size;
1319 }
1320
1321 /* Return the size required for the block returned by __builtin_apply,
1322 and initialize apply_result_mode. */
1323
1324 static int
1325 apply_result_size (void)
1326 {
1327 static int size = -1;
1328 int align, regno;
1329 machine_mode mode;
1330
1331 /* The values computed by this function never change. */
1332 if (size < 0)
1333 {
1334 size = 0;
1335
1336 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1337 if (targetm.calls.function_value_regno_p (regno))
1338 {
1339 mode = targetm.calls.get_raw_result_mode (regno);
1340
1341 gcc_assert (mode != VOIDmode);
1342
1343 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1344 if (size % align != 0)
1345 size = CEIL (size, align) * align;
1346 size += GET_MODE_SIZE (mode);
1347 apply_result_mode[regno] = mode;
1348 }
1349 else
1350 apply_result_mode[regno] = VOIDmode;
1351
1352 /* Allow targets that use untyped_call and untyped_return to override
1353 the size so that machine-specific information can be stored here. */
1354 #ifdef APPLY_RESULT_SIZE
1355 size = APPLY_RESULT_SIZE;
1356 #endif
1357 }
1358 return size;
1359 }
1360
1361 /* Create a vector describing the result block RESULT. If SAVEP is true,
1362 the result block is used to save the values; otherwise it is used to
1363 restore the values. */
1364
1365 static rtx
1366 result_vector (int savep, rtx result)
1367 {
1368 int regno, size, align, nelts;
1369 machine_mode mode;
1370 rtx reg, mem;
1371 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1372
1373 size = nelts = 0;
1374 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1375 if ((mode = apply_result_mode[regno]) != VOIDmode)
1376 {
1377 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1378 if (size % align != 0)
1379 size = CEIL (size, align) * align;
1380 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1381 mem = adjust_address (result, mode, size);
1382 savevec[nelts++] = (savep
1383 ? gen_rtx_SET (mem, reg)
1384 : gen_rtx_SET (reg, mem));
1385 size += GET_MODE_SIZE (mode);
1386 }
1387 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1388 }
1389
1390 /* Save the state required to perform an untyped call with the same
1391 arguments as were passed to the current function. */
1392
1393 static rtx
1394 expand_builtin_apply_args_1 (void)
1395 {
1396 rtx registers, tem;
1397 int size, align, regno;
1398 machine_mode mode;
1399 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1400
1401 /* Create a block where the arg-pointer, structure value address,
1402 and argument registers can be saved. */
1403 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1404
1405 /* Walk past the arg-pointer and structure value address. */
1406 size = GET_MODE_SIZE (Pmode);
1407 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1408 size += GET_MODE_SIZE (Pmode);
1409
1410 /* Save each register used in calling a function to the block. */
1411 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1412 if ((mode = apply_args_mode[regno]) != VOIDmode)
1413 {
1414 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1415 if (size % align != 0)
1416 size = CEIL (size, align) * align;
1417
1418 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1419
1420 emit_move_insn (adjust_address (registers, mode, size), tem);
1421 size += GET_MODE_SIZE (mode);
1422 }
1423
1424 /* Save the arg pointer to the block. */
1425 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1426 /* We need the pointer as the caller actually passed them to us, not
1427 as we might have pretended they were passed. Make sure it's a valid
1428 operand, as emit_move_insn isn't expected to handle a PLUS. */
1429 if (STACK_GROWS_DOWNWARD)
1430 tem
1431 = force_operand (plus_constant (Pmode, tem,
1432 crtl->args.pretend_args_size),
1433 NULL_RTX);
1434 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1435
1436 size = GET_MODE_SIZE (Pmode);
1437
1438 /* Save the structure value address unless this is passed as an
1439 "invisible" first argument. */
1440 if (struct_incoming_value)
1441 {
1442 emit_move_insn (adjust_address (registers, Pmode, size),
1443 copy_to_reg (struct_incoming_value));
1444 size += GET_MODE_SIZE (Pmode);
1445 }
1446
1447 /* Return the address of the block. */
1448 return copy_addr_to_reg (XEXP (registers, 0));
1449 }
1450
1451 /* __builtin_apply_args returns block of memory allocated on
1452 the stack into which is stored the arg pointer, structure
1453 value address, static chain, and all the registers that might
1454 possibly be used in performing a function call. The code is
1455 moved to the start of the function so the incoming values are
1456 saved. */
1457
1458 static rtx
1459 expand_builtin_apply_args (void)
1460 {
1461 /* Don't do __builtin_apply_args more than once in a function.
1462 Save the result of the first call and reuse it. */
1463 if (apply_args_value != 0)
1464 return apply_args_value;
1465 {
1466 /* When this function is called, it means that registers must be
1467 saved on entry to this function. So we migrate the
1468 call to the first insn of this function. */
1469 rtx temp;
1470
1471 start_sequence ();
1472 temp = expand_builtin_apply_args_1 ();
1473 rtx_insn *seq = get_insns ();
1474 end_sequence ();
1475
1476 apply_args_value = temp;
1477
1478 /* Put the insns after the NOTE that starts the function.
1479 If this is inside a start_sequence, make the outer-level insn
1480 chain current, so the code is placed at the start of the
1481 function. If internal_arg_pointer is a non-virtual pseudo,
1482 it needs to be placed after the function that initializes
1483 that pseudo. */
1484 push_topmost_sequence ();
1485 if (REG_P (crtl->args.internal_arg_pointer)
1486 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1487 emit_insn_before (seq, parm_birth_insn);
1488 else
1489 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1490 pop_topmost_sequence ();
1491 return temp;
1492 }
1493 }
1494
1495 /* Perform an untyped call and save the state required to perform an
1496 untyped return of whatever value was returned by the given function. */
1497
1498 static rtx
1499 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1500 {
1501 int size, align, regno;
1502 machine_mode mode;
1503 rtx incoming_args, result, reg, dest, src;
1504 rtx_call_insn *call_insn;
1505 rtx old_stack_level = 0;
1506 rtx call_fusage = 0;
1507 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1508
1509 arguments = convert_memory_address (Pmode, arguments);
1510
1511 /* Create a block where the return registers can be saved. */
1512 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1513
1514 /* Fetch the arg pointer from the ARGUMENTS block. */
1515 incoming_args = gen_reg_rtx (Pmode);
1516 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1517 if (!STACK_GROWS_DOWNWARD)
1518 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1519 incoming_args, 0, OPTAB_LIB_WIDEN);
1520
1521 /* Push a new argument block and copy the arguments. Do not allow
1522 the (potential) memcpy call below to interfere with our stack
1523 manipulations. */
1524 do_pending_stack_adjust ();
1525 NO_DEFER_POP;
1526
1527 /* Save the stack with nonlocal if available. */
1528 if (targetm.have_save_stack_nonlocal ())
1529 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1530 else
1531 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1532
1533 /* Allocate a block of memory onto the stack and copy the memory
1534 arguments to the outgoing arguments address. We can pass TRUE
1535 as the 4th argument because we just saved the stack pointer
1536 and will restore it right after the call. */
1537 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1538
1539 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1540 may have already set current_function_calls_alloca to true.
1541 current_function_calls_alloca won't be set if argsize is zero,
1542 so we have to guarantee need_drap is true here. */
1543 if (SUPPORTS_STACK_ALIGNMENT)
1544 crtl->need_drap = true;
1545
1546 dest = virtual_outgoing_args_rtx;
1547 if (!STACK_GROWS_DOWNWARD)
1548 {
1549 if (CONST_INT_P (argsize))
1550 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1551 else
1552 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1553 }
1554 dest = gen_rtx_MEM (BLKmode, dest);
1555 set_mem_align (dest, PARM_BOUNDARY);
1556 src = gen_rtx_MEM (BLKmode, incoming_args);
1557 set_mem_align (src, PARM_BOUNDARY);
1558 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1559
1560 /* Refer to the argument block. */
1561 apply_args_size ();
1562 arguments = gen_rtx_MEM (BLKmode, arguments);
1563 set_mem_align (arguments, PARM_BOUNDARY);
1564
1565 /* Walk past the arg-pointer and structure value address. */
1566 size = GET_MODE_SIZE (Pmode);
1567 if (struct_value)
1568 size += GET_MODE_SIZE (Pmode);
1569
1570 /* Restore each of the registers previously saved. Make USE insns
1571 for each of these registers for use in making the call. */
1572 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1573 if ((mode = apply_args_mode[regno]) != VOIDmode)
1574 {
1575 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1576 if (size % align != 0)
1577 size = CEIL (size, align) * align;
1578 reg = gen_rtx_REG (mode, regno);
1579 emit_move_insn (reg, adjust_address (arguments, mode, size));
1580 use_reg (&call_fusage, reg);
1581 size += GET_MODE_SIZE (mode);
1582 }
1583
1584 /* Restore the structure value address unless this is passed as an
1585 "invisible" first argument. */
1586 size = GET_MODE_SIZE (Pmode);
1587 if (struct_value)
1588 {
1589 rtx value = gen_reg_rtx (Pmode);
1590 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1591 emit_move_insn (struct_value, value);
1592 if (REG_P (struct_value))
1593 use_reg (&call_fusage, struct_value);
1594 size += GET_MODE_SIZE (Pmode);
1595 }
1596
1597 /* All arguments and registers used for the call are set up by now! */
1598 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1599
1600 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1601 and we don't want to load it into a register as an optimization,
1602 because prepare_call_address already did it if it should be done. */
1603 if (GET_CODE (function) != SYMBOL_REF)
1604 function = memory_address (FUNCTION_MODE, function);
1605
1606 /* Generate the actual call instruction and save the return value. */
1607 if (targetm.have_untyped_call ())
1608 {
1609 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1610 emit_call_insn (targetm.gen_untyped_call (mem, result,
1611 result_vector (1, result)));
1612 }
1613 else if (targetm.have_call_value ())
1614 {
1615 rtx valreg = 0;
1616
1617 /* Locate the unique return register. It is not possible to
1618 express a call that sets more than one return register using
1619 call_value; use untyped_call for that. In fact, untyped_call
1620 only needs to save the return registers in the given block. */
1621 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1622 if ((mode = apply_result_mode[regno]) != VOIDmode)
1623 {
1624 gcc_assert (!valreg); /* have_untyped_call required. */
1625
1626 valreg = gen_rtx_REG (mode, regno);
1627 }
1628
1629 emit_insn (targetm.gen_call_value (valreg,
1630 gen_rtx_MEM (FUNCTION_MODE, function),
1631 const0_rtx, NULL_RTX, const0_rtx));
1632
1633 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1634 }
1635 else
1636 gcc_unreachable ();
1637
1638 /* Find the CALL insn we just emitted, and attach the register usage
1639 information. */
1640 call_insn = last_call_insn ();
1641 add_function_usage_to (call_insn, call_fusage);
1642
1643 /* Restore the stack. */
1644 if (targetm.have_save_stack_nonlocal ())
1645 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1646 else
1647 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1648 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1649
1650 OK_DEFER_POP;
1651
1652 /* Return the address of the result block. */
1653 result = copy_addr_to_reg (XEXP (result, 0));
1654 return convert_memory_address (ptr_mode, result);
1655 }
1656
1657 /* Perform an untyped return. */
1658
1659 static void
1660 expand_builtin_return (rtx result)
1661 {
1662 int size, align, regno;
1663 machine_mode mode;
1664 rtx reg;
1665 rtx_insn *call_fusage = 0;
1666
1667 result = convert_memory_address (Pmode, result);
1668
1669 apply_result_size ();
1670 result = gen_rtx_MEM (BLKmode, result);
1671
1672 if (targetm.have_untyped_return ())
1673 {
1674 rtx vector = result_vector (0, result);
1675 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1676 emit_barrier ();
1677 return;
1678 }
1679
1680 /* Restore the return value and note that each value is used. */
1681 size = 0;
1682 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1683 if ((mode = apply_result_mode[regno]) != VOIDmode)
1684 {
1685 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1686 if (size % align != 0)
1687 size = CEIL (size, align) * align;
1688 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1689 emit_move_insn (reg, adjust_address (result, mode, size));
1690
1691 push_to_sequence (call_fusage);
1692 emit_use (reg);
1693 call_fusage = get_insns ();
1694 end_sequence ();
1695 size += GET_MODE_SIZE (mode);
1696 }
1697
1698 /* Put the USE insns before the return. */
1699 emit_insn (call_fusage);
1700
1701 /* Return whatever values were restored by jumping directly to the end
1702 of the function. */
1703 expand_naked_return ();
1704 }
1705
1706 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1707
1708 static enum type_class
1709 type_to_class (tree type)
1710 {
1711 switch (TREE_CODE (type))
1712 {
1713 case VOID_TYPE: return void_type_class;
1714 case INTEGER_TYPE: return integer_type_class;
1715 case ENUMERAL_TYPE: return enumeral_type_class;
1716 case BOOLEAN_TYPE: return boolean_type_class;
1717 case POINTER_TYPE: return pointer_type_class;
1718 case REFERENCE_TYPE: return reference_type_class;
1719 case OFFSET_TYPE: return offset_type_class;
1720 case REAL_TYPE: return real_type_class;
1721 case COMPLEX_TYPE: return complex_type_class;
1722 case FUNCTION_TYPE: return function_type_class;
1723 case METHOD_TYPE: return method_type_class;
1724 case RECORD_TYPE: return record_type_class;
1725 case UNION_TYPE:
1726 case QUAL_UNION_TYPE: return union_type_class;
1727 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1728 ? string_type_class : array_type_class);
1729 case LANG_TYPE: return lang_type_class;
1730 default: return no_type_class;
1731 }
1732 }
1733
1734 /* Expand a call EXP to __builtin_classify_type. */
1735
1736 static rtx
1737 expand_builtin_classify_type (tree exp)
1738 {
1739 if (call_expr_nargs (exp))
1740 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1741 return GEN_INT (no_type_class);
1742 }
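/* For example, given the mapping above, __builtin_classify_type (1.5)
   classifies a REAL_TYPE argument and folds to real_type_class, a pointer
   argument yields pointer_type_class, and a call with no arguments yields
   no_type_class.  */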
1743
1744 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1745 determines which among a set of three builtin math functions is
1746 appropriate for a given type mode. The `F' and `L' cases are
1747 automatically generated from the `double' case. */
1748 #define CASE_MATHFN(MATHFN) \
1749 CASE_CFN_##MATHFN: \
1750 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1751 fcodel = BUILT_IN_##MATHFN##L ; break;
1752 /* Similar to above, but appends _R after any F/L suffix. */
1753 #define CASE_MATHFN_REENT(MATHFN) \
1754 case CFN_BUILT_IN_##MATHFN##_R: \
1755 case CFN_BUILT_IN_##MATHFN##F_R: \
1756 case CFN_BUILT_IN_##MATHFN##L_R: \
1757 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1758 fcodel = BUILT_IN_##MATHFN##L_R ; break;
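/* For illustration, CASE_MATHFN (SQRT) expands (through CASE_CFN_SQRT from
   case-cfn-macros.h) to roughly:

     case CFN_SQRT:
     case CFN_BUILT_IN_SQRT:
     case CFN_BUILT_IN_SQRTF:
     case CFN_BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */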
1759
1760 /* Return a function equivalent to FN but operating on floating-point
1761 values of type TYPE, or END_BUILTINS if no such function exists.
1762 This is purely an operation on function codes; it does not guarantee
1763 that the target actually has an implementation of the function. */
1764
1765 static built_in_function
1766 mathfn_built_in_2 (tree type, combined_fn fn)
1767 {
1768 built_in_function fcode, fcodef, fcodel;
1769
1770 switch (fn)
1771 {
1772 CASE_MATHFN (ACOS)
1773 CASE_MATHFN (ACOSH)
1774 CASE_MATHFN (ASIN)
1775 CASE_MATHFN (ASINH)
1776 CASE_MATHFN (ATAN)
1777 CASE_MATHFN (ATAN2)
1778 CASE_MATHFN (ATANH)
1779 CASE_MATHFN (CBRT)
1780 CASE_MATHFN (CEIL)
1781 CASE_MATHFN (CEXPI)
1782 CASE_MATHFN (COPYSIGN)
1783 CASE_MATHFN (COS)
1784 CASE_MATHFN (COSH)
1785 CASE_MATHFN (DREM)
1786 CASE_MATHFN (ERF)
1787 CASE_MATHFN (ERFC)
1788 CASE_MATHFN (EXP)
1789 CASE_MATHFN (EXP10)
1790 CASE_MATHFN (EXP2)
1791 CASE_MATHFN (EXPM1)
1792 CASE_MATHFN (FABS)
1793 CASE_MATHFN (FDIM)
1794 CASE_MATHFN (FLOOR)
1795 CASE_MATHFN (FMA)
1796 CASE_MATHFN (FMAX)
1797 CASE_MATHFN (FMIN)
1798 CASE_MATHFN (FMOD)
1799 CASE_MATHFN (FREXP)
1800 CASE_MATHFN (GAMMA)
1801 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1802 CASE_MATHFN (HUGE_VAL)
1803 CASE_MATHFN (HYPOT)
1804 CASE_MATHFN (ILOGB)
1805 CASE_MATHFN (ICEIL)
1806 CASE_MATHFN (IFLOOR)
1807 CASE_MATHFN (INF)
1808 CASE_MATHFN (IRINT)
1809 CASE_MATHFN (IROUND)
1810 CASE_MATHFN (ISINF)
1811 CASE_MATHFN (J0)
1812 CASE_MATHFN (J1)
1813 CASE_MATHFN (JN)
1814 CASE_MATHFN (LCEIL)
1815 CASE_MATHFN (LDEXP)
1816 CASE_MATHFN (LFLOOR)
1817 CASE_MATHFN (LGAMMA)
1818 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1819 CASE_MATHFN (LLCEIL)
1820 CASE_MATHFN (LLFLOOR)
1821 CASE_MATHFN (LLRINT)
1822 CASE_MATHFN (LLROUND)
1823 CASE_MATHFN (LOG)
1824 CASE_MATHFN (LOG10)
1825 CASE_MATHFN (LOG1P)
1826 CASE_MATHFN (LOG2)
1827 CASE_MATHFN (LOGB)
1828 CASE_MATHFN (LRINT)
1829 CASE_MATHFN (LROUND)
1830 CASE_MATHFN (MODF)
1831 CASE_MATHFN (NAN)
1832 CASE_MATHFN (NANS)
1833 CASE_MATHFN (NEARBYINT)
1834 CASE_MATHFN (NEXTAFTER)
1835 CASE_MATHFN (NEXTTOWARD)
1836 CASE_MATHFN (POW)
1837 CASE_MATHFN (POWI)
1838 CASE_MATHFN (POW10)
1839 CASE_MATHFN (REMAINDER)
1840 CASE_MATHFN (REMQUO)
1841 CASE_MATHFN (RINT)
1842 CASE_MATHFN (ROUND)
1843 CASE_MATHFN (SCALB)
1844 CASE_MATHFN (SCALBLN)
1845 CASE_MATHFN (SCALBN)
1846 CASE_MATHFN (SIGNBIT)
1847 CASE_MATHFN (SIGNIFICAND)
1848 CASE_MATHFN (SIN)
1849 CASE_MATHFN (SINCOS)
1850 CASE_MATHFN (SINH)
1851 CASE_MATHFN (SQRT)
1852 CASE_MATHFN (TAN)
1853 CASE_MATHFN (TANH)
1854 CASE_MATHFN (TGAMMA)
1855 CASE_MATHFN (TRUNC)
1856 CASE_MATHFN (Y0)
1857 CASE_MATHFN (Y1)
1858 CASE_MATHFN (YN)
1859
1860 default:
1861 return END_BUILTINS;
1862 }
1863
1864 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1865 return fcode;
1866 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1867 return fcodef;
1868 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1869 return fcodel;
1870 else
1871 return END_BUILTINS;
1872 }
1873
1874 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1875 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1876 otherwise use the explicit declaration. If we can't do the conversion,
1877 return null. */
1878
1879 static tree
1880 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1881 {
1882 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1883 if (fcode2 == END_BUILTINS)
1884 return NULL_TREE;
1885
1886 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1887 return NULL_TREE;
1888
1889 return builtin_decl_explicit (fcode2);
1890 }
1891
1892 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
1893
1894 tree
1895 mathfn_built_in (tree type, combined_fn fn)
1896 {
1897 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1898 }
1899
1900 /* Like mathfn_built_in_1, but take a built_in_function and
1901 always use the implicit builtin declarations. */
1902
1903 tree
1904 mathfn_built_in (tree type, enum built_in_function fn)
1905 {
1906 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1907 }
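/* Example use: mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the
   declaration of sinf, or NULL_TREE if sinf may not be used implicitly
   (e.g. on a target without C99 math functions).  */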
1908
1909 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1910 return its code, otherwise return IFN_LAST. Note that this function
1911 only tests whether the function is defined in internal-fn.def, not whether
1912 it is actually available on the target. */
1913
1914 internal_fn
1915 associated_internal_fn (tree fndecl)
1916 {
1917 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1918 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1919 switch (DECL_FUNCTION_CODE (fndecl))
1920 {
1921 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1922 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1923 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1924 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1925 #include "internal-fn.def"
1926
1927 CASE_FLT_FN (BUILT_IN_POW10):
1928 return IFN_EXP10;
1929
1930 CASE_FLT_FN (BUILT_IN_DREM):
1931 return IFN_REMAINDER;
1932
1933 CASE_FLT_FN (BUILT_IN_SCALBN):
1934 CASE_FLT_FN (BUILT_IN_SCALBLN):
1935 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1936 return IFN_LDEXP;
1937 return IFN_LAST;
1938
1939 default:
1940 return IFN_LAST;
1941 }
1942 }
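/* For example, BUILT_IN_SQRT, BUILT_IN_SQRTF and BUILT_IN_SQRTL all map to
   IFN_SQRT via the DEF_INTERNAL_FLT_FN entries above, and the pow10 family
   maps to IFN_EXP10; whether the target can actually expand the internal
   function is checked separately by the caller.  */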
1943
1944 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1945 on the current target by a call to an internal function, return the
1946 code of that internal function, otherwise return IFN_LAST. The caller
1947 is responsible for ensuring that any side-effects of the built-in
1948 call are dealt with correctly. E.g. if CALL sets errno, the caller
1949 must decide that the errno result isn't needed or make it available
1950 in some other way. */
1951
1952 internal_fn
1953 replacement_internal_fn (gcall *call)
1954 {
1955 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1956 {
1957 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1958 if (ifn != IFN_LAST)
1959 {
1960 tree_pair types = direct_internal_fn_types (ifn, call);
1961 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1962 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1963 return ifn;
1964 }
1965 }
1966 return IFN_LAST;
1967 }
1968
1969 /* Expand a call to the builtin trinary math functions (fma).
1970 Return NULL_RTX if a normal call should be emitted rather than expanding the
1971 function in-line. EXP is the expression that is a call to the builtin
1972 function; if convenient, the result should be placed in TARGET.
1973 SUBTARGET may be used as the target for computing one of EXP's
1974 operands. */
1975
1976 static rtx
1977 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1978 {
1979 optab builtin_optab;
1980 rtx op0, op1, op2, result;
1981 rtx_insn *insns;
1982 tree fndecl = get_callee_fndecl (exp);
1983 tree arg0, arg1, arg2;
1984 machine_mode mode;
1985
1986 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1987 return NULL_RTX;
1988
1989 arg0 = CALL_EXPR_ARG (exp, 0);
1990 arg1 = CALL_EXPR_ARG (exp, 1);
1991 arg2 = CALL_EXPR_ARG (exp, 2);
1992
1993 switch (DECL_FUNCTION_CODE (fndecl))
1994 {
1995 CASE_FLT_FN (BUILT_IN_FMA):
1996 builtin_optab = fma_optab; break;
1997 default:
1998 gcc_unreachable ();
1999 }
2000
2001 /* Make a suitable register to place result in. */
2002 mode = TYPE_MODE (TREE_TYPE (exp));
2003
2004 /* Before working hard, check whether the instruction is available. */
2005 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2006 return NULL_RTX;
2007
2008 result = gen_reg_rtx (mode);
2009
2010 /* Always stabilize the argument list. */
2011 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2012 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2013 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2014
2015 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2016 op1 = expand_normal (arg1);
2017 op2 = expand_normal (arg2);
2018
2019 start_sequence ();
2020
2021 /* Compute into RESULT.
2022 Set RESULT to wherever the result comes back. */
2023 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2024 result, 0);
2025
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call the library function
2028 with the stabilized argument list. */
2029 if (result == 0)
2030 {
2031 end_sequence ();
2032 return expand_call (exp, target, target == const0_rtx);
2033 }
2034
2035 /* Output the entire sequence. */
2036 insns = get_insns ();
2037 end_sequence ();
2038 emit_insn (insns);
2039
2040 return result;
2041 }
2042
2043 /* Expand a call to the builtin sin and cos math functions.
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2048 operands. */
2049
2050 static rtx
2051 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2052 {
2053 optab builtin_optab;
2054 rtx op0;
2055 rtx_insn *insns;
2056 tree fndecl = get_callee_fndecl (exp);
2057 machine_mode mode;
2058 tree arg;
2059
2060 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2061 return NULL_RTX;
2062
2063 arg = CALL_EXPR_ARG (exp, 0);
2064
2065 switch (DECL_FUNCTION_CODE (fndecl))
2066 {
2067 CASE_FLT_FN (BUILT_IN_SIN):
2068 CASE_FLT_FN (BUILT_IN_COS):
2069 builtin_optab = sincos_optab; break;
2070 default:
2071 gcc_unreachable ();
2072 }
2073
2074 /* Make a suitable register to place result in. */
2075 mode = TYPE_MODE (TREE_TYPE (exp));
2076
2077 /* Check if the sincos insn is available; if not, fall back
2078 to the sin or cos insn. */
2079 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2080 switch (DECL_FUNCTION_CODE (fndecl))
2081 {
2082 CASE_FLT_FN (BUILT_IN_SIN):
2083 builtin_optab = sin_optab; break;
2084 CASE_FLT_FN (BUILT_IN_COS):
2085 builtin_optab = cos_optab; break;
2086 default:
2087 gcc_unreachable ();
2088 }
2089
2090 /* Before working hard, check whether the instruction is available. */
2091 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2092 {
2093 rtx result = gen_reg_rtx (mode);
2094
2095 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2096 need to expand the argument again. This way, we will not perform
2097 side-effects more than once. */
2098 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2099
2100 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2101
2102 start_sequence ();
2103
2104 /* Compute into RESULT.
2105 Set RESULT to wherever the result comes back. */
2106 if (builtin_optab == sincos_optab)
2107 {
2108 int ok;
2109
2110 switch (DECL_FUNCTION_CODE (fndecl))
2111 {
2112 CASE_FLT_FN (BUILT_IN_SIN):
2113 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2114 break;
2115 CASE_FLT_FN (BUILT_IN_COS):
2116 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2117 break;
2118 default:
2119 gcc_unreachable ();
2120 }
2121 gcc_assert (ok);
2122 }
2123 else
2124 result = expand_unop (mode, builtin_optab, op0, result, 0);
2125
2126 if (result != 0)
2127 {
2128 /* Output the entire sequence. */
2129 insns = get_insns ();
2130 end_sequence ();
2131 emit_insn (insns);
2132 return result;
2133 }
2134
2135 /* If we were unable to expand via the builtin, stop the sequence
2136 (without outputting the insns) and call the library function
2137 with the stabilized argument list. */
2138 end_sequence ();
2139 }
2140
2141 return expand_call (exp, target, target == const0_rtx);
2142 }
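/* Note the operand order in the sincos cases above: the first output of
   the sincos pattern is the cosine and the second is the sine, which is
   why BUILT_IN_SIN passes RESULT as the second value and BUILT_IN_COS as
   the first.  expand_builtin_sincos below relies on the same convention.  */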
2143
2144 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2145 return an RTL instruction code that implements the functionality.
2146 If that isn't possible or available, return CODE_FOR_nothing. */
2147
2148 static enum insn_code
2149 interclass_mathfn_icode (tree arg, tree fndecl)
2150 {
2151 bool errno_set = false;
2152 optab builtin_optab = unknown_optab;
2153 machine_mode mode;
2154
2155 switch (DECL_FUNCTION_CODE (fndecl))
2156 {
2157 CASE_FLT_FN (BUILT_IN_ILOGB):
2158 errno_set = true; builtin_optab = ilogb_optab; break;
2159 CASE_FLT_FN (BUILT_IN_ISINF):
2160 builtin_optab = isinf_optab; break;
2161 case BUILT_IN_ISNORMAL:
2162 case BUILT_IN_ISFINITE:
2163 CASE_FLT_FN (BUILT_IN_FINITE):
2164 case BUILT_IN_FINITED32:
2165 case BUILT_IN_FINITED64:
2166 case BUILT_IN_FINITED128:
2167 case BUILT_IN_ISINFD32:
2168 case BUILT_IN_ISINFD64:
2169 case BUILT_IN_ISINFD128:
2170 /* These builtins have no optabs (yet). */
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175
2176 /* There's no easy way to detect the case we need to set EDOM. */
2177 if (flag_errno_math && errno_set)
2178 return CODE_FOR_nothing;
2179
2180 /* Optab mode depends on the mode of the input argument. */
2181 mode = TYPE_MODE (TREE_TYPE (arg));
2182
2183 if (builtin_optab)
2184 return optab_handler (builtin_optab, mode);
2185 return CODE_FOR_nothing;
2186 }
2187
2188 /* Expand a call to one of the builtin math functions that operate on
2189 a floating point argument and produce an integer result (ilogb,
2190 isinf, isnan, etc).
2191 Return 0 if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET. */
2194
2195 static rtx
2196 expand_builtin_interclass_mathfn (tree exp, rtx target)
2197 {
2198 enum insn_code icode = CODE_FOR_nothing;
2199 rtx op0;
2200 tree fndecl = get_callee_fndecl (exp);
2201 machine_mode mode;
2202 tree arg;
2203
2204 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2205 return NULL_RTX;
2206
2207 arg = CALL_EXPR_ARG (exp, 0);
2208 icode = interclass_mathfn_icode (arg, fndecl);
2209 mode = TYPE_MODE (TREE_TYPE (arg));
2210
2211 if (icode != CODE_FOR_nothing)
2212 {
2213 struct expand_operand ops[1];
2214 rtx_insn *last = get_last_insn ();
2215 tree orig_arg = arg;
2216
2217 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2218 need to expand the argument again. This way, we will not perform
2219 side-effects more than once. */
2220 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2221
2222 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2223
2224 if (mode != GET_MODE (op0))
2225 op0 = convert_to_mode (mode, op0, 0);
2226
2227 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2228 if (maybe_legitimize_operands (icode, 0, 1, ops)
2229 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2230 return ops[0].value;
2231
2232 delete_insns_since (last);
2233 CALL_EXPR_ARG (exp, 0) = orig_arg;
2234 }
2235
2236 return NULL_RTX;
2237 }
2238
2239 /* Expand a call to the builtin sincos math function.
2240 Return NULL_RTX if a normal call should be emitted rather than expanding the
2241 function in-line. EXP is the expression that is a call to the builtin
2242 function. */
2243
2244 static rtx
2245 expand_builtin_sincos (tree exp)
2246 {
2247 rtx op0, op1, op2, target1, target2;
2248 machine_mode mode;
2249 tree arg, sinp, cosp;
2250 int result;
2251 location_t loc = EXPR_LOCATION (exp);
2252 tree alias_type, alias_off;
2253
2254 if (!validate_arglist (exp, REAL_TYPE,
2255 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2256 return NULL_RTX;
2257
2258 arg = CALL_EXPR_ARG (exp, 0);
2259 sinp = CALL_EXPR_ARG (exp, 1);
2260 cosp = CALL_EXPR_ARG (exp, 2);
2261
2262 /* Make a suitable register to place result in. */
2263 mode = TYPE_MODE (TREE_TYPE (arg));
2264
2265 /* Check if sincos insn is available, otherwise emit the call. */
2266 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2267 return NULL_RTX;
2268
2269 target1 = gen_reg_rtx (mode);
2270 target2 = gen_reg_rtx (mode);
2271
2272 op0 = expand_normal (arg);
2273 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2274 alias_off = build_int_cst (alias_type, 0);
2275 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2276 sinp, alias_off));
2277 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2278 cosp, alias_off));
2279
2280 /* Compute into target1 and target2: the sincos pattern's first
2281 output is the cosine (target2) and its second the sine (target1). */
2282 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2283 gcc_assert (result);
2284
2285 /* Move target1 and target2 to the memory locations indicated
2286 by op1 and op2. */
2287 emit_move_insn (op1, target1);
2288 emit_move_insn (op2, target2);
2289
2290 return const0_rtx;
2291 }
2292
2293 /* Expand a call to the internal cexpi builtin to the sincos math function.
2294 EXP is the expression that is a call to the builtin function; if convenient,
2295 the result should be placed in TARGET. */
2296
2297 static rtx
2298 expand_builtin_cexpi (tree exp, rtx target)
2299 {
2300 tree fndecl = get_callee_fndecl (exp);
2301 tree arg, type;
2302 machine_mode mode;
2303 rtx op0, op1, op2;
2304 location_t loc = EXPR_LOCATION (exp);
2305
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 return NULL_RTX;
2308
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 type = TREE_TYPE (arg);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2312
2313 /* Try expanding via a sincos optab, and fall back to emitting a libcall
2314 to sincos or cexp. We know one of those is available, because cexpi
2315 is only generated when sincos or cexp is. */
2316 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2317 {
2318 op1 = gen_reg_rtx (mode);
2319 op2 = gen_reg_rtx (mode);
2320
2321 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2322
2323 /* Compute into op1 and op2. */
2324 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2325 }
2326 else if (targetm.libc_has_function (function_sincos))
2327 {
2328 tree call, fn = NULL_TREE;
2329 tree top1, top2;
2330 rtx op1a, op2a;
2331
2332 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2333 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2334 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2335 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2336 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2337 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2338 else
2339 gcc_unreachable ();
2340
2341 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2342 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2343 op1a = copy_addr_to_reg (XEXP (op1, 0));
2344 op2a = copy_addr_to_reg (XEXP (op2, 0));
2345 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2346 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2347
2348 /* Make sure not to fold the sincos call again. */
2349 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2350 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2351 call, 3, arg, top1, top2));
2352 }
2353 else
2354 {
2355 tree call, fn = NULL_TREE, narg;
2356 tree ctype = build_complex_type (type);
2357
2358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2359 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2360 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2361 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2362 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2363 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2364 else
2365 gcc_unreachable ();
2366
2367 /* If we don't have a decl for cexp, create one. This is the
2368 friendliest fallback if the user calls __builtin_cexpi on a
2369 target without full C99 function support. */
2370 if (fn == NULL_TREE)
2371 {
2372 tree fntype;
2373 const char *name = NULL;
2374
2375 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2376 name = "cexpf";
2377 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2378 name = "cexp";
2379 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2380 name = "cexpl";
2381
2382 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2383 fn = build_fn_decl (name, fntype);
2384 }
2385
2386 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2387 build_real (type, dconst0), arg);
2388
2389 /* Make sure not to fold the cexp call again. */
2390 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2391 return expand_expr (build_call_nary (ctype, call, 1, narg),
2392 target, VOIDmode, EXPAND_NORMAL);
2393 }
2394
2395 /* Now build the proper return type. */
2396 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2397 make_tree (TREE_TYPE (arg), op2),
2398 make_tree (TREE_TYPE (arg), op1)),
2399 target, VOIDmode, EXPAND_NORMAL);
2400 }
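/* In both sincos-based paths above OP2 receives the cosine and OP1 the
   sine, so the COMPLEX_EXPR built here yields cos (x) + i*sin (x),
   i.e. e^(i*x); the cexp fallback instead returns the equivalent
   cexp (i*x) call directly.  */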
2401
2402 /* Conveniently construct a function call expression. FNDECL names the
2403 function to be called, N is the number of arguments, and the "..."
2404 parameters are the argument expressions. Unlike build_call_expr,
2405 this doesn't fold the call, so it always returns a CALL_EXPR. */
2406
2407 static tree
2408 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2409 {
2410 va_list ap;
2411 tree fntype = TREE_TYPE (fndecl);
2412 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2413
2414 va_start (ap, n);
2415 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2416 va_end (ap);
2417 SET_EXPR_LOCATION (fn, loc);
2418 return fn;
2419 }
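/* Typical use, as in the expanders below: build an explicit memcpy call
   that must remain a CALL_EXPR:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                        dest, src, len);  */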
2420
2421 /* Expand a call to one of the builtin rounding functions gcc defines
2422 as an extension (lfloor and lceil). As these are gcc extensions we
2423 do not need to worry about setting errno to EDOM.
2424 If expanding via optab fails, lower expression to (int)(floor(x)).
2425 EXP is the expression that is a call to the builtin function;
2426 if convenient, the result should be placed in TARGET. */
2427
2428 static rtx
2429 expand_builtin_int_roundingfn (tree exp, rtx target)
2430 {
2431 convert_optab builtin_optab;
2432 rtx op0, tmp;
2433 rtx_insn *insns;
2434 tree fndecl = get_callee_fndecl (exp);
2435 enum built_in_function fallback_fn;
2436 tree fallback_fndecl;
2437 machine_mode mode;
2438 tree arg;
2439
2440 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2441 gcc_unreachable ();
2442
2443 arg = CALL_EXPR_ARG (exp, 0);
2444
2445 switch (DECL_FUNCTION_CODE (fndecl))
2446 {
2447 CASE_FLT_FN (BUILT_IN_ICEIL):
2448 CASE_FLT_FN (BUILT_IN_LCEIL):
2449 CASE_FLT_FN (BUILT_IN_LLCEIL):
2450 builtin_optab = lceil_optab;
2451 fallback_fn = BUILT_IN_CEIL;
2452 break;
2453
2454 CASE_FLT_FN (BUILT_IN_IFLOOR):
2455 CASE_FLT_FN (BUILT_IN_LFLOOR):
2456 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2457 builtin_optab = lfloor_optab;
2458 fallback_fn = BUILT_IN_FLOOR;
2459 break;
2460
2461 default:
2462 gcc_unreachable ();
2463 }
2464
2465 /* Make a suitable register to place result in. */
2466 mode = TYPE_MODE (TREE_TYPE (exp));
2467
2468 target = gen_reg_rtx (mode);
2469
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more than once. */
2473 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2474
2475 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2476
2477 start_sequence ();
2478
2479 /* Compute into TARGET. */
2480 if (expand_sfix_optab (target, op0, builtin_optab))
2481 {
2482 /* Output the entire sequence. */
2483 insns = get_insns ();
2484 end_sequence ();
2485 emit_insn (insns);
2486 return target;
2487 }
2488
2489 /* If we were unable to expand via the builtin, stop the sequence
2490 (without outputting the insns). */
2491 end_sequence ();
2492
2493 /* Fall back to floating point rounding optab. */
2494 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2495
2496 /* For non-C99 targets we may end up without a fallback fndecl here
2497 if the user called __builtin_lfloor directly. In this case emit
2498 a call to the floor/ceil variants nevertheless. This should give
2499 the best user experience on targets without full C99 support. */
2500 if (fallback_fndecl == NULL_TREE)
2501 {
2502 tree fntype;
2503 const char *name = NULL;
2504
2505 switch (DECL_FUNCTION_CODE (fndecl))
2506 {
2507 case BUILT_IN_ICEIL:
2508 case BUILT_IN_LCEIL:
2509 case BUILT_IN_LLCEIL:
2510 name = "ceil";
2511 break;
2512 case BUILT_IN_ICEILF:
2513 case BUILT_IN_LCEILF:
2514 case BUILT_IN_LLCEILF:
2515 name = "ceilf";
2516 break;
2517 case BUILT_IN_ICEILL:
2518 case BUILT_IN_LCEILL:
2519 case BUILT_IN_LLCEILL:
2520 name = "ceill";
2521 break;
2522 case BUILT_IN_IFLOOR:
2523 case BUILT_IN_LFLOOR:
2524 case BUILT_IN_LLFLOOR:
2525 name = "floor";
2526 break;
2527 case BUILT_IN_IFLOORF:
2528 case BUILT_IN_LFLOORF:
2529 case BUILT_IN_LLFLOORF:
2530 name = "floorf";
2531 break;
2532 case BUILT_IN_IFLOORL:
2533 case BUILT_IN_LFLOORL:
2534 case BUILT_IN_LLFLOORL:
2535 name = "floorl";
2536 break;
2537 default:
2538 gcc_unreachable ();
2539 }
2540
2541 fntype = build_function_type_list (TREE_TYPE (arg),
2542 TREE_TYPE (arg), NULL_TREE);
2543 fallback_fndecl = build_fn_decl (name, fntype);
2544 }
2545
2546 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2547
2548 tmp = expand_normal (exp);
2549 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2550
2551 /* Truncate the result of the floating point optab to an integer
2552 via expand_fix (). */
2553 target = gen_reg_rtx (mode);
2554 expand_fix (target, tmp, 0);
2555
2556 return target;
2557 }
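/* For example, when the target lacks an lfloor pattern, a call
   __builtin_lfloor (x) is lowered here to the equivalent of
   (long) floor (x): the fallback call computes floor (x) and
   expand_fix performs the truncating conversion.  */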
2558
2559 /* Expand a call to one of the builtin math functions doing integer
2560 conversion (lrint).
2561 Return 0 if a normal call should be emitted rather than expanding the
2562 function in-line. EXP is the expression that is a call to the builtin
2563 function; if convenient, the result should be placed in TARGET. */
2564
2565 static rtx
2566 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2567 {
2568 convert_optab builtin_optab;
2569 rtx op0;
2570 rtx_insn *insns;
2571 tree fndecl = get_callee_fndecl (exp);
2572 tree arg;
2573 machine_mode mode;
2574 enum built_in_function fallback_fn = BUILT_IN_NONE;
2575
2576 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2577 gcc_unreachable ();
2578
2579 arg = CALL_EXPR_ARG (exp, 0);
2580
2581 switch (DECL_FUNCTION_CODE (fndecl))
2582 {
2583 CASE_FLT_FN (BUILT_IN_IRINT):
2584 fallback_fn = BUILT_IN_LRINT;
2585 gcc_fallthrough ();
2586 CASE_FLT_FN (BUILT_IN_LRINT):
2587 CASE_FLT_FN (BUILT_IN_LLRINT):
2588 builtin_optab = lrint_optab;
2589 break;
2590
2591 CASE_FLT_FN (BUILT_IN_IROUND):
2592 fallback_fn = BUILT_IN_LROUND;
2593 gcc_fallthrough ();
2594 CASE_FLT_FN (BUILT_IN_LROUND):
2595 CASE_FLT_FN (BUILT_IN_LLROUND):
2596 builtin_optab = lround_optab;
2597 break;
2598
2599 default:
2600 gcc_unreachable ();
2601 }
2602
2603 /* There's no easy way to detect the case we need to set EDOM. */
2604 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2605 return NULL_RTX;
2606
2607 /* Make a suitable register to place result in. */
2608 mode = TYPE_MODE (TREE_TYPE (exp));
2609
2610 /* Expand inline only when errno does not need to be set. */
2611 if (!flag_errno_math)
2612 {
2613 rtx result = gen_reg_rtx (mode);
2614
2615 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2616 need to expand the argument again. This way, we will not perform
2617 side-effects more than once. */
2618 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2619
2620 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2621
2622 start_sequence ();
2623
2624 if (expand_sfix_optab (result, op0, builtin_optab))
2625 {
2626 /* Output the entire sequence. */
2627 insns = get_insns ();
2628 end_sequence ();
2629 emit_insn (insns);
2630 return result;
2631 }
2632
2633 /* If we were unable to expand via the builtin, stop the sequence
2634 (without outputting the insns) and call the library function
2635 with the stabilized argument list. */
2636 end_sequence ();
2637 }
2638
2639 if (fallback_fn != BUILT_IN_NONE)
2640 {
2641 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2642 targets, (int) round (x) should never be transformed into
2643 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2644 a call to lround in the hope that the target provides at least some
2645 C99 functions. This should give the best user experience on
2646 targets without full C99 support. */
2647 tree fallback_fndecl = mathfn_built_in_1
2648 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2649
2650 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2651 fallback_fndecl, 1, arg);
2652
2653 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2654 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2655 return convert_to_mode (mode, target, 0);
2656 }
2657
2658 return expand_call (exp, target, target == const0_rtx);
2659 }
2660
2661 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2662 a normal call should be emitted rather than expanding the function
2663 in-line. EXP is the expression that is a call to the builtin
2664 function; if convenient, the result should be placed in TARGET. */
2665
2666 static rtx
2667 expand_builtin_powi (tree exp, rtx target)
2668 {
2669 tree arg0, arg1;
2670 rtx op0, op1;
2671 machine_mode mode;
2672 machine_mode mode2;
2673
2674 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2675 return NULL_RTX;
2676
2677 arg0 = CALL_EXPR_ARG (exp, 0);
2678 arg1 = CALL_EXPR_ARG (exp, 1);
2679 mode = TYPE_MODE (TREE_TYPE (exp));
2680
2681 /* Emit a libcall to libgcc. */
2682
2683 /* Mode of the 2nd argument must match that of an int. */
2684 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2685
2686 if (target == NULL_RTX)
2687 target = gen_reg_rtx (mode);
2688
2689 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2690 if (GET_MODE (op0) != mode)
2691 op0 = convert_to_mode (mode, op0, 0);
2692 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2693 if (GET_MODE (op1) != mode2)
2694 op1 = convert_to_mode (mode2, op1, 0);
2695
2696 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2697 target, LCT_CONST, mode, 2,
2698 op0, mode, op1, mode2);
2699
2700 return target;
2701 }
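/* For example, with a double argument this emits a libgcc call of the
   form __powidf2 (x, n) (the powi_optab libfunc for DFmode), after
   converting the exponent to int mode.  */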
2702
2703 /* Expand expression EXP which is a call to the strlen builtin. Return
2704 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2705 try to get the result in TARGET, if convenient. */
2706
2707 static rtx
2708 expand_builtin_strlen (tree exp, rtx target,
2709 machine_mode target_mode)
2710 {
2711 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2712 return NULL_RTX;
2713 else
2714 {
2715 struct expand_operand ops[4];
2716 rtx pat;
2717 tree len;
2718 tree src = CALL_EXPR_ARG (exp, 0);
2719 rtx src_reg;
2720 rtx_insn *before_strlen;
2721 machine_mode insn_mode = target_mode;
2722 enum insn_code icode = CODE_FOR_nothing;
2723 unsigned int align;
2724
2725 /* If the length can be computed at compile-time, return it. */
2726 len = c_strlen (src, 0);
2727 if (len)
2728 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2729
2730 /* If the length can be computed at compile-time and is constant
2731 integer, but there are side-effects in src, evaluate
2732 src for side-effects, then return len.
2733 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2734 can be optimized into: i++; x = 3; */
2735 len = c_strlen (src, 1);
2736 if (len && TREE_CODE (len) == INTEGER_CST)
2737 {
2738 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2739 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2740 }
2741
2742 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2743
2744 /* If SRC is not a pointer type, don't do this operation inline. */
2745 if (align == 0)
2746 return NULL_RTX;
2747
2748 /* Bail out if we can't compute strlen in the right mode. */
2749 while (insn_mode != VOIDmode)
2750 {
2751 icode = optab_handler (strlen_optab, insn_mode);
2752 if (icode != CODE_FOR_nothing)
2753 break;
2754
2755 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2756 }
2757 if (insn_mode == VOIDmode)
2758 return NULL_RTX;
2759
2760 /* Make a place to hold the source address. We will not expand
2761 the actual source until we are sure that the expansion will
2762 not fail -- there are trees that cannot be expanded twice. */
2763 src_reg = gen_reg_rtx (Pmode);
2764
2765 /* Mark the beginning of the strlen sequence so we can emit the
2766 source operand later. */
2767 before_strlen = get_last_insn ();
2768
2769 create_output_operand (&ops[0], target, insn_mode);
2770 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2771 create_integer_operand (&ops[2], 0);
2772 create_integer_operand (&ops[3], align);
2773 if (!maybe_expand_insn (icode, 4, ops))
2774 return NULL_RTX;
2775
2776 /* Now that we are assured of success, expand the source. */
2777 start_sequence ();
2778 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2779 if (pat != src_reg)
2780 {
2781 #ifdef POINTERS_EXTEND_UNSIGNED
2782 if (GET_MODE (pat) != Pmode)
2783 pat = convert_to_mode (Pmode, pat,
2784 POINTERS_EXTEND_UNSIGNED);
2785 #endif
2786 emit_move_insn (src_reg, pat);
2787 }
2788 pat = get_insns ();
2789 end_sequence ();
2790
2791 if (before_strlen)
2792 emit_insn_after (pat, before_strlen);
2793 else
2794 emit_insn_before (pat, get_insns ());
2795
2796 /* Return the value in the proper mode for this function. */
2797 if (GET_MODE (ops[0].value) == target_mode)
2798 target = ops[0].value;
2799 else if (target != 0)
2800 convert_move (target, ops[0].value, 0);
2801 else
2802 target = convert_to_mode (target_mode, ops[0].value, 0);
2803
2804 return target;
2805 }
2806 }
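/* Examples of the paths above: strlen ("hello") folds to the constant 5
   via c_strlen; strlen (i++ ? "xfoo" + 1 : "bar") evaluates the side
   effects and then uses the constant 3; failing that, the strlen_optab
   pattern is tried before falling back to a library call.  */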
2807
2808 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2809 bytes from constant string DATA + OFFSET and return it as target
2810 constant. */
2811
2812 static rtx
2813 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2814 machine_mode mode)
2815 {
2816 const char *str = (const char *) data;
2817
2818 gcc_assert (offset >= 0
2819 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2820 <= strlen (str) + 1));
2821
2822 return c_readstr (str + offset, mode);
2823 }
2824
2825 /* LEN specifies the length of the block for the memcpy/memset operation.
2826 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2827 In some cases we can make a very likely guess about the maximum size,
2828 which we then store in PROBABLE_MAX_SIZE. */
2829
2830 static void
2831 determine_block_size (tree len, rtx len_rtx,
2832 unsigned HOST_WIDE_INT *min_size,
2833 unsigned HOST_WIDE_INT *max_size,
2834 unsigned HOST_WIDE_INT *probable_max_size)
2835 {
2836 if (CONST_INT_P (len_rtx))
2837 {
2838 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2839 return;
2840 }
2841 else
2842 {
2843 wide_int min, max;
2844 enum value_range_type range_type = VR_UNDEFINED;
2845
2846 /* Determine bounds from the type. */
2847 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2848 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2849 else
2850 *min_size = 0;
2851 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2852 *probable_max_size = *max_size
2853 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2854 else
2855 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2856
2857 if (TREE_CODE (len) == SSA_NAME)
2858 range_type = get_range_info (len, &min, &max);
2859 if (range_type == VR_RANGE)
2860 {
2861 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2862 *min_size = min.to_uhwi ();
2863 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2864 *probable_max_size = *max_size = max.to_uhwi ();
2865 }
2866 else if (range_type == VR_ANTI_RANGE)
2867 {
2868 /* An anti range 0...N lets us determine that the minimal size is N+1. */
2869 if (min == 0)
2870 {
2871 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2872 *min_size = max.to_uhwi () + 1;
2873 }
2874 /* Code like
2875
2876 int n;
2877 if (n < 100)
2878 memcpy (a, b, n)
2879
2880 produces an anti range allowing negative values of N. We can
2881 still use that information to guess that N is not negative.
2882 */
2883 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2884 *probable_max_size = min.to_uhwi () - 1;
2885 }
2886 }
2887 gcc_checking_assert (*max_size <=
2888 (unsigned HOST_WIDE_INT)
2889 GET_MODE_MASK (GET_MODE (len_rtx)));
2890 }
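/* For example, if range info recorded LEN in [32, 4096], *MIN_SIZE
   becomes 32 and both *MAX_SIZE and *PROBABLE_MAX_SIZE become 4096;
   an anti range ~[0, 3] instead raises *MIN_SIZE to 4, per the
   cases above.  */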
2891
2892 /* Helper function to do the actual work for expand_builtin_memcpy. */
2893
2894 static rtx
2895 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2896 {
2897 const char *src_str;
2898 unsigned int src_align = get_pointer_alignment (src);
2899 unsigned int dest_align = get_pointer_alignment (dest);
2900 rtx dest_mem, src_mem, dest_addr, len_rtx;
2901 HOST_WIDE_INT expected_size = -1;
2902 unsigned int expected_align = 0;
2903 unsigned HOST_WIDE_INT min_size;
2904 unsigned HOST_WIDE_INT max_size;
2905 unsigned HOST_WIDE_INT probable_max_size;
2906
2907 /* If DEST is not a pointer type, call the normal function. */
2908 if (dest_align == 0)
2909 return NULL_RTX;
2910
2911 /* If SRC is not a pointer type, don't do this
2912 operation in-line. */
2913 if (src_align == 0)
2914 return NULL_RTX;
2915
2916 if (currently_expanding_gimple_stmt)
2917 stringop_block_profile (currently_expanding_gimple_stmt,
2918 &expected_align, &expected_size);
2919
2920 if (expected_align < dest_align)
2921 expected_align = dest_align;
2922 dest_mem = get_memory_rtx (dest, len);
2923 set_mem_align (dest_mem, dest_align);
2924 len_rtx = expand_normal (len);
2925 determine_block_size (len, len_rtx, &min_size, &max_size,
2926 &probable_max_size);
2927 src_str = c_getstr (src);
2928
2929 /* If SRC is a string constant and block move would be done
2930 by pieces, we can avoid loading the string from memory
2931 and store only the computed constants. */
2932 if (src_str
2933 && CONST_INT_P (len_rtx)
2934 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2935 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2936 CONST_CAST (char *, src_str),
2937 dest_align, false))
2938 {
2939 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2940 builtin_memcpy_read_str,
2941 CONST_CAST (char *, src_str),
2942 dest_align, false, 0);
2943 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2945 return dest_mem;
2946 }
2947
2948 src_mem = get_memory_rtx (src, len);
2949 set_mem_align (src_mem, src_align);
2950
2951 /* Copy word part most expediently. */
2952 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2953 CALL_EXPR_TAILCALL (exp)
2954 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2955 expected_align, expected_size,
2956 min_size, max_size, probable_max_size);
2957
2958 if (dest_addr == 0)
2959 {
2960 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2961 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2962 }
2963
2964 return dest_addr;
2965 }
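/* For example, memcpy (buf, "abc", 4) with a constant length and a string
   constant source can take the store_by_pieces path above, storing the
   bytes of "abc" as immediate constants rather than loading them from
   memory.  */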
2966
2967 /* Expand a call EXP to the memcpy builtin.
2968 Return NULL_RTX if we failed; the caller should emit a normal call,
2969 otherwise try to get the result in TARGET, if convenient (and in
2970 mode MODE if that's convenient). */
2971
2972 static rtx
2973 expand_builtin_memcpy (tree exp, rtx target)
2974 {
2975 if (!validate_arglist (exp,
2976 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2977 return NULL_RTX;
2978 else
2979 {
2980 tree dest = CALL_EXPR_ARG (exp, 0);
2981 tree src = CALL_EXPR_ARG (exp, 1);
2982 tree len = CALL_EXPR_ARG (exp, 2);
2983 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2984 }
2985 }
2986
2987 /* Expand an instrumented call EXP to the memcpy builtin.
2988 Return NULL_RTX if we failed; the caller should emit a normal call,
2989 otherwise try to get the result in TARGET, if convenient (and in
2990 mode MODE if that's convenient). */
2991
2992 static rtx
2993 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2994 {
2995 if (!validate_arglist (exp,
2996 POINTER_TYPE, POINTER_BOUNDS_TYPE,
2997 POINTER_TYPE, POINTER_BOUNDS_TYPE,
2998 INTEGER_TYPE, VOID_TYPE))
2999 return NULL_RTX;
3000 else
3001 {
3002 tree dest = CALL_EXPR_ARG (exp, 0);
3003 tree src = CALL_EXPR_ARG (exp, 2);
3004 tree len = CALL_EXPR_ARG (exp, 4);
3005 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3006
3007 /* Return src bounds with the result. */
3008 if (res)
3009 {
3010 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3011 expand_normal (CALL_EXPR_ARG (exp, 1)));
3012 res = chkp_join_splitted_slot (res, bnd);
3013 }
3014 return res;
3015 }
3016 }
3017
3018 /* Expand a call EXP to the mempcpy builtin.
3019 Return NULL_RTX if we failed; the caller should emit a normal call,
3020 otherwise try to get the result in TARGET, if convenient (and in
3021 mode MODE if that's convenient). If ENDP is 0 return the
3022 destination pointer, if ENDP is 1 return the end pointer ala
3023 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3024 stpcpy. */
3025
3026 static rtx
3027 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3028 {
3029 if (!validate_arglist (exp,
3030 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3031 return NULL_RTX;
3032 else
3033 {
3034 tree dest = CALL_EXPR_ARG (exp, 0);
3035 tree src = CALL_EXPR_ARG (exp, 1);
3036 tree len = CALL_EXPR_ARG (exp, 2);
3037 return expand_builtin_mempcpy_args (dest, src, len,
3038 target, mode, /*endp=*/ 1,
3039 exp);
3040 }
3041 }
3042
3043 /* Expand an instrumented call EXP to the mempcpy builtin.
3044 Return NULL_RTX if we failed; the caller should emit a normal call,
3045 otherwise try to get the result in TARGET, if convenient (and in
3046 mode MODE if that's convenient). */
3047
3048 static rtx
3049 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3050 {
3051 if (!validate_arglist (exp,
3052 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3053 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3054 INTEGER_TYPE, VOID_TYPE))
3055 return NULL_RTX;
3056 else
3057 {
3058 tree dest = CALL_EXPR_ARG (exp, 0);
3059 tree src = CALL_EXPR_ARG (exp, 2);
3060 tree len = CALL_EXPR_ARG (exp, 4);
3061 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3062 mode, 1, exp);
3063
3064 /* Return src bounds with the result. */
3065 if (res)
3066 {
3067 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3068 expand_normal (CALL_EXPR_ARG (exp, 1)));
3069 res = chkp_join_splitted_slot (res, bnd);
3070 }
3071 return res;
3072 }
3073 }
3074
3075 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3076 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3077 so that this can also be called without constructing an actual CALL_EXPR.
3078 The other arguments and return value are the same as for
3079 expand_builtin_mempcpy. */
3080
3081 static rtx
3082 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3083 rtx target, machine_mode mode, int endp,
3084 tree orig_exp)
3085 {
3086 tree fndecl = get_callee_fndecl (orig_exp);
3087
3088 /* If return value is ignored, transform mempcpy into memcpy. */
3089 if (target == const0_rtx
3090 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3091 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3092 {
3093 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3094 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3095 dest, src, len);
3096 return expand_expr (result, target, mode, EXPAND_NORMAL);
3097 }
3098 else if (target == const0_rtx
3099 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3100 {
3101 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3102 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3103 dest, src, len);
3104 return expand_expr (result, target, mode, EXPAND_NORMAL);
3105 }
3106 else
3107 {
3108 const char *src_str;
3109 unsigned int src_align = get_pointer_alignment (src);
3110 unsigned int dest_align = get_pointer_alignment (dest);
3111 rtx dest_mem, src_mem, len_rtx;
3112
3113 /* If either SRC or DEST is not a pointer type, don't do this
3114 operation in-line. */
3115 if (dest_align == 0 || src_align == 0)
3116 return NULL_RTX;
3117
3118 /* If LEN is not constant, call the normal function. */
3119 if (! tree_fits_uhwi_p (len))
3120 return NULL_RTX;
3121
3122 len_rtx = expand_normal (len);
3123 src_str = c_getstr (src);
3124
3125 /* If SRC is a string constant and block move would be done
3126 by pieces, we can avoid loading the string from memory
3127 and store only the computed constants. */
3128 if (src_str
3129 && CONST_INT_P (len_rtx)
3130 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3131 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3132 CONST_CAST (char *, src_str),
3133 dest_align, false))
3134 {
3135 dest_mem = get_memory_rtx (dest, len);
3136 set_mem_align (dest_mem, dest_align);
3137 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3138 builtin_memcpy_read_str,
3139 CONST_CAST (char *, src_str),
3140 dest_align, false, endp);
3141 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3142 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3143 return dest_mem;
3144 }
3145
3146 if (CONST_INT_P (len_rtx)
3147 && can_move_by_pieces (INTVAL (len_rtx),
3148 MIN (dest_align, src_align)))
3149 {
3150 dest_mem = get_memory_rtx (dest, len);
3151 set_mem_align (dest_mem, dest_align);
3152 src_mem = get_memory_rtx (src, len);
3153 set_mem_align (src_mem, src_align);
3154 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3155 MIN (dest_align, src_align), endp);
3156 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3157 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3158 return dest_mem;
3159 }
3160
3161 return NULL_RTX;
3162 }
3163 }
3164
3165 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3166 we failed, the caller should emit a normal call, otherwise try to
3167 get the result in TARGET, if convenient. If ENDP is 0 return the
3168 destination pointer, if ENDP is 1 return the end pointer ala
3169 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3170 stpcpy. */
3171
3172 static rtx
3173 expand_movstr (tree dest, tree src, rtx target, int endp)
3174 {
3175 struct expand_operand ops[3];
3176 rtx dest_mem;
3177 rtx src_mem;
3178
3179 if (!targetm.have_movstr ())
3180 return NULL_RTX;
3181
3182 dest_mem = get_memory_rtx (dest, NULL);
3183 src_mem = get_memory_rtx (src, NULL);
3184 if (!endp)
3185 {
3186 target = force_reg (Pmode, XEXP (dest_mem, 0));
3187 dest_mem = replace_equiv_address (dest_mem, target);
3188 }
3189
3190 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3191 create_fixed_operand (&ops[1], dest_mem);
3192 create_fixed_operand (&ops[2], src_mem);
3193 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3194 return NULL_RTX;
3195
3196 if (endp && target != const0_rtx)
3197 {
3198 target = ops[0].value;
3199 /* movstr is supposed to set end to the address of the NUL
3200 terminator. If the caller requested a mempcpy-like return value,
3201 adjust it. */
3202 if (endp == 1)
3203 {
3204 rtx tem = plus_constant (GET_MODE (target),
3205 gen_lowpart (GET_MODE (target), target), 1);
3206 emit_move_insn (target, force_operand (tem, NULL_RTX));
3207 }
3208 }
3209 return target;
3210 }
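/* The ENDP encoding matches the comment above: expand_builtin_strcpy
   passes 0 and gets DEST back, expand_builtin_stpcpy passes 2 and can use
   movstr's NUL-terminator address directly, and an ENDP of 1 (mempcpy
   style) gets the terminator address plus one.  */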
3211
3212 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3213 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3214 try to get the result in TARGET, if convenient (and in mode MODE if that's
3215 convenient). */
3216
3217 static rtx
3218 expand_builtin_strcpy (tree exp, rtx target)
3219 {
3220 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3221 {
3222 tree dest = CALL_EXPR_ARG (exp, 0);
3223 tree src = CALL_EXPR_ARG (exp, 1);
3224 return expand_builtin_strcpy_args (dest, src, target);
3225 }
3226 return NULL_RTX;
3227 }
3228
3229 /* Helper function to do the actual work for expand_builtin_strcpy. The
3230 arguments to the builtin_strcpy call DEST and SRC are broken out
3231 so that this can also be called without constructing an actual CALL_EXPR.
3232 The other arguments and return value are the same as for
3233 expand_builtin_strcpy. */
3234
3235 static rtx
3236 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3237 {
3238 return expand_movstr (dest, src, target, /*endp=*/0);
3239 }
3240
3241 /* Expand a call EXP to the stpcpy builtin.
3242 Return NULL_RTX if we failed; the caller should emit a normal call;
3243 otherwise try to get the result in TARGET, if convenient (and in
3244 mode MODE if that's convenient). */
3245
3246 static rtx
3247 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3248 {
3249 tree dst, src;
3250 location_t loc = EXPR_LOCATION (exp);
3251
3252 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
3254
3255 dst = CALL_EXPR_ARG (exp, 0);
3256 src = CALL_EXPR_ARG (exp, 1);
3257
3258 /* If return value is ignored, transform stpcpy into strcpy. */
3259 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3260 {
3261 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3262 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3263 return expand_expr (result, target, mode, EXPAND_NORMAL);
3264 }
3265 else
3266 {
3267 tree len, lenp1;
3268 rtx ret;
3269
3270 /* Ensure we get an actual string whose length can be evaluated at
3271 compile-time, not an expression containing a string. This is
3272 because the latter will potentially produce pessimized code
3273 when used to produce the return value. */
3274 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3275 return expand_movstr (dst, src, target, /*endp=*/2);
3276
3277 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3278 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3279 target, mode, /*endp=*/2,
3280 exp);
3281
3282 if (ret)
3283 return ret;
3284
3285 if (TREE_CODE (len) == INTEGER_CST)
3286 {
3287 rtx len_rtx = expand_normal (len);
3288
3289 if (CONST_INT_P (len_rtx))
3290 {
3291 ret = expand_builtin_strcpy_args (dst, src, target);
3292
3293 if (ret)
3294 {
3295 if (! target)
3296 {
3297 if (mode != VOIDmode)
3298 target = gen_reg_rtx (mode);
3299 else
3300 target = gen_reg_rtx (GET_MODE (ret));
3301 }
3302 if (GET_MODE (target) != GET_MODE (ret))
3303 ret = gen_lowpart (GET_MODE (target), ret);
3304
3305 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3306 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3307 gcc_assert (ret);
3308
3309 return target;
3310 }
3311 }
3312 }
3313
3314 return expand_movstr (dst, src, target, /*endp=*/2);
3315 }
3316 }
3317
3318 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3319 bytes from constant string DATA + OFFSET and return it as target
3320 constant. */
3321
3322 rtx
3323 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3324 machine_mode mode)
3325 {
3326 const char *str = (const char *) data;
3327
3328 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3329 return const0_rtx;
3330
3331 return c_readstr (str + offset, mode);
3332 }
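/* Offsets beyond the end of STR read as zero, so when strncpy's LEN
   exceeds strlen (SRC) + 1 the store_by_pieces expansion below naturally
   produces the required zero padding.  */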
3333
3334 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3335 NULL_RTX if we failed; the caller should emit a normal call. */
3336
3337 static rtx
3338 expand_builtin_strncpy (tree exp, rtx target)
3339 {
3340 location_t loc = EXPR_LOCATION (exp);
3341
3342 if (validate_arglist (exp,
3343 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3344 {
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 1);
3347 tree len = CALL_EXPR_ARG (exp, 2);
3348 tree slen = c_strlen (src, 1);
3349
3350 /* We must be passed a constant len and src parameter. */
3351 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3352 return NULL_RTX;
3353
3354 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3355
3356 /* We're required to pad with trailing zeros if the requested
3357 len is greater than strlen(s2)+1. In that case try to
3358 use store_by_pieces; if that fails, punt. */
3359 if (tree_int_cst_lt (slen, len))
3360 {
3361 unsigned int dest_align = get_pointer_alignment (dest);
3362 const char *p = c_getstr (src);
3363 rtx dest_mem;
3364
3365 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3366 || !can_store_by_pieces (tree_to_uhwi (len),
3367 builtin_strncpy_read_str,
3368 CONST_CAST (char *, p),
3369 dest_align, false))
3370 return NULL_RTX;
3371
3372 dest_mem = get_memory_rtx (dest, len);
3373 store_by_pieces (dest_mem, tree_to_uhwi (len),
3374 builtin_strncpy_read_str,
3375 CONST_CAST (char *, p), dest_align, false, 0);
3376 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3377 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3378 return dest_mem;
3379 }
3380 }
3381 return NULL_RTX;
3382 }
3383
3384 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3385 bytes from constant string DATA + OFFSET and return it as target
3386 constant. */
3387
3388 rtx
3389 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3390 machine_mode mode)
3391 {
3392 const char *c = (const char *) data;
3393 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3394
3395 memset (p, *c, GET_MODE_SIZE (mode));
3396
3397 return c_readstr (p, mode);
3398 }
3399
3400 /* Callback routine for store_by_pieces. Return the RTL of a register
3401 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3402 char value given in the RTL register data. For example, if mode is
3403 4 bytes wide, return the RTL for 0x01010101*data. */
3404
3405 static rtx
3406 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3407 machine_mode mode)
3408 {
3409 rtx target, coeff;
3410 size_t size;
3411 char *p;
3412
3413 size = GET_MODE_SIZE (mode);
3414 if (size == 1)
3415 return (rtx) data;
3416
3417 p = XALLOCAVEC (char, size);
3418 memset (p, 1, size);
3419 coeff = c_readstr (p, mode);
3420
3421 target = convert_to_mode (mode, (rtx) data, 1);
3422 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3423 return force_reg (mode, target);
3424 }
3425
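/* Worked example for builtin_memset_gen_str, assuming a 4-byte
   integer mode and a run-time fill value V held in a register:
   COEFF is read from the buffer { 1, 1, 1, 1 }, i.e. 0x01010101,
   so the returned RTL computes 0x01010101 * (V & 0xff); V == 0xab
   yields 0xabababab.  */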
3426 /* Expand expression EXP, which is a call to the memset builtin. Return
3427 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3428 try to get the result in TARGET, if convenient (and in mode MODE if that's
3429 convenient). */
3430
3431 static rtx
3432 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3433 {
3434 if (!validate_arglist (exp,
3435 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3436 return NULL_RTX;
3437 else
3438 {
3439 tree dest = CALL_EXPR_ARG (exp, 0);
3440 tree val = CALL_EXPR_ARG (exp, 1);
3441 tree len = CALL_EXPR_ARG (exp, 2);
3442 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3443 }
3444 }
3445
3446 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3447 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3448 try to get the result in TARGET, if convenient (and in mode MODE if that's
3449 convenient). */
3450
3451 static rtx
3452 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3453 {
3454 if (!validate_arglist (exp,
3455 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3456 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3457 return NULL_RTX;
3458 else
3459 {
3460 tree dest = CALL_EXPR_ARG (exp, 0);
3461 tree val = CALL_EXPR_ARG (exp, 2);
3462 tree len = CALL_EXPR_ARG (exp, 3);
3463 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3464
3465 /* Return src bounds with the result. */
3466 if (res)
3467 {
3468 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3469 expand_normal (CALL_EXPR_ARG (exp, 1)));
3470 res = chkp_join_splitted_slot (res, bnd);
3471 }
3472 return res;
3473 }
3474 }
3475
3476 /* Helper function to do the actual work for expand_builtin_memset. The
3477 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3478 so that this can also be called without constructing an actual CALL_EXPR.
3479 The other arguments and return value are the same as for
3480 expand_builtin_memset. */
3481
3482 static rtx
3483 expand_builtin_memset_args (tree dest, tree val, tree len,
3484 rtx target, machine_mode mode, tree orig_exp)
3485 {
3486 tree fndecl, fn;
3487 enum built_in_function fcode;
3488 machine_mode val_mode;
3489 char c;
3490 unsigned int dest_align;
3491 rtx dest_mem, dest_addr, len_rtx;
3492 HOST_WIDE_INT expected_size = -1;
3493 unsigned int expected_align = 0;
3494 unsigned HOST_WIDE_INT min_size;
3495 unsigned HOST_WIDE_INT max_size;
3496 unsigned HOST_WIDE_INT probable_max_size;
3497
3498 dest_align = get_pointer_alignment (dest);
3499
3500 /* If DEST is not a pointer type, don't do this operation in-line. */
3501 if (dest_align == 0)
3502 return NULL_RTX;
3503
3504 if (currently_expanding_gimple_stmt)
3505 stringop_block_profile (currently_expanding_gimple_stmt,
3506 &expected_align, &expected_size);
3507
3508 if (expected_align < dest_align)
3509 expected_align = dest_align;
3510
3511 /* If the LEN parameter is zero, return DEST. */
3512 if (integer_zerop (len))
3513 {
3514 /* Evaluate and ignore VAL in case it has side-effects. */
3515 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3516 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3517 }
3518
3519 /* Stabilize the arguments in case we fail. */
3520 dest = builtin_save_expr (dest);
3521 val = builtin_save_expr (val);
3522 len = builtin_save_expr (len);
3523
3524 len_rtx = expand_normal (len);
3525 determine_block_size (len, len_rtx, &min_size, &max_size,
3526 &probable_max_size);
3527 dest_mem = get_memory_rtx (dest, len);
3528 val_mode = TYPE_MODE (unsigned_char_type_node);
3529
3530 if (TREE_CODE (val) != INTEGER_CST)
3531 {
3532 rtx val_rtx;
3533
3534 val_rtx = expand_normal (val);
3535 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3536
3537 /* Assume that we can memset by pieces if we can store
3538 the coefficients by pieces (in the required modes).
3539 We can't pass builtin_memset_gen_str as that emits RTL. */
3540 c = 1;
3541 if (tree_fits_uhwi_p (len)
3542 && can_store_by_pieces (tree_to_uhwi (len),
3543 builtin_memset_read_str, &c, dest_align,
3544 true))
3545 {
3546 val_rtx = force_reg (val_mode, val_rtx);
3547 store_by_pieces (dest_mem, tree_to_uhwi (len),
3548 builtin_memset_gen_str, val_rtx, dest_align,
3549 true, 0);
3550 }
3551 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3552 dest_align, expected_align,
3553 expected_size, min_size, max_size,
3554 probable_max_size))
3555 goto do_libcall;
3556
3557 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3558 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3559 return dest_mem;
3560 }
3561
3562 if (target_char_cast (val, &c))
3563 goto do_libcall;
3564
3565 if (c)
3566 {
3567 if (tree_fits_uhwi_p (len)
3568 && can_store_by_pieces (tree_to_uhwi (len),
3569 builtin_memset_read_str, &c, dest_align,
3570 true))
3571 store_by_pieces (dest_mem, tree_to_uhwi (len),
3572 builtin_memset_read_str, &c, dest_align, true, 0);
3573 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3574 gen_int_mode (c, val_mode),
3575 dest_align, expected_align,
3576 expected_size, min_size, max_size,
3577 probable_max_size))
3578 goto do_libcall;
3579
3580 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3581 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3582 return dest_mem;
3583 }
3584
3585 set_mem_align (dest_mem, dest_align);
3586 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3587 CALL_EXPR_TAILCALL (orig_exp)
3588 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3589 expected_align, expected_size,
3590 min_size, max_size,
3591 probable_max_size);
3592
3593 if (dest_addr == 0)
3594 {
3595 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3596 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3597 }
3598
3599 return dest_addr;
3600
3601 do_libcall:
3602 fndecl = get_callee_fndecl (orig_exp);
3603 fcode = DECL_FUNCTION_CODE (fndecl);
3604 if (fcode == BUILT_IN_MEMSET
3605 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3606 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3607 dest, val, len);
3608 else if (fcode == BUILT_IN_BZERO)
3609 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3610 dest, len);
3611 else
3612 gcc_unreachable ();
3613 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3614 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3615 return expand_call (fn, target, target == const0_rtx);
3616 }
3617
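/* A concrete sketch of the constant-value path above (hypothetical
   example): for memset (p, 0xab, 16) with P sufficiently aligned on
   a target with 8-byte word stores, target_char_cast yields
   C == 0xab and store_by_pieces emits two 8-byte stores of
   0xabababababababab, with no library call.  */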
3618 /* Expand expression EXP, which is a call to the bzero builtin. Return
3619 NULL_RTX if we failed; the caller should emit a normal call. */
3620
3621 static rtx
3622 expand_builtin_bzero (tree exp)
3623 {
3624 tree dest, size;
3625 location_t loc = EXPR_LOCATION (exp);
3626
3627 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3628 return NULL_RTX;
3629
3630 dest = CALL_EXPR_ARG (exp, 0);
3631 size = CALL_EXPR_ARG (exp, 1);
3632
3633 /* New argument list transforming bzero(ptr x, int y) to
3634 memset(ptr x, int 0, size_t y). This is done this way
3635 so that if it isn't expanded inline, we fall back to
3636 calling bzero instead of memset. */
3637
3638 return expand_builtin_memset_args (dest, integer_zero_node,
3639 fold_convert_loc (loc,
3640 size_type_node, size),
3641 const0_rtx, VOIDmode, exp);
3642 }
3643
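/* In other words, bzero (p, n) is expanded exactly as
   memset (p, 0, (size_t) n) would be, except that the fallback
   library call, if one is needed, is still bzero.  */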
3644 /* Try to expand cmpstr operation ICODE with the given operands.
3645 Return the result rtx on success, otherwise return null. */
3646
3647 static rtx
3648 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3649 HOST_WIDE_INT align)
3650 {
3651 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3652
3653 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3654 target = NULL_RTX;
3655
3656 struct expand_operand ops[4];
3657 create_output_operand (&ops[0], target, insn_mode);
3658 create_fixed_operand (&ops[1], arg1_rtx);
3659 create_fixed_operand (&ops[2], arg2_rtx);
3660 create_integer_operand (&ops[3], align);
3661 if (maybe_expand_insn (icode, 4, ops))
3662 return ops[0].value;
3663 return NULL_RTX;
3664 }
3665
3666 /* Expand expression EXP, which is a call to the memcmp built-in function.
3667 Return NULL_RTX if we failed and the caller should emit a normal call,
3668 otherwise try to get the result in TARGET, if convenient.
3669 RESULT_EQ is true if we can relax the returned value to be either zero
3670 or nonzero, without caring about the sign. */
3671
3672 static rtx
3673 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3674 {
3675 if (!validate_arglist (exp,
3676 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3677 return NULL_RTX;
3678
3679 tree arg1 = CALL_EXPR_ARG (exp, 0);
3680 tree arg2 = CALL_EXPR_ARG (exp, 1);
3681 tree len = CALL_EXPR_ARG (exp, 2);
3682 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3683 location_t loc = EXPR_LOCATION (exp);
3684
3685 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3686 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3687
3688 /* If we don't have POINTER_TYPE, call the function. */
3689 if (arg1_align == 0 || arg2_align == 0)
3690 return NULL_RTX;
3691
3692 rtx arg1_rtx = get_memory_rtx (arg1, len);
3693 rtx arg2_rtx = get_memory_rtx (arg2, len);
3694 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3695
3696 /* Set MEM_SIZE as appropriate. */
3697 if (CONST_INT_P (len_rtx))
3698 {
3699 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3700 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3701 }
3702
3703 by_pieces_constfn constfn = NULL;
3704
3705 const char *src_str = c_getstr (arg2);
3706 if (result_eq && src_str == NULL)
3707 {
3708 src_str = c_getstr (arg1);
3709 if (src_str != NULL)
3710 std::swap (arg1_rtx, arg2_rtx);
3711 }
3712
3713 /* If SRC is a string constant and block move would be done
3714 by pieces, we can avoid loading the string from memory
3715 and only store the computed constants. */
3716 if (src_str
3717 && CONST_INT_P (len_rtx)
3718 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3719 constfn = builtin_memcpy_read_str;
3720
3721 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3722 TREE_TYPE (len), target,
3723 result_eq, constfn,
3724 CONST_CAST (char *, src_str));
3725
3726 if (result)
3727 {
3728 /* Return the value in the proper mode for this function. */
3729 if (GET_MODE (result) == mode)
3730 return result;
3731
3732 if (target != 0)
3733 {
3734 convert_move (target, result, 0);
3735 return target;
3736 }
3737
3738 return convert_to_mode (mode, result, 0);
3739 }
3740
3741 return NULL_RTX;
3742 }
3743
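/* A sketch of the RESULT_EQ relaxation (hypothetical example): for
   a use such as

     if (memcmp (buf, "abcd", 4) == 0) ...

   only the zero/nonzero distinction matters, so a wide equality
   test suffices; and because "abcd" is a string constant no longer
   than LEN + 1, its words are synthesized by
   builtin_memcpy_read_str instead of being loaded from memory.  */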
3744 /* Expand expression EXP, which is a call to the strcmp builtin.
3745 Return NULL_RTX if we failed; the caller should emit a normal call,
3746 otherwise try to get the result in TARGET, if convenient. */
3747
3748 static rtx
3749 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3750 {
3751 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3752 return NULL_RTX;
3753
3754 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3755 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3756 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3757 {
3758 rtx arg1_rtx, arg2_rtx;
3759 tree fndecl, fn;
3760 tree arg1 = CALL_EXPR_ARG (exp, 0);
3761 tree arg2 = CALL_EXPR_ARG (exp, 1);
3762 rtx result = NULL_RTX;
3763
3764 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3765 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3766
3767 /* If we don't have POINTER_TYPE, call the function. */
3768 if (arg1_align == 0 || arg2_align == 0)
3769 return NULL_RTX;
3770
3771 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3772 arg1 = builtin_save_expr (arg1);
3773 arg2 = builtin_save_expr (arg2);
3774
3775 arg1_rtx = get_memory_rtx (arg1, NULL);
3776 arg2_rtx = get_memory_rtx (arg2, NULL);
3777
3778 /* Try to call cmpstrsi. */
3779 if (cmpstr_icode != CODE_FOR_nothing)
3780 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3781 MIN (arg1_align, arg2_align));
3782
3783 /* Try to determine at least one length and call cmpstrnsi. */
3784 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3785 {
3786 tree len;
3787 rtx arg3_rtx;
3788
3789 tree len1 = c_strlen (arg1, 1);
3790 tree len2 = c_strlen (arg2, 1);
3791
3792 if (len1)
3793 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3794 if (len2)
3795 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3796
3797 /* If we don't have a constant length for the first, use the length
3798 of the second, if we know it. We don't require a constant for
3799 this case; some cost analysis could be done if both are available
3800 but neither is constant. For now, assume they're equally cheap,
3801 unless one has side effects. If both strings have constant lengths,
3802 use the smaller. */
3803
3804 if (!len1)
3805 len = len2;
3806 else if (!len2)
3807 len = len1;
3808 else if (TREE_SIDE_EFFECTS (len1))
3809 len = len2;
3810 else if (TREE_SIDE_EFFECTS (len2))
3811 len = len1;
3812 else if (TREE_CODE (len1) != INTEGER_CST)
3813 len = len2;
3814 else if (TREE_CODE (len2) != INTEGER_CST)
3815 len = len1;
3816 else if (tree_int_cst_lt (len1, len2))
3817 len = len1;
3818 else
3819 len = len2;
3820
3821 /* If both arguments have side effects, we cannot optimize. */
3822 if (len && !TREE_SIDE_EFFECTS (len))
3823 {
3824 arg3_rtx = expand_normal (len);
3825 result = expand_cmpstrn_or_cmpmem
3826 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3827 arg3_rtx, MIN (arg1_align, arg2_align));
3828 }
3829 }
3830
3831 if (result)
3832 {
3833 /* Return the value in the proper mode for this function. */
3834 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3835 if (GET_MODE (result) == mode)
3836 return result;
3837 if (target == 0)
3838 return convert_to_mode (mode, result, 0);
3839 convert_move (target, result, 0);
3840 return target;
3841 }
3842
3843 /* Expand the library call ourselves using a stabilized argument
3844 list to avoid re-evaluating the function's arguments twice. */
3845 fndecl = get_callee_fndecl (exp);
3846 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3847 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3848 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3849 return expand_call (fn, target, target == const0_rtx);
3850 }
3851 return NULL_RTX;
3852 }
3853
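/* To make the length selection above concrete (hypothetical
   values): for strcmp (s, "hi") with S of unknown length, LEN1 is
   unknown and LEN2 == 3 (strlen plus the terminating NUL), so
   cmpstrnsi is tried with length 3 -- any difference, including a
   NUL inside S, must show up within the first 3 bytes.  */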
3854 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3855 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3856 try to get the result in TARGET, if convenient. */
3857
3858 static rtx
3859 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3860 ATTRIBUTE_UNUSED machine_mode mode)
3861 {
3862 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3863
3864 if (!validate_arglist (exp,
3865 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3866 return NULL_RTX;
3867
3868 /* If c_strlen can determine an expression for one of the string
3869 lengths, and it doesn't have side effects, then emit cmpstrnsi
3870 using length MIN(strlen(string)+1, arg3). */
3871 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3872 if (cmpstrn_icode != CODE_FOR_nothing)
3873 {
3874 tree len, len1, len2;
3875 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3876 rtx result;
3877 tree fndecl, fn;
3878 tree arg1 = CALL_EXPR_ARG (exp, 0);
3879 tree arg2 = CALL_EXPR_ARG (exp, 1);
3880 tree arg3 = CALL_EXPR_ARG (exp, 2);
3881
3882 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3883 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3884
3885 len1 = c_strlen (arg1, 1);
3886 len2 = c_strlen (arg2, 1);
3887
3888 if (len1)
3889 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3890 if (len2)
3891 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3892
3893 /* If we don't have a constant length for the first, use the length
3894 of the second, if we know it. We don't require a constant for
3895 this case; some cost analysis could be done if both are available
3896 but neither is constant. For now, assume they're equally cheap,
3897 unless one has side effects. If both strings have constant lengths,
3898 use the smaller. */
3899
3900 if (!len1)
3901 len = len2;
3902 else if (!len2)
3903 len = len1;
3904 else if (TREE_SIDE_EFFECTS (len1))
3905 len = len2;
3906 else if (TREE_SIDE_EFFECTS (len2))
3907 len = len1;
3908 else if (TREE_CODE (len1) != INTEGER_CST)
3909 len = len2;
3910 else if (TREE_CODE (len2) != INTEGER_CST)
3911 len = len1;
3912 else if (tree_int_cst_lt (len1, len2))
3913 len = len1;
3914 else
3915 len = len2;
3916
3917 /* If both arguments have side effects, we cannot optimize. */
3918 if (!len || TREE_SIDE_EFFECTS (len))
3919 return NULL_RTX;
3920
3921 /* The actual new length parameter is MIN(len,arg3). */
3922 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3923 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3924
3925 /* If we don't have POINTER_TYPE, call the function. */
3926 if (arg1_align == 0 || arg2_align == 0)
3927 return NULL_RTX;
3928
3929 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3930 arg1 = builtin_save_expr (arg1);
3931 arg2 = builtin_save_expr (arg2);
3932 len = builtin_save_expr (len);
3933
3934 arg1_rtx = get_memory_rtx (arg1, len);
3935 arg2_rtx = get_memory_rtx (arg2, len);
3936 arg3_rtx = expand_normal (len);
3937 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3938 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3939 MIN (arg1_align, arg2_align));
3940 if (result)
3941 {
3942 /* Return the value in the proper mode for this function. */
3943 mode = TYPE_MODE (TREE_TYPE (exp));
3944 if (GET_MODE (result) == mode)
3945 return result;
3946 if (target == 0)
3947 return convert_to_mode (mode, result, 0);
3948 convert_move (target, result, 0);
3949 return target;
3950 }
3951
3952 /* Expand the library call ourselves using a stabilized argument
3953 list to avoid re-evaluating the function's arguments twice. */
3954 fndecl = get_callee_fndecl (exp);
3955 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3956 arg1, arg2, len);
3957 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3958 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3959 return expand_call (fn, target, target == const0_rtx);
3960 }
3961 return NULL_RTX;
3962 }
3963
3964 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3965 if that's convenient. */
3966
3967 rtx
3968 expand_builtin_saveregs (void)
3969 {
3970 rtx val;
3971 rtx_insn *seq;
3972
3973 /* Don't do __builtin_saveregs more than once in a function.
3974 Save the result of the first call and reuse it. */
3975 if (saveregs_value != 0)
3976 return saveregs_value;
3977
3978 /* When this function is called, it means that registers must be
3979 saved on entry to this function. So we migrate the call to the
3980 first insn of this function. */
3981
3982 start_sequence ();
3983
3984 /* Do whatever the machine needs done in this case. */
3985 val = targetm.calls.expand_builtin_saveregs ();
3986
3987 seq = get_insns ();
3988 end_sequence ();
3989
3990 saveregs_value = val;
3991
3992 /* Put the insns after the NOTE that starts the function. If this
3993 is inside a start_sequence, make the outer-level insn chain current, so
3994 the code is placed at the start of the function. */
3995 push_topmost_sequence ();
3996 emit_insn_after (seq, entry_of_function ());
3997 pop_topmost_sequence ();
3998
3999 return val;
4000 }
4001
4002 /* Expand a call to __builtin_next_arg. */
4003
4004 static rtx
4005 expand_builtin_next_arg (void)
4006 {
4007 /* Checking arguments is already done in fold_builtin_next_arg,
4008 which must be called before this function. */
4009 return expand_binop (ptr_mode, add_optab,
4010 crtl->args.internal_arg_pointer,
4011 crtl->args.arg_offset_rtx,
4012 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4013 }
4014
4015 /* Make it easier for the backends by protecting the valist argument
4016 from multiple evaluations. */
4017
4018 static tree
4019 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4020 {
4021 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4022
4023 /* The current way of determining the type of valist is completely
4024 bogus. We should have the information on the va builtin instead. */
4025 if (!vatype)
4026 vatype = targetm.fn_abi_va_list (cfun->decl);
4027
4028 if (TREE_CODE (vatype) == ARRAY_TYPE)
4029 {
4030 if (TREE_SIDE_EFFECTS (valist))
4031 valist = save_expr (valist);
4032
4033 /* For this case, the backends will be expecting a pointer to
4034 vatype, but it's possible we've actually been given an array
4035 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4036 So fix it. */
4037 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4038 {
4039 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4040 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4041 }
4042 }
4043 else
4044 {
4045 tree pt = build_pointer_type (vatype);
4046
4047 if (! needs_lvalue)
4048 {
4049 if (! TREE_SIDE_EFFECTS (valist))
4050 return valist;
4051
4052 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4053 TREE_SIDE_EFFECTS (valist) = 1;
4054 }
4055
4056 if (TREE_SIDE_EFFECTS (valist))
4057 valist = save_expr (valist);
4058 valist = fold_build2_loc (loc, MEM_REF,
4059 vatype, valist, build_int_cst (pt, 0));
4060 }
4061
4062 return valist;
4063 }
4064
4065 /* The "standard" definition of va_list is void*. */
4066
4067 tree
4068 std_build_builtin_va_list (void)
4069 {
4070 return ptr_type_node;
4071 }
4072
4073 /* The "standard" abi va_list is va_list_type_node. */
4074
4075 tree
4076 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4077 {
4078 return va_list_type_node;
4079 }
4080
4081 /* The "standard" type of va_list is va_list_type_node. */
4082
4083 tree
4084 std_canonical_va_list_type (tree type)
4085 {
4086 tree wtype, htype;
4087
4088 wtype = va_list_type_node;
4089 htype = type;
4090
4091 if (TREE_CODE (wtype) == ARRAY_TYPE)
4092 {
4093 /* If va_list is an array type, the argument may have decayed
4094 to a pointer type, e.g. by being passed to another function.
4095 In that case, unwrap both types so that we can compare the
4096 underlying records. */
4097 if (TREE_CODE (htype) == ARRAY_TYPE
4098 || POINTER_TYPE_P (htype))
4099 {
4100 wtype = TREE_TYPE (wtype);
4101 htype = TREE_TYPE (htype);
4102 }
4103 }
4104 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4105 return va_list_type_node;
4106
4107 return NULL_TREE;
4108 }
4109
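/* Example of the decay handled above: on targets whose va_list is
   an array type (e.g. the x86-64 psABI's __va_list_tag[1]), a
   va_list passed to another function arrives as a pointer, so both
   WTYPE and HTYPE are unwrapped to the underlying record before
   the main variants are compared.  */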
4110 /* The "standard" implementation of va_start: just assign `nextarg' to
4111 the variable. */
4112
4113 void
4114 std_expand_builtin_va_start (tree valist, rtx nextarg)
4115 {
4116 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4117 convert_move (va_r, nextarg, 0);
4118
4119 /* We do not have any valid bounds for the pointer, so
4120 just store zero bounds for it. */
4121 if (chkp_function_instrumented_p (current_function_decl))
4122 chkp_expand_bounds_reset_for_mem (valist,
4123 make_tree (TREE_TYPE (valist),
4124 nextarg));
4125 }
4126
4127 /* Expand EXP, a call to __builtin_va_start. */
4128
4129 static rtx
4130 expand_builtin_va_start (tree exp)
4131 {
4132 rtx nextarg;
4133 tree valist;
4134 location_t loc = EXPR_LOCATION (exp);
4135
4136 if (call_expr_nargs (exp) < 2)
4137 {
4138 error_at (loc, "too few arguments to function %<va_start%>");
4139 return const0_rtx;
4140 }
4141
4142 if (fold_builtin_next_arg (exp, true))
4143 return const0_rtx;
4144
4145 nextarg = expand_builtin_next_arg ();
4146 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4147
4148 if (targetm.expand_builtin_va_start)
4149 targetm.expand_builtin_va_start (valist, nextarg);
4150 else
4151 std_expand_builtin_va_start (valist, nextarg);
4152
4153 return const0_rtx;
4154 }
4155
4156 /* Expand EXP, a call to __builtin_va_end. */
4157
4158 static rtx
4159 expand_builtin_va_end (tree exp)
4160 {
4161 tree valist = CALL_EXPR_ARG (exp, 0);
4162
4163 /* Evaluate for side effects, if needed. I hate macros that don't
4164 do that. */
4165 if (TREE_SIDE_EFFECTS (valist))
4166 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4167
4168 return const0_rtx;
4169 }
4170
4171 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4172 builtin rather than just as an assignment in stdarg.h because of the
4173 nastiness of array-type va_list types. */
4174
4175 static rtx
4176 expand_builtin_va_copy (tree exp)
4177 {
4178 tree dst, src, t;
4179 location_t loc = EXPR_LOCATION (exp);
4180
4181 dst = CALL_EXPR_ARG (exp, 0);
4182 src = CALL_EXPR_ARG (exp, 1);
4183
4184 dst = stabilize_va_list_loc (loc, dst, 1);
4185 src = stabilize_va_list_loc (loc, src, 0);
4186
4187 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4188
4189 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4190 {
4191 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4192 TREE_SIDE_EFFECTS (t) = 1;
4193 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4194 }
4195 else
4196 {
4197 rtx dstb, srcb, size;
4198
4199 /* Evaluate to pointers. */
4200 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4201 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4202 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4203 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4204
4205 dstb = convert_memory_address (Pmode, dstb);
4206 srcb = convert_memory_address (Pmode, srcb);
4207
4208 /* "Dereference" to BLKmode memories. */
4209 dstb = gen_rtx_MEM (BLKmode, dstb);
4210 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4211 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4212 srcb = gen_rtx_MEM (BLKmode, srcb);
4213 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4214 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4215
4216 /* Copy. */
4217 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4218 }
4219
4220 return const0_rtx;
4221 }
4222
4223 /* Expand a call to one of the builtin functions __builtin_frame_address or
4224 __builtin_return_address. */
4225
4226 static rtx
4227 expand_builtin_frame_address (tree fndecl, tree exp)
4228 {
4229 /* The argument must be a nonnegative integer constant.
4230 It counts the number of frames to scan up the stack.
4231 The value is either the frame pointer value or the return
4232 address saved in that frame. */
4233 if (call_expr_nargs (exp) == 0)
4234 /* Warning about missing arg was already issued. */
4235 return const0_rtx;
4236 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4237 {
4238 error ("invalid argument to %qD", fndecl);
4239 return const0_rtx;
4240 }
4241 else
4242 {
4243 /* Number of frames to scan up the stack. */
4244 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4245
4246 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4247
4248 /* Some ports cannot access arbitrary stack frames. */
4249 if (tem == NULL)
4250 {
4251 warning (0, "unsupported argument to %qD", fndecl);
4252 return const0_rtx;
4253 }
4254
4255 if (count)
4256 {
4257 /* Warn since no effort is made to ensure that any frame
4258 beyond the current one exists or can be safely reached. */
4259 warning (OPT_Wframe_address, "calling %qD with "
4260 "a nonzero argument is unsafe", fndecl);
4261 }
4262
4263 /* For __builtin_frame_address, return what we've got. */
4264 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4265 return tem;
4266
4267 if (!REG_P (tem)
4268 && ! CONSTANT_P (tem))
4269 tem = copy_addr_to_reg (tem);
4270 return tem;
4271 }
4272 }
4273
4274 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4275 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4276 is the same as for allocate_dynamic_stack_space. */
4277
4278 static rtx
4279 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4280 {
4281 rtx op0;
4282 rtx result;
4283 bool valid_arglist;
4284 unsigned int align;
4285 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4286 == BUILT_IN_ALLOCA_WITH_ALIGN);
4287
4288 valid_arglist
4289 = (alloca_with_align
4290 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4291 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4292
4293 if (!valid_arglist)
4294 return NULL_RTX;
4295
4296 /* Compute the argument. */
4297 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4298
4299 /* Compute the alignment. */
4300 align = (alloca_with_align
4301 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4302 : BIGGEST_ALIGNMENT);
4303
4304 /* Allocate the desired space. */
4305 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4306 result = convert_memory_address (ptr_mode, result);
4307
4308 return result;
4309 }
4310
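/* Note the asymmetry above: plain alloca aligns the result to
   BIGGEST_ALIGNMENT, while __builtin_alloca_with_align takes the
   requested alignment, in bits, as its second argument, e.g.
   __builtin_alloca_with_align (n, 256) for 32-byte alignment (a
   hypothetical example).  */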
4311 /* Expand a call to bswap builtin in EXP.
4312 Return NULL_RTX if a normal call should be emitted rather than expanding the
4313 function in-line. If convenient, the result should be placed in TARGET.
4314 SUBTARGET may be used as the target for computing one of EXP's operands. */
4315
4316 static rtx
4317 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4318 rtx subtarget)
4319 {
4320 tree arg;
4321 rtx op0;
4322
4323 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4324 return NULL_RTX;
4325
4326 arg = CALL_EXPR_ARG (exp, 0);
4327 op0 = expand_expr (arg,
4328 subtarget && GET_MODE (subtarget) == target_mode
4329 ? subtarget : NULL_RTX,
4330 target_mode, EXPAND_NORMAL);
4331 if (GET_MODE (op0) != target_mode)
4332 op0 = convert_to_mode (target_mode, op0, 1);
4333
4334 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4335
4336 gcc_assert (target);
4337
4338 return convert_to_mode (target_mode, target, 1);
4339 }
4340
4341 /* Expand a call to a unary builtin in EXP.
4342 Return NULL_RTX if a normal call should be emitted rather than expanding the
4343 function in-line. If convenient, the result should be placed in TARGET.
4344 SUBTARGET may be used as the target for computing one of EXP's operands. */
4345
4346 static rtx
4347 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4348 rtx subtarget, optab op_optab)
4349 {
4350 rtx op0;
4351
4352 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4353 return NULL_RTX;
4354
4355 /* Compute the argument. */
4356 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4357 (subtarget
4358 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4359 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4360 VOIDmode, EXPAND_NORMAL);
4361 /* Compute op, into TARGET if possible.
4362 Set TARGET to wherever the result comes back. */
4363 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4364 op_optab, op0, target, op_optab != clrsb_optab);
4365 gcc_assert (target);
4366
4367 return convert_to_mode (target_mode, target, 0);
4368 }
4369
4370 /* Expand a call to __builtin_expect. We just return our argument
4371 as the builtin_expect semantics should already have been executed by
4372 the tree branch prediction pass. */
4373
4374 static rtx
4375 expand_builtin_expect (tree exp, rtx target)
4376 {
4377 tree arg;
4378
4379 if (call_expr_nargs (exp) < 2)
4380 return const0_rtx;
4381 arg = CALL_EXPR_ARG (exp, 0);
4382
4383 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4384 /* When guessing was done, the hints should be already stripped away. */
4385 gcc_assert (!flag_guess_branch_prob
4386 || optimize == 0 || seen_error ());
4387 return target;
4388 }
4389
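/* That is, by the time expansion sees

     if (__builtin_expect (x, 0)) ...

   the probability hint has already been consumed by the tree-level
   branch predictors, and the call degenerates to its first
   argument X.  */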
4390 /* Expand a call to __builtin_assume_aligned. We just return our first
4391 argument as the builtin_assume_aligned semantic should've been already
4392 executed by CCP. */
4393
4394 static rtx
4395 expand_builtin_assume_aligned (tree exp, rtx target)
4396 {
4397 if (call_expr_nargs (exp) < 2)
4398 return const0_rtx;
4399 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4400 EXPAND_NORMAL);
4401 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4402 && (call_expr_nargs (exp) < 3
4403 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4404 return target;
4405 }
4406
4407 void
4408 expand_builtin_trap (void)
4409 {
4410 if (targetm.have_trap ())
4411 {
4412 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4413 /* For trap insns when not accumulating outgoing args force
4414 REG_ARGS_SIZE note to prevent crossjumping of calls with
4415 different args sizes. */
4416 if (!ACCUMULATE_OUTGOING_ARGS)
4417 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4418 }
4419 else
4420 {
4421 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4422 tree call_expr = build_call_expr (fn, 0);
4423 expand_call (call_expr, NULL_RTX, false);
4424 }
4425
4426 emit_barrier ();
4427 }
4428
4429 /* Expand a call to __builtin_unreachable. We do nothing except emit
4430 a barrier saying that control flow will not pass here.
4431
4432 It is the responsibility of the program being compiled to ensure
4433 that control flow never reaches __builtin_unreachable. */
4434 static void
4435 expand_builtin_unreachable (void)
4436 {
4437 emit_barrier ();
4438 }
4439
4440 /* Expand EXP, a call to fabs, fabsf or fabsl.
4441 Return NULL_RTX if a normal call should be emitted rather than expanding
4442 the function inline. If convenient, the result should be placed
4443 in TARGET. SUBTARGET may be used as the target for computing
4444 the operand. */
4445
4446 static rtx
4447 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4448 {
4449 machine_mode mode;
4450 tree arg;
4451 rtx op0;
4452
4453 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4454 return NULL_RTX;
4455
4456 arg = CALL_EXPR_ARG (exp, 0);
4457 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4458 mode = TYPE_MODE (TREE_TYPE (arg));
4459 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4460 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4461 }
4462
4463 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4464 Return NULL if a normal call should be emitted rather than expanding the
4465 function inline. If convenient, the result should be placed in TARGET.
4466 SUBTARGET may be used as the target for computing the operand. */
4467
4468 static rtx
4469 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4470 {
4471 rtx op0, op1;
4472 tree arg;
4473
4474 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4475 return NULL_RTX;
4476
4477 arg = CALL_EXPR_ARG (exp, 0);
4478 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4479
4480 arg = CALL_EXPR_ARG (exp, 1);
4481 op1 = expand_normal (arg);
4482
4483 return expand_copysign (op0, op1, target);
4484 }
4485
4486 /* Expand a call to __builtin___clear_cache. */
4487
4488 static rtx
4489 expand_builtin___clear_cache (tree exp)
4490 {
4491 if (!targetm.code_for_clear_cache)
4492 {
4493 #ifdef CLEAR_INSN_CACHE
4494 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4495 does something. Just do the default expansion to a call to
4496 __clear_cache(). */
4497 return NULL_RTX;
4498 #else
4499 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4500 does nothing. There is no need to call it. Do nothing. */
4501 return const0_rtx;
4502 #endif /* CLEAR_INSN_CACHE */
4503 }
4504
4505 /* We have a "clear_cache" insn, and it will handle everything. */
4506 tree begin, end;
4507 rtx begin_rtx, end_rtx;
4508
4509 /* We must not expand to a library call. If we did, any
4510 fallback library function in libgcc that might contain a call to
4511 __builtin___clear_cache() would recurse infinitely. */
4512 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4513 {
4514 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4515 return const0_rtx;
4516 }
4517
4518 if (targetm.have_clear_cache ())
4519 {
4520 struct expand_operand ops[2];
4521
4522 begin = CALL_EXPR_ARG (exp, 0);
4523 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4524
4525 end = CALL_EXPR_ARG (exp, 1);
4526 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4527
4528 create_address_operand (&ops[0], begin_rtx);
4529 create_address_operand (&ops[1], end_rtx);
4530 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4531 return const0_rtx;
4532 }
4533 return const0_rtx;
4534 }
4535
4536 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4537
4538 static rtx
4539 round_trampoline_addr (rtx tramp)
4540 {
4541 rtx temp, addend, mask;
4542
4543 /* If we don't need too much alignment, we'll have been guaranteed
4544 proper alignment by get_trampoline_type. */
4545 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4546 return tramp;
4547
4548 /* Round address up to desired boundary. */
4549 temp = gen_reg_rtx (Pmode);
4550 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4551 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4552
4553 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4554 temp, 0, OPTAB_LIB_WIDEN);
4555 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4556 temp, 0, OPTAB_LIB_WIDEN);
4557
4558 return tramp;
4559 }
4560
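/* Worked example for the rounding above (hypothetical numbers):
   with TRAMPOLINE_ALIGNMENT == 64 bits, ADDEND is 7 and MASK is
   -8, so an address of 0x1003 becomes (0x1003 + 7) & -8 == 0x1008,
   the next 8-byte boundary.  */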
4561 static rtx
4562 expand_builtin_init_trampoline (tree exp, bool onstack)
4563 {
4564 tree t_tramp, t_func, t_chain;
4565 rtx m_tramp, r_tramp, r_chain, tmp;
4566
4567 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4568 POINTER_TYPE, VOID_TYPE))
4569 return NULL_RTX;
4570
4571 t_tramp = CALL_EXPR_ARG (exp, 0);
4572 t_func = CALL_EXPR_ARG (exp, 1);
4573 t_chain = CALL_EXPR_ARG (exp, 2);
4574
4575 r_tramp = expand_normal (t_tramp);
4576 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4577 MEM_NOTRAP_P (m_tramp) = 1;
4578
4579 /* If ONSTACK, the TRAMP argument should be the address of a field
4580 within the local function's FRAME decl. Either way, let's see if
4581 we can fill in the MEM_ATTRs for this memory. */
4582 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4583 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4584
4585 /* Creator of a heap trampoline is responsible for making sure the
4586 address is aligned to at least STACK_BOUNDARY. Normally malloc
4587 will ensure this anyhow. */
4588 tmp = round_trampoline_addr (r_tramp);
4589 if (tmp != r_tramp)
4590 {
4591 m_tramp = change_address (m_tramp, BLKmode, tmp);
4592 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4593 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4594 }
4595
4596 /* The FUNC argument should be the address of the nested function.
4597 Extract the actual function decl to pass to the hook. */
4598 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4599 t_func = TREE_OPERAND (t_func, 0);
4600 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4601
4602 r_chain = expand_normal (t_chain);
4603
4604 /* Generate insns to initialize the trampoline. */
4605 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4606
4607 if (onstack)
4608 {
4609 trampolines_created = 1;
4610
4611 if (targetm.calls.custom_function_descriptors != 0)
4612 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4613 "trampoline generated for nested function %qD", t_func);
4614 }
4615
4616 return const0_rtx;
4617 }
4618
4619 static rtx
4620 expand_builtin_adjust_trampoline (tree exp)
4621 {
4622 rtx tramp;
4623
4624 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4625 return NULL_RTX;
4626
4627 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4628 tramp = round_trampoline_addr (tramp);
4629 if (targetm.calls.trampoline_adjust_address)
4630 tramp = targetm.calls.trampoline_adjust_address (tramp);
4631
4632 return tramp;
4633 }
4634
4635 /* Expand a call to the builtin descriptor initialization routine.
4636 A descriptor is made up of a pair of pointers: one to the static
4637 chain and one to the code entry, in this order. */
4638
4639 static rtx
4640 expand_builtin_init_descriptor (tree exp)
4641 {
4642 tree t_descr, t_func, t_chain;
4643 rtx m_descr, r_descr, r_func, r_chain;
4644
4645 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
4646 VOID_TYPE))
4647 return NULL_RTX;
4648
4649 t_descr = CALL_EXPR_ARG (exp, 0);
4650 t_func = CALL_EXPR_ARG (exp, 1);
4651 t_chain = CALL_EXPR_ARG (exp, 2);
4652
4653 r_descr = expand_normal (t_descr);
4654 m_descr = gen_rtx_MEM (BLKmode, r_descr);
4655 MEM_NOTRAP_P (m_descr) = 1;
4656
4657 r_func = expand_normal (t_func);
4658 r_chain = expand_normal (t_chain);
4659
4660 /* Generate insns to initialize the descriptor. */
4661 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
4662 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
4663 POINTER_SIZE / BITS_PER_UNIT), r_func);
4664
4665 return const0_rtx;
4666 }
4667
4668 /* Expand a call to the builtin descriptor adjustment routine. */
4669
4670 static rtx
4671 expand_builtin_adjust_descriptor (tree exp)
4672 {
4673 rtx tramp;
4674
4675 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4676 return NULL_RTX;
4677
4678 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4679
4680 /* Unalign the descriptor to allow runtime identification. */
4681 tramp = plus_constant (ptr_mode, tramp,
4682 targetm.calls.custom_function_descriptors);
4683
4684 return force_operand (tramp, NULL_RTX);
4685 }
4686
4687 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4688 function. The function first checks whether the back end provides
4689 an insn to implement signbit for the respective mode. If not, it
4690 checks whether the floating point format of the value is such that
4691 the sign bit can be extracted. If that is not the case, error out.
4692 EXP is the expression that is a call to the builtin function; if
4693 convenient, the result should be placed in TARGET. */
4694 static rtx
4695 expand_builtin_signbit (tree exp, rtx target)
4696 {
4697 const struct real_format *fmt;
4698 machine_mode fmode, imode, rmode;
4699 tree arg;
4700 int word, bitpos;
4701 enum insn_code icode;
4702 rtx temp;
4703 location_t loc = EXPR_LOCATION (exp);
4704
4705 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4706 return NULL_RTX;
4707
4708 arg = CALL_EXPR_ARG (exp, 0);
4709 fmode = TYPE_MODE (TREE_TYPE (arg));
4710 rmode = TYPE_MODE (TREE_TYPE (exp));
4711 fmt = REAL_MODE_FORMAT (fmode);
4712
4713 arg = builtin_save_expr (arg);
4714
4715 /* Expand the argument, yielding an RTX expression. */
4716 temp = expand_normal (arg);
4717
4718 /* Check if the back end provides an insn that handles signbit for the
4719 argument's mode. */
4720 icode = optab_handler (signbit_optab, fmode);
4721 if (icode != CODE_FOR_nothing)
4722 {
4723 rtx_insn *last = get_last_insn ();
4724 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4725 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4726 return target;
4727 delete_insns_since (last);
4728 }
4729
4730 /* For floating point formats without a sign bit, implement signbit
4731 as "ARG < 0.0". */
4732 bitpos = fmt->signbit_ro;
4733 if (bitpos < 0)
4734 {
4735 /* But we can't do this if the format supports signed zero. */
4736 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4737
4738 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4739 build_real (TREE_TYPE (arg), dconst0));
4740 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4741 }
4742
4743 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4744 {
4745 imode = int_mode_for_mode (fmode);
4746 gcc_assert (imode != BLKmode);
4747 temp = gen_lowpart (imode, temp);
4748 }
4749 else
4750 {
4751 imode = word_mode;
4752 /* Handle targets with different FP word orders. */
4753 if (FLOAT_WORDS_BIG_ENDIAN)
4754 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4755 else
4756 word = bitpos / BITS_PER_WORD;
4757 temp = operand_subword_force (temp, word, fmode);
4758 bitpos = bitpos % BITS_PER_WORD;
4759 }
4760
4761 /* Force the intermediate word_mode (or narrower) result into a
4762 register. This avoids attempting to create paradoxical SUBREGs
4763 of floating point modes below. */
4764 temp = force_reg (imode, temp);
4765
4766 /* If the bitpos is within the "result mode" lowpart, the operation
4767 can be implemented with a single bitwise AND. Otherwise, we need
4768 a right shift and an AND. */
4769
4770 if (bitpos < GET_MODE_BITSIZE (rmode))
4771 {
4772 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4773
4774 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4775 temp = gen_lowpart (rmode, temp);
4776 temp = expand_binop (rmode, and_optab, temp,
4777 immed_wide_int_const (mask, rmode),
4778 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4779 }
4780 else
4781 {
4782 /* Perform a logical right shift to place the signbit in the least
4783 significant bit, then truncate the result to the desired mode
4784 and mask just this bit. */
4785 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4786 temp = gen_lowpart (rmode, temp);
4787 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4788 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4789 }
4790
4791 return temp;
4792 }
4793
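/* Worked example for the bit extraction above, assuming IEEE
   single precision in a 32-bit word with a 32-bit result mode:
   BITPOS is 31, which lies within the RMODE lowpart, so signbit
   reduces to a single AND with the mask 0x80000000.  Were RMODE
   narrower than 32 bits, the logical right shift by 31 followed by
   an AND with 1 would be used instead.  */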
4794 /* Expand fork or exec calls. TARGET is the desired target of the
4795 call. EXP is the call. FN is the
4796 identifier of the actual function. IGNORE is nonzero if the
4797 value is to be ignored. */
4798
4799 static rtx
4800 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4801 {
4802 tree id, decl;
4803 tree call;
4804
4805 /* If we are not profiling, just call the function. */
4806 if (!profile_arc_flag)
4807 return NULL_RTX;
4808
4809 /* Otherwise call the wrapper. This should be equivalent for the rest of
4810 compiler, so the code does not diverge, and the wrapper may run the
4811 code necessary for keeping the profiling sane. */
4812
4813 switch (DECL_FUNCTION_CODE (fn))
4814 {
4815 case BUILT_IN_FORK:
4816 id = get_identifier ("__gcov_fork");
4817 break;
4818
4819 case BUILT_IN_EXECL:
4820 id = get_identifier ("__gcov_execl");
4821 break;
4822
4823 case BUILT_IN_EXECV:
4824 id = get_identifier ("__gcov_execv");
4825 break;
4826
4827 case BUILT_IN_EXECLP:
4828 id = get_identifier ("__gcov_execlp");
4829 break;
4830
4831 case BUILT_IN_EXECLE:
4832 id = get_identifier ("__gcov_execle");
4833 break;
4834
4835 case BUILT_IN_EXECVP:
4836 id = get_identifier ("__gcov_execvp");
4837 break;
4838
4839 case BUILT_IN_EXECVE:
4840 id = get_identifier ("__gcov_execve");
4841 break;
4842
4843 default:
4844 gcc_unreachable ();
4845 }
4846
4847 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4848 FUNCTION_DECL, id, TREE_TYPE (fn));
4849 DECL_EXTERNAL (decl) = 1;
4850 TREE_PUBLIC (decl) = 1;
4851 DECL_ARTIFICIAL (decl) = 1;
4852 TREE_NOTHROW (decl) = 1;
4853 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4854 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4855 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4856 return expand_call (call, target, ignore);
4857 }
4858
4859
4860 \f
4861 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4862 the pointer in these functions is void*, the tree optimizers may remove
4863 casts. The mode computed in expand_builtin isn't reliable either, due
4864 to __sync_bool_compare_and_swap.
4865
4866 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4867 group of builtins. This gives us log2 of the mode size. */
4868
4869 static inline machine_mode
4870 get_builtin_sync_mode (int fcode_diff)
4871 {
4872 /* The size is not negotiable, so ask not to get BLKmode in return
4873 if the target indicates that a smaller size would be better. */
4874 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4875 }
4876
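/* For instance, BUILT_IN_SYNC_FETCH_AND_ADD_4 sits two entries
   after its FOO_1 code, so FCODE_DIFF == 2 and the mode requested
   is BITS_PER_UNIT << 2 == 32 bits wide, i.e. SImode on typical
   targets.  */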
4877 /* Expand the memory expression LOC and return the appropriate memory operand
4878 for the builtin_sync operations. */
4879
4880 static rtx
4881 get_builtin_sync_mem (tree loc, machine_mode mode)
4882 {
4883 rtx addr, mem;
4884
4885 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4886 addr = convert_memory_address (Pmode, addr);
4887
4888 /* Note that we explicitly do not want any alias information for this
4889 memory, so that we kill all other live memories. Otherwise we don't
4890 satisfy the full barrier semantics of the intrinsic. */
4891 mem = validize_mem (gen_rtx_MEM (mode, addr));
4892
4893 /* The alignment needs to be at least that of the mode. */
4894 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4895 get_pointer_alignment (loc)));
4896 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4897 MEM_VOLATILE_P (mem) = 1;
4898
4899 return mem;
4900 }
4901
4902 /* Make sure an argument is in the right mode.
4903 EXP is the tree argument.
4904 MODE is the mode it should be in. */
4905
4906 static rtx
4907 expand_expr_force_mode (tree exp, machine_mode mode)
4908 {
4909 rtx val;
4910 machine_mode old_mode;
4911
4912 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4913 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4914 of CONST_INTs, where we know the old_mode only from the call argument. */
4915
4916 old_mode = GET_MODE (val);
4917 if (old_mode == VOIDmode)
4918 old_mode = TYPE_MODE (TREE_TYPE (exp));
4919 val = convert_modes (mode, old_mode, val, 1);
4920 return val;
4921 }
4922
4923
4924 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4925 EXP is the CALL_EXPR. CODE is the rtx code
4926 that corresponds to the arithmetic or logical operation from the name;
4927 an exception here is that NOT actually means NAND. TARGET is an optional
4928 place for us to store the results; AFTER is true if this is the
4929 fetch_and_xxx form. */
4930
4931 static rtx
4932 expand_builtin_sync_operation (machine_mode mode, tree exp,
4933 enum rtx_code code, bool after,
4934 rtx target)
4935 {
4936 rtx val, mem;
4937 location_t loc = EXPR_LOCATION (exp);
4938
4939 if (code == NOT && warn_sync_nand)
4940 {
4941 tree fndecl = get_callee_fndecl (exp);
4942 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4943
4944 static bool warned_f_a_n, warned_n_a_f;
4945
4946 switch (fcode)
4947 {
4948 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4949 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4950 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4951 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4952 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4953 if (warned_f_a_n)
4954 break;
4955
4956 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4957 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4958 warned_f_a_n = true;
4959 break;
4960
4961 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4962 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4963 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4964 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4965 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4966 if (warned_n_a_f)
4967 break;
4968
4969 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4970 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4971 warned_n_a_f = true;
4972 break;
4973
4974 default:
4975 gcc_unreachable ();
4976 }
4977 }
4978
4979 /* Expand the operands. */
4980 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4981 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4982
4983 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4984 after);
4985 }
4986
4987 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4988 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4989 true if this is the boolean form. TARGET is a place for us to store the
4990 results; this is NOT optional if IS_BOOL is true. */
4991
4992 static rtx
4993 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4994 bool is_bool, rtx target)
4995 {
4996 rtx old_val, new_val, mem;
4997 rtx *pbool, *poval;
4998
4999 /* Expand the operands. */
5000 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5001 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5002 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5003
5004 pbool = poval = NULL;
5005 if (target != const0_rtx)
5006 {
5007 if (is_bool)
5008 pbool = &target;
5009 else
5010 poval = &target;
5011 }
5012 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5013 false, MEMMODEL_SYNC_SEQ_CST,
5014 MEMMODEL_SYNC_SEQ_CST))
5015 return NULL_RTX;
5016
5017 return target;
5018 }
5019
5020 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5021 general form is actually an atomic exchange, and some targets only
5022 support a reduced form with the second argument being a constant 1.
5023 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5024 the results. */
5025
5026 static rtx
5027 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5028 rtx target)
5029 {
5030 rtx val, mem;
5031
5032 /* Expand the operands. */
5033 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5034 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5035
5036 return expand_sync_lock_test_and_set (target, mem, val);
5037 }
5038
5039 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5040
5041 static void
5042 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5043 {
5044 rtx mem;
5045
5046 /* Expand the operands. */
5047 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5048
5049 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5050 }
5051
5052 /* Given an integer representing an ``enum memmodel'', verify its
5053 correctness and return the memory model enum. */
5054
5055 static enum memmodel
5056 get_memmodel (tree exp)
5057 {
5058 rtx op;
5059 unsigned HOST_WIDE_INT val;
5060 source_location loc
5061 = expansion_point_location_if_in_system_header (input_location);
5062
5063 /* If the parameter is not a constant, it's a run time value so we'll just
5064 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5065 if (TREE_CODE (exp) != INTEGER_CST)
5066 return MEMMODEL_SEQ_CST;
5067
5068 op = expand_normal (exp);
5069
5070 val = INTVAL (op);
5071 if (targetm.memmodel_check)
5072 val = targetm.memmodel_check (val);
5073 else if (val & ~MEMMODEL_MASK)
5074 {
5075 warning_at (loc, OPT_Winvalid_memory_model,
5076 "unknown architecture specifier in memory model to builtin");
5077 return MEMMODEL_SEQ_CST;
5078 }
5079
5080 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5081 if (memmodel_base (val) >= MEMMODEL_LAST)
5082 {
5083 warning_at (loc, OPT_Winvalid_memory_model,
5084 "invalid memory model argument to builtin");
5085 return MEMMODEL_SEQ_CST;
5086 }
5087
5088 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5089 be conservative and promote consume to acquire. */
5090 if (val == MEMMODEL_CONSUME)
5091 val = MEMMODEL_ACQUIRE;
5092
5093 return (enum memmodel) val;
5094 }
5095
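/* So, for example, a call such as

     __atomic_load_n (p, __ATOMIC_CONSUME)

   is expanded as though __ATOMIC_ACQUIRE had been given (see the
   PR 59448 note above), and any non-constant model argument is
   silently treated as MEMMODEL_SEQ_CST.  */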
5096 /* Expand the __atomic_exchange intrinsic:
5097 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5098 EXP is the CALL_EXPR.
5099 TARGET is an optional place for us to store the results. */
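/* For illustration: a user-level call such as

     int prev = __atomic_exchange_n (&v, 1, __ATOMIC_SEQ_CST);

   lands here with MODE chosen by the operand size (SImode for a 4-byte
   int on typical targets).  */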
5100
5101 static rtx
5102 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5103 {
5104 rtx val, mem;
5105 enum memmodel model;
5106
5107 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5108
5109 if (!flag_inline_atomics)
5110 return NULL_RTX;
5111
5112 /* Expand the operands. */
5113 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5114 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5115
5116 return expand_atomic_exchange (target, mem, val, model);
5117 }
5118
5119 /* Expand the __atomic_compare_exchange intrinsic:
5120 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5121 TYPE desired, BOOL weak,
5122 enum memmodel success,
5123 enum memmodel failure)
5124 EXP is the CALL_EXPR.
5125 TARGET is an optional place for us to store the results. */
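/* For illustration: in user code such as

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&v, &expected, 1, 0,
					    __ATOMIC_ACQ_REL,
					    __ATOMIC_ACQUIRE);

   EXPECT arrives as the address of EXPECTED; on failure the value observed
   in V is stored back through it (the conditional store at the end of this
   function).  */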
5126
5127 static rtx
5128 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5129 rtx target)
5130 {
5131 rtx expect, desired, mem, oldval;
5132 rtx_code_label *label;
5133 enum memmodel success, failure;
5134 tree weak;
5135 bool is_weak;
5136 source_location loc
5137 = expansion_point_location_if_in_system_header (input_location);
5138
5139 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5140 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5141
5142 if (failure > success)
5143 {
5144 warning_at (loc, OPT_Winvalid_memory_model,
5145 "failure memory model cannot be stronger than success "
5146 "memory model for %<__atomic_compare_exchange%>");
5147 success = MEMMODEL_SEQ_CST;
5148 }
5149
5150 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5151 {
5152 warning_at (loc, OPT_Winvalid_memory_model,
5153 "invalid failure memory model for "
5154 "%<__atomic_compare_exchange%>");
5155 failure = MEMMODEL_SEQ_CST;
5156 success = MEMMODEL_SEQ_CST;
5157 }
5158
5159
5160 if (!flag_inline_atomics)
5161 return NULL_RTX;
5162
5163 /* Expand the operands. */
5164 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5165
5166 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5167 expect = convert_memory_address (Pmode, expect);
5168 expect = gen_rtx_MEM (mode, expect);
5169 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5170
5171 weak = CALL_EXPR_ARG (exp, 3);
5172 is_weak = false;
5173 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5174 is_weak = true;
5175
5176 if (target == const0_rtx)
5177 target = NULL;
5178
5179 /* Lest the rtl backend create a race condition with an improper store
5180 to memory, always create a new pseudo for OLDVAL. */
5181 oldval = NULL;
5182
5183 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5184 is_weak, success, failure))
5185 return NULL_RTX;
5186
5187 /* Conditionally store back to EXPECT, lest we create a race condition
5188 with an improper store to memory. */
5189 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5190 the normal case where EXPECT is totally private, i.e. a register. At
5191 which point the store can be unconditional. */
5192 label = gen_label_rtx ();
5193 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5194 GET_MODE (target), 1, label);
5195 emit_move_insn (expect, oldval);
5196 emit_label (label);
5197
5198 return target;
5199 }
5200
5201 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5202 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5203 call. The weak parameter must be dropped to match the expected parameter
5204 list and the expected argument changed from value to pointer to memory
5205 slot. */
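/* For illustration, the transformation performed here is, in outline,

     IFN_ATOMIC_COMPARE_EXCHANGE (ptr, expected, desired, flags, s, f)

   becoming

     T tmp = expected;
     __atomic_compare_exchange_N (ptr, &tmp, desired, s, f)

   where TMP is a fresh stack temporary and N is the byte size implied
   by MODE.  */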
5206
5207 static void
5208 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5209 {
5210 unsigned int z;
5211 vec<tree, va_gc> *vec;
5212
5213 vec_alloc (vec, 5);
5214 vec->quick_push (gimple_call_arg (call, 0));
5215 tree expected = gimple_call_arg (call, 1);
5216 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5217 TREE_TYPE (expected));
5218 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5219 if (expd != x)
5220 emit_move_insn (x, expd);
5221 tree v = make_tree (TREE_TYPE (expected), x);
5222 vec->quick_push (build1 (ADDR_EXPR,
5223 build_pointer_type (TREE_TYPE (expected)), v));
5224 vec->quick_push (gimple_call_arg (call, 2));
5225 /* Skip the boolean weak parameter. */
5226 for (z = 4; z < 6; z++)
5227 vec->quick_push (gimple_call_arg (call, z));
5228 built_in_function fncode
5229 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5230 + exact_log2 (GET_MODE_SIZE (mode)));
5231 tree fndecl = builtin_decl_explicit (fncode);
5232 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5233 fndecl);
5234 tree exp = build_call_vec (boolean_type_node, fn, vec);
5235 tree lhs = gimple_call_lhs (call);
5236 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5237 if (lhs)
5238 {
5239 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5240 if (GET_MODE (boolret) != mode)
5241 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5242 x = force_reg (mode, x);
5243 write_complex_part (target, boolret, true);
5244 write_complex_part (target, x, false);
5245 }
5246 }
5247
5248 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5249
5250 void
5251 expand_ifn_atomic_compare_exchange (gcall *call)
5252 {
5253 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5254 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5255 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5256 rtx expect, desired, mem, oldval, boolret;
5257 enum memmodel success, failure;
5258 tree lhs;
5259 bool is_weak;
5260 source_location loc
5261 = expansion_point_location_if_in_system_header (gimple_location (call));
5262
5263 success = get_memmodel (gimple_call_arg (call, 4));
5264 failure = get_memmodel (gimple_call_arg (call, 5));
5265
5266 if (failure > success)
5267 {
5268 warning_at (loc, OPT_Winvalid_memory_model,
5269 "failure memory model cannot be stronger than success "
5270 "memory model for %<__atomic_compare_exchange%>");
5271 success = MEMMODEL_SEQ_CST;
5272 }
5273
5274 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5275 {
5276 warning_at (loc, OPT_Winvalid_memory_model,
5277 "invalid failure memory model for "
5278 "%<__atomic_compare_exchange%>");
5279 failure = MEMMODEL_SEQ_CST;
5280 success = MEMMODEL_SEQ_CST;
5281 }
5282
5283 if (!flag_inline_atomics)
5284 {
5285 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5286 return;
5287 }
5288
5289 /* Expand the operands. */
5290 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5291
5292 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5293 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5294
5295 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5296
5297 boolret = NULL;
5298 oldval = NULL;
5299
5300 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5301 is_weak, success, failure))
5302 {
5303 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5304 return;
5305 }
5306
5307 lhs = gimple_call_lhs (call);
5308 if (lhs)
5309 {
5310 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5311 if (GET_MODE (boolret) != mode)
5312 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5313 write_complex_part (target, boolret, true);
5314 write_complex_part (target, oldval, false);
5315 }
5316 }
5317
5318 /* Expand the __atomic_load intrinsic:
5319 TYPE __atomic_load (TYPE *object, enum memmodel)
5320 EXP is the CALL_EXPR.
5321 TARGET is an optional place for us to store the results. */
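/* For illustration: __atomic_load_n (&v, __ATOMIC_ACQUIRE) is accepted
   as-is, whereas __atomic_load_n (&v, __ATOMIC_RELEASE) triggers the
   -Winvalid-memory-model warning below and is expanded as seq-cst.  */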
5322
5323 static rtx
5324 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5325 {
5326 rtx mem;
5327 enum memmodel model;
5328
5329 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5330 if (is_mm_release (model) || is_mm_acq_rel (model))
5331 {
5332 source_location loc
5333 = expansion_point_location_if_in_system_header (input_location);
5334 warning_at (loc, OPT_Winvalid_memory_model,
5335 "invalid memory model for %<__atomic_load%>");
5336 model = MEMMODEL_SEQ_CST;
5337 }
5338
5339 if (!flag_inline_atomics)
5340 return NULL_RTX;
5341
5342 /* Expand the operand. */
5343 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5344
5345 return expand_atomic_load (target, mem, model);
5346 }
5347
5348
5349 /* Expand the __atomic_store intrinsic:
5350 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5351 EXP is the CALL_EXPR.
5352 TARGET is an optional place for us to store the results. */
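/* For illustration: __atomic_store_n (&v, 0, __ATOMIC_RELEASE) is a valid
   use; __atomic_store_n (&v, 0, __ATOMIC_ACQUIRE) draws the warning below
   and the model is replaced by seq-cst before expansion.  */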
5353
5354 static rtx
5355 expand_builtin_atomic_store (machine_mode mode, tree exp)
5356 {
5357 rtx mem, val;
5358 enum memmodel model;
5359
5360 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5361 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5362 || is_mm_release (model)))
5363 {
5364 source_location loc
5365 = expansion_point_location_if_in_system_header (input_location);
5366 warning_at (loc, OPT_Winvalid_memory_model,
5367 "invalid memory model for %<__atomic_store%>");
5368 model = MEMMODEL_SEQ_CST;
5369 }
5370
5371 if (!flag_inline_atomics)
5372 return NULL_RTX;
5373
5374 /* Expand the operands. */
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5377
5378 return expand_atomic_store (mem, val, model, false);
5379 }
5380
5381 /* Expand the __atomic_fetch_XXX intrinsic:
5382 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5383 EXP is the CALL_EXPR.
5384 TARGET is an optional place for us to store the results.
5385 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
5386 FETCH_AFTER is true if returning the result of the operation.
5387 FETCH_AFTER is false if returning the value before the operation.
5388 IGNORE is true if the result is not used.
5389 EXT_CALL is the correct builtin for an external call if this cannot be
5390 resolved to an instruction sequence. */
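/* For illustration of the FETCH_AFTER correction: when

     int r = __atomic_add_fetch (&v, 5, __ATOMIC_SEQ_CST);

   cannot be inlined, it is emitted as a call to the library routine
   __atomic_fetch_add_4 (for a 4-byte int), which returns the old value,
   followed by r = r + 5; for the nand variants the correction is
   r = ~(r & val).  */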
5391
5392 static rtx
5393 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5394 enum rtx_code code, bool fetch_after,
5395 bool ignore, enum built_in_function ext_call)
5396 {
5397 rtx val, mem, ret;
5398 enum memmodel model;
5399 tree fndecl;
5400 tree addr;
5401
5402 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5403
5404 /* Expand the operands. */
5405 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5406 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5407
5408 /* Only try generating instructions if inlining is turned on. */
5409 if (flag_inline_atomics)
5410 {
5411 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5412 if (ret)
5413 return ret;
5414 }
5415
5416 /* If this builtin has no alternate library form to fall back on, give up. */
5417 if (ext_call == BUILT_IN_NONE)
5418 return NULL_RTX;
5419
5420 /* Change the call to the specified function. */
5421 fndecl = get_callee_fndecl (exp);
5422 addr = CALL_EXPR_FN (exp);
5423 STRIP_NOPS (addr);
5424
5425 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5426 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5427
5428 /* Expand the call here so we can emit trailing code. */
5429 ret = expand_call (exp, target, ignore);
5430
5431 /* Replace the original function just in case it matters. */
5432 TREE_OPERAND (addr, 0) = fndecl;
5433
5434 /* Then issue the arithmetic correction to return the right result. */
5435 if (!ignore)
5436 {
5437 if (code == NOT)
5438 {
5439 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5440 OPTAB_LIB_WIDEN);
5441 ret = expand_simple_unop (mode, NOT, ret, target, true);
5442 }
5443 else
5444 ret = expand_simple_binop (mode, code, ret, val, target, true,
5445 OPTAB_LIB_WIDEN);
5446 }
5447 return ret;
5448 }
5449
5450 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
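/* These internal calls are typically created earlier, during gimple
   folding, from masking idioms such as

     bool bit = __atomic_fetch_or (&word, 1u << n, __ATOMIC_SEQ_CST)
		& (1u << n);

   which a target providing atomic_bit_test_and_set can implement
   directly (e.g. lock bts on x86).  */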
5451
5452 void
5453 expand_ifn_atomic_bit_test_and (gcall *call)
5454 {
5455 tree ptr = gimple_call_arg (call, 0);
5456 tree bit = gimple_call_arg (call, 1);
5457 tree flag = gimple_call_arg (call, 2);
5458 tree lhs = gimple_call_lhs (call);
5459 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5460 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5461 enum rtx_code code;
5462 optab optab;
5463 struct expand_operand ops[5];
5464
5465 gcc_assert (flag_inline_atomics);
5466
5467 if (gimple_call_num_args (call) == 4)
5468 model = get_memmodel (gimple_call_arg (call, 3));
5469
5470 rtx mem = get_builtin_sync_mem (ptr, mode);
5471 rtx val = expand_expr_force_mode (bit, mode);
5472
5473 switch (gimple_call_internal_fn (call))
5474 {
5475 case IFN_ATOMIC_BIT_TEST_AND_SET:
5476 code = IOR;
5477 optab = atomic_bit_test_and_set_optab;
5478 break;
5479 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5480 code = XOR;
5481 optab = atomic_bit_test_and_complement_optab;
5482 break;
5483 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5484 code = AND;
5485 optab = atomic_bit_test_and_reset_optab;
5486 break;
5487 default:
5488 gcc_unreachable ();
5489 }
5490
5491 if (lhs == NULL_TREE)
5492 {
5493 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5494 val, NULL_RTX, true, OPTAB_DIRECT);
5495 if (code == AND)
5496 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5497 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5498 return;
5499 }
5500
5501 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5502 enum insn_code icode = direct_optab_handler (optab, mode);
5503 gcc_assert (icode != CODE_FOR_nothing);
5504 create_output_operand (&ops[0], target, mode);
5505 create_fixed_operand (&ops[1], mem);
5506 create_convert_operand_to (&ops[2], val, mode, true);
5507 create_integer_operand (&ops[3], model);
5508 create_integer_operand (&ops[4], integer_onep (flag));
5509 if (maybe_expand_insn (icode, 5, ops))
5510 return;
5511
5512 rtx bitval = val;
5513 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5514 val, NULL_RTX, true, OPTAB_DIRECT);
5515 rtx maskval = val;
5516 if (code == AND)
5517 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5518 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5519 code, model, false);
5520 if (integer_onep (flag))
5521 {
5522 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5523 NULL_RTX, true, OPTAB_DIRECT);
5524 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5525 true, OPTAB_DIRECT);
5526 }
5527 else
5528 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5529 OPTAB_DIRECT);
5530 if (result != target)
5531 emit_move_insn (target, result);
5532 }
5533
5534 /* Expand an atomic clear operation.
5535 void __atomic_clear (BOOL *obj, enum memmodel)
5536 EXP is the call expression. */
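/* For illustration: __atomic_clear (&flag, __ATOMIC_RELEASE) on a bool
   flag is a valid use; consume, acquire, or acq_rel models draw the
   warning below.  */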
5537
5538 static rtx
5539 expand_builtin_atomic_clear (tree exp)
5540 {
5541 machine_mode mode;
5542 rtx mem, ret;
5543 enum memmodel model;
5544
5545 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5546 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5547 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5548
5549 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5550 {
5551 source_location loc
5552 = expansion_point_location_if_in_system_header (input_location);
5553 warning_at (loc, OPT_Winvalid_memory_model,
5554 "invalid memory model for %<__atomic_store%>");
5555 model = MEMMODEL_SEQ_CST;
5556 }
5557
5558 /* Try issuing an atomic store, allowing fallback to __sync_lock_release.
5559 The only way this can fail is if the bool type is larger than a word
5560 size. Unlikely, but handle it anyway for completeness. In that case,
5561 assume a single threaded model, since there is no atomic support and no
5562 barriers are required, and emit a plain store. */
5563 ret = expand_atomic_store (mem, const0_rtx, model, true);
5564 if (!ret)
5565 emit_move_insn (mem, const0_rtx);
5566 return const0_rtx;
5567 }
5568
5569 /* Expand an atomic test_and_set operation.
5570 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5571 EXP is the call expression. */
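/* For illustration:

     static bool flag;
     bool was_set = __atomic_test_and_set (&flag, __ATOMIC_ACQUIRE);

   atomically sets the flag and returns true iff it was already set.  */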
5572
5573 static rtx
5574 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5575 {
5576 rtx mem;
5577 enum memmodel model;
5578 machine_mode mode;
5579
5580 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5581 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5582 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5583
5584 return expand_atomic_test_and_set (target, mem, model);
5585 }
5586
5587
5588 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5589 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
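/* For illustration: __atomic_always_lock_free (sizeof (int), 0) must fold
   to a compile-time constant; it is 1 exactly when an aligned 4-byte
   (assuming a 4-byte int) compare-and-swap pattern exists for the
   target.  */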
5590
5591 static tree
5592 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5593 {
5594 int size;
5595 machine_mode mode;
5596 unsigned int mode_align, type_align;
5597
5598 if (TREE_CODE (arg0) != INTEGER_CST)
5599 return NULL_TREE;
5600
5601 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5602 mode = mode_for_size (size, MODE_INT, 0);
5603 mode_align = GET_MODE_ALIGNMENT (mode);
5604
5605 if (TREE_CODE (arg1) == INTEGER_CST)
5606 {
5607 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5608
5609 /* Either this argument is null, or it's a fake pointer encoding
5610 the alignment of the object. */
5611 val = least_bit_hwi (val);
5612 val *= BITS_PER_UNIT;
5613
5614 if (val == 0 || mode_align < val)
5615 type_align = mode_align;
5616 else
5617 type_align = val;
5618 }
5619 else
5620 {
5621 tree ttype = TREE_TYPE (arg1);
5622
5623 /* This function is usually invoked and folded immediately by the front
5624 end before anything else has a chance to look at it. The pointer
5625 parameter at this point is usually cast to a void *, so check for that
5626 and look past the cast. */
5627 if (CONVERT_EXPR_P (arg1)
5628 && POINTER_TYPE_P (ttype)
5629 && VOID_TYPE_P (TREE_TYPE (ttype))
5630 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5631 arg1 = TREE_OPERAND (arg1, 0);
5632
5633 ttype = TREE_TYPE (arg1);
5634 gcc_assert (POINTER_TYPE_P (ttype));
5635
5636 /* Get the underlying type of the object. */
5637 ttype = TREE_TYPE (ttype);
5638 type_align = TYPE_ALIGN (ttype);
5639 }
5640
5641 /* If the object has smaller alignment, the lock free routines cannot
5642 be used. */
5643 if (type_align < mode_align)
5644 return boolean_false_node;
5645
5646 /* Check if a compare_and_swap pattern exists for the mode which represents
5647 the required size. The pattern is not allowed to fail, so the existence
5648 of the pattern indicates support is present. */
5649 if (can_compare_and_swap_p (mode, true))
5650 return boolean_true_node;
5651 else
5652 return boolean_false_node;
5653 }
5654
5655 /* Return true if the parameters to call EXP represent an object which will
5656 always generate lock free instructions. The first argument represents the
5657 size of the object, and the second parameter is a pointer to the object
5658 itself. If NULL is passed for the object, then the result is based on
5659 typical alignment for an object of the specified size. Otherwise return
5660 false. */
5661
5662 static rtx
5663 expand_builtin_atomic_always_lock_free (tree exp)
5664 {
5665 tree size;
5666 tree arg0 = CALL_EXPR_ARG (exp, 0);
5667 tree arg1 = CALL_EXPR_ARG (exp, 1);
5668
5669 if (TREE_CODE (arg0) != INTEGER_CST)
5670 {
5671 error ("non-constant argument 1 to __atomic_always_lock_free");
5672 return const0_rtx;
5673 }
5674
5675 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5676 if (size == boolean_true_node)
5677 return const1_rtx;
5678 return const0_rtx;
5679 }
5680
5681 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5682 is lock free on this architecture. */
5683
5684 static tree
5685 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5686 {
5687 if (!flag_inline_atomics)
5688 return NULL_TREE;
5689
5690 /* If it isn't always lock free, don't generate a result. */
5691 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5692 return boolean_true_node;
5693
5694 return NULL_TREE;
5695 }
5696
5697 /* Return true if the parameters to call EXP represent an object which will
5698 always generate lock free instructions. The first argument represents the
5699 size of the object, and the second parameter is a pointer to the object
5700 itself. If NULL is passed for the object, then the result is based on
5701 typical alignment for an object of the specified size. Otherwise return
5702 NULL. */
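/* For illustration: unlike the always_ variant,

     if (__atomic_is_lock_free (sizeof (long double), &x))
       ...

   may not be resolvable here; when it is not, NULL_RTX is returned and
   the call falls through to the libatomic routine of the same name.  */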
5703
5704 static rtx
5705 expand_builtin_atomic_is_lock_free (tree exp)
5706 {
5707 tree size;
5708 tree arg0 = CALL_EXPR_ARG (exp, 0);
5709 tree arg1 = CALL_EXPR_ARG (exp, 1);
5710
5711 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5712 {
5713 error ("non-integer argument 1 to __atomic_is_lock_free");
5714 return NULL_RTX;
5715 }
5716
5717 if (!flag_inline_atomics)
5718 return NULL_RTX;
5719
5720 /* If the value is known at compile time, return the RTX for it. */
5721 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5722 if (size == boolean_true_node)
5723 return const1_rtx;
5724
5725 return NULL_RTX;
5726 }
5727
5728 /* Expand the __atomic_thread_fence intrinsic:
5729 void __atomic_thread_fence (enum memmodel)
5730 EXP is the CALL_EXPR. */
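/* For illustration: __atomic_thread_fence (__ATOMIC_RELEASE) emits
   whatever barrier the target needs to order memory with respect to other
   threads (possibly none); the signal fence variant below only constrains
   the compiler, for code sharing state with a signal handler on the same
   thread.  */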
5731
5732 static void
5733 expand_builtin_atomic_thread_fence (tree exp)
5734 {
5735 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5736 expand_mem_thread_fence (model);
5737 }
5738
5739 /* Expand the __atomic_signal_fence intrinsic:
5740 void __atomic_signal_fence (enum memmodel)
5741 EXP is the CALL_EXPR. */
5742
5743 static void
5744 expand_builtin_atomic_signal_fence (tree exp)
5745 {
5746 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5747 expand_mem_signal_fence (model);
5748 }
5749
5750 /* Expand the __sync_synchronize intrinsic. */
5751
5752 static void
5753 expand_builtin_sync_synchronize (void)
5754 {
5755 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5756 }
5757
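/* Expand the __builtin_thread_pointer intrinsic, which returns the thread
   pointer register as a void *; for illustration,

     void *tp = __builtin_thread_pointer ();

   on targets providing get_thread_pointer_optab.  */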
5758 static rtx
5759 expand_builtin_thread_pointer (tree exp, rtx target)
5760 {
5761 enum insn_code icode;
5762 if (!validate_arglist (exp, VOID_TYPE))
5763 return const0_rtx;
5764 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5765 if (icode != CODE_FOR_nothing)
5766 {
5767 struct expand_operand op;
5768 /* If the target is not suitable, create a new target. */
5769 if (target == NULL_RTX
5770 || !REG_P (target)
5771 || GET_MODE (target) != Pmode)
5772 target = gen_reg_rtx (Pmode);
5773 create_output_operand (&op, target, Pmode);
5774 expand_insn (icode, 1, &op);
5775 return target;
5776 }
5777 error ("__builtin_thread_pointer is not supported on this target");
5778 return const0_rtx;
5779 }
5780
5781 static void
5782 expand_builtin_set_thread_pointer (tree exp)
5783 {
5784 enum insn_code icode;
5785 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5786 return;
5787 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5788 if (icode != CODE_FOR_nothing)
5789 {
5790 struct expand_operand op;
5791 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5792 Pmode, EXPAND_NORMAL);
5793 create_input_operand (&op, val, Pmode);
5794 expand_insn (icode, 1, &op);
5795 return;
5796 }
5797 error ("__builtin_set_thread_pointer is not supported on this target");
5798 }
5799
5800 \f
5801 /* Emit code to restore the current value of stack. */
5802
5803 static void
5804 expand_stack_restore (tree var)
5805 {
5806 rtx_insn *prev;
5807 rtx sa = expand_normal (var);
5808
5809 sa = convert_memory_address (Pmode, sa);
5810
5811 prev = get_last_insn ();
5812 emit_stack_restore (SAVE_BLOCK, sa);
5813
5814 record_new_stack_level ();
5815
5816 fixup_args_size_notes (prev, get_last_insn (), 0);
5817 }
5818
5819 /* Emit code to save the current value of stack. */
5820
5821 static rtx
5822 expand_stack_save (void)
5823 {
5824 rtx ret = NULL_RTX;
5825
5826 emit_stack_save (SAVE_BLOCK, &ret);
5827 return ret;
5828 }
5829
5830
5831 /* Expand an expression EXP that calls a built-in function,
5832 with result going to TARGET if that's convenient
5833 (and in mode MODE if that's convenient).
5834 SUBTARGET may be used as the target for computing one of EXP's operands.
5835 IGNORE is nonzero if the value is to be ignored. */
5836
5837 rtx
5838 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5839 int ignore)
5840 {
5841 tree fndecl = get_callee_fndecl (exp);
5842 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5843 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5844 int flags;
5845
5846 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5847 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5848
5849 /* When ASan is enabled, we don't want to expand some memory/string
5850 builtins and rely on libsanitizer's hooks. This allows us to avoid
5851 redundant checks and be sure, that possible overflow will be detected
5852 by ASan. */
5853
5854 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5855 return expand_call (exp, target, ignore);
5856
5857 /* When not optimizing, generate calls to library functions for a certain
5858 set of builtins. */
5859 if (!optimize
5860 && !called_as_built_in (fndecl)
5861 && fcode != BUILT_IN_FORK
5862 && fcode != BUILT_IN_EXECL
5863 && fcode != BUILT_IN_EXECV
5864 && fcode != BUILT_IN_EXECLP
5865 && fcode != BUILT_IN_EXECLE
5866 && fcode != BUILT_IN_EXECVP
5867 && fcode != BUILT_IN_EXECVE
5868 && fcode != BUILT_IN_ALLOCA
5869 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5870 && fcode != BUILT_IN_FREE
5871 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5872 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5873 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5874 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5875 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5876 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5877 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5878 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5879 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5880 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5881 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5882 && fcode != BUILT_IN_CHKP_BNDRET)
5883 return expand_call (exp, target, ignore);
5884
5885 /* The built-in function expanders test for target == const0_rtx
5886 to determine whether the function's result will be ignored. */
5887 if (ignore)
5888 target = const0_rtx;
5889
5890 /* If the result of a pure or const built-in function is ignored, and
5891 none of its arguments are volatile, we can avoid expanding the
5892 built-in call and just evaluate the arguments for side-effects. */
5893 if (target == const0_rtx
5894 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5895 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5896 {
5897 bool volatilep = false;
5898 tree arg;
5899 call_expr_arg_iterator iter;
5900
5901 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5902 if (TREE_THIS_VOLATILE (arg))
5903 {
5904 volatilep = true;
5905 break;
5906 }
5907
5908 if (! volatilep)
5909 {
5910 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5911 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5912 return const0_rtx;
5913 }
5914 }
5915
5916 /* expand_builtin_with_bounds is supposed to be used for
5917 instrumented builtin calls. */
5918 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5919
5920 switch (fcode)
5921 {
5922 CASE_FLT_FN (BUILT_IN_FABS):
5923 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5924 case BUILT_IN_FABSD32:
5925 case BUILT_IN_FABSD64:
5926 case BUILT_IN_FABSD128:
5927 target = expand_builtin_fabs (exp, target, subtarget);
5928 if (target)
5929 return target;
5930 break;
5931
5932 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5933 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5934 target = expand_builtin_copysign (exp, target, subtarget);
5935 if (target)
5936 return target;
5937 break;
5938
5939 /* Just do a normal library call if we were unable to fold
5940 the values. */
5941 CASE_FLT_FN (BUILT_IN_CABS):
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_FMA):
5945 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5946 if (target)
5947 return target;
5948 break;
5949
5950 CASE_FLT_FN (BUILT_IN_ILOGB):
5951 if (! flag_unsafe_math_optimizations)
5952 break;
5953 gcc_fallthrough ();
5954 CASE_FLT_FN (BUILT_IN_ISINF):
5955 CASE_FLT_FN (BUILT_IN_FINITE):
5956 case BUILT_IN_ISFINITE:
5957 case BUILT_IN_ISNORMAL:
5958 target = expand_builtin_interclass_mathfn (exp, target);
5959 if (target)
5960 return target;
5961 break;
5962
5963 CASE_FLT_FN (BUILT_IN_ICEIL):
5964 CASE_FLT_FN (BUILT_IN_LCEIL):
5965 CASE_FLT_FN (BUILT_IN_LLCEIL):
5966 CASE_FLT_FN (BUILT_IN_LFLOOR):
5967 CASE_FLT_FN (BUILT_IN_IFLOOR):
5968 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5969 target = expand_builtin_int_roundingfn (exp, target);
5970 if (target)
5971 return target;
5972 break;
5973
5974 CASE_FLT_FN (BUILT_IN_IRINT):
5975 CASE_FLT_FN (BUILT_IN_LRINT):
5976 CASE_FLT_FN (BUILT_IN_LLRINT):
5977 CASE_FLT_FN (BUILT_IN_IROUND):
5978 CASE_FLT_FN (BUILT_IN_LROUND):
5979 CASE_FLT_FN (BUILT_IN_LLROUND):
5980 target = expand_builtin_int_roundingfn_2 (exp, target);
5981 if (target)
5982 return target;
5983 break;
5984
5985 CASE_FLT_FN (BUILT_IN_POWI):
5986 target = expand_builtin_powi (exp, target);
5987 if (target)
5988 return target;
5989 break;
5990
5991 CASE_FLT_FN (BUILT_IN_CEXPI):
5992 target = expand_builtin_cexpi (exp, target);
5993 gcc_assert (target);
5994 return target;
5995
5996 CASE_FLT_FN (BUILT_IN_SIN):
5997 CASE_FLT_FN (BUILT_IN_COS):
5998 if (! flag_unsafe_math_optimizations)
5999 break;
6000 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6001 if (target)
6002 return target;
6003 break;
6004
6005 CASE_FLT_FN (BUILT_IN_SINCOS):
6006 if (! flag_unsafe_math_optimizations)
6007 break;
6008 target = expand_builtin_sincos (exp);
6009 if (target)
6010 return target;
6011 break;
6012
6013 case BUILT_IN_APPLY_ARGS:
6014 return expand_builtin_apply_args ();
6015
6016 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6017 FUNCTION with a copy of the parameters described by
6018 ARGUMENTS, and ARGSIZE. It returns a block of memory
6019 allocated on the stack into which is stored all the registers
6020 that might possibly be used for returning the result of a
6021 function. ARGUMENTS is the value returned by
6022 __builtin_apply_args. ARGSIZE is the number of bytes of
6023 arguments that must be copied. ??? How should this value be
6024 computed? We'll also need a safe worst case value for varargs
6025 functions. */
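/* For illustration, the documented usage pattern is a forwarding
   wrapper along these lines (64 is a worst-case argument-size guess,
   and target_fn is a hypothetical function with the same return type
   as the wrapper):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);  */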
6026 case BUILT_IN_APPLY:
6027 if (!validate_arglist (exp, POINTER_TYPE,
6028 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6029 && !validate_arglist (exp, REFERENCE_TYPE,
6030 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6031 return const0_rtx;
6032 else
6033 {
6034 rtx ops[3];
6035
6036 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6037 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6038 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6039
6040 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6041 }
6042
6043 /* __builtin_return (RESULT) causes the function to return the
6044 value described by RESULT. RESULT is address of the block of
6045 memory returned by __builtin_apply. */
6046 case BUILT_IN_RETURN:
6047 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6048 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6049 return const0_rtx;
6050
6051 case BUILT_IN_SAVEREGS:
6052 return expand_builtin_saveregs ();
6053
6054 case BUILT_IN_VA_ARG_PACK:
6055 /* All valid uses of __builtin_va_arg_pack () are removed during
6056 inlining. */
6057 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6058 return const0_rtx;
6059
6060 case BUILT_IN_VA_ARG_PACK_LEN:
6061 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6062 inlining. */
6063 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6064 return const0_rtx;
6065
6066 /* Return the address of the first anonymous stack arg. */
6067 case BUILT_IN_NEXT_ARG:
6068 if (fold_builtin_next_arg (exp, false))
6069 return const0_rtx;
6070 return expand_builtin_next_arg ();
6071
6072 case BUILT_IN_CLEAR_CACHE:
6073 target = expand_builtin___clear_cache (exp);
6074 if (target)
6075 return target;
6076 break;
6077
6078 case BUILT_IN_CLASSIFY_TYPE:
6079 return expand_builtin_classify_type (exp);
6080
6081 case BUILT_IN_CONSTANT_P:
6082 return const0_rtx;
6083
6084 case BUILT_IN_FRAME_ADDRESS:
6085 case BUILT_IN_RETURN_ADDRESS:
6086 return expand_builtin_frame_address (fndecl, exp);
6087
6088 /* Returns the address of the area where the structure is returned.
6089 0 otherwise. */
6090 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6091 if (call_expr_nargs (exp) != 0
6092 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6093 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6094 return const0_rtx;
6095 else
6096 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6097
6098 case BUILT_IN_ALLOCA:
6099 case BUILT_IN_ALLOCA_WITH_ALIGN:
6100 /* If the allocation stems from the declaration of a variable-sized
6101 object, it cannot accumulate. */
6102 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6103 if (target)
6104 return target;
6105 break;
6106
6107 case BUILT_IN_STACK_SAVE:
6108 return expand_stack_save ();
6109
6110 case BUILT_IN_STACK_RESTORE:
6111 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6112 return const0_rtx;
6113
6114 case BUILT_IN_BSWAP16:
6115 case BUILT_IN_BSWAP32:
6116 case BUILT_IN_BSWAP64:
6117 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6118 if (target)
6119 return target;
6120 break;
6121
6122 CASE_INT_FN (BUILT_IN_FFS):
6123 target = expand_builtin_unop (target_mode, exp, target,
6124 subtarget, ffs_optab);
6125 if (target)
6126 return target;
6127 break;
6128
6129 CASE_INT_FN (BUILT_IN_CLZ):
6130 target = expand_builtin_unop (target_mode, exp, target,
6131 subtarget, clz_optab);
6132 if (target)
6133 return target;
6134 break;
6135
6136 CASE_INT_FN (BUILT_IN_CTZ):
6137 target = expand_builtin_unop (target_mode, exp, target,
6138 subtarget, ctz_optab);
6139 if (target)
6140 return target;
6141 break;
6142
6143 CASE_INT_FN (BUILT_IN_CLRSB):
6144 target = expand_builtin_unop (target_mode, exp, target,
6145 subtarget, clrsb_optab);
6146 if (target)
6147 return target;
6148 break;
6149
6150 CASE_INT_FN (BUILT_IN_POPCOUNT):
6151 target = expand_builtin_unop (target_mode, exp, target,
6152 subtarget, popcount_optab);
6153 if (target)
6154 return target;
6155 break;
6156
6157 CASE_INT_FN (BUILT_IN_PARITY):
6158 target = expand_builtin_unop (target_mode, exp, target,
6159 subtarget, parity_optab);
6160 if (target)
6161 return target;
6162 break;
6163
6164 case BUILT_IN_STRLEN:
6165 target = expand_builtin_strlen (exp, target, target_mode);
6166 if (target)
6167 return target;
6168 break;
6169
6170 case BUILT_IN_STRCPY:
6171 target = expand_builtin_strcpy (exp, target);
6172 if (target)
6173 return target;
6174 break;
6175
6176 case BUILT_IN_STRNCPY:
6177 target = expand_builtin_strncpy (exp, target);
6178 if (target)
6179 return target;
6180 break;
6181
6182 case BUILT_IN_STPCPY:
6183 target = expand_builtin_stpcpy (exp, target, mode);
6184 if (target)
6185 return target;
6186 break;
6187
6188 case BUILT_IN_MEMCPY:
6189 target = expand_builtin_memcpy (exp, target);
6190 if (target)
6191 return target;
6192 break;
6193
6194 case BUILT_IN_MEMPCPY:
6195 target = expand_builtin_mempcpy (exp, target, mode);
6196 if (target)
6197 return target;
6198 break;
6199
6200 case BUILT_IN_MEMSET:
6201 target = expand_builtin_memset (exp, target, mode);
6202 if (target)
6203 return target;
6204 break;
6205
6206 case BUILT_IN_BZERO:
6207 target = expand_builtin_bzero (exp);
6208 if (target)
6209 return target;
6210 break;
6211
6212 case BUILT_IN_STRCMP:
6213 target = expand_builtin_strcmp (exp, target);
6214 if (target)
6215 return target;
6216 break;
6217
6218 case BUILT_IN_STRNCMP:
6219 target = expand_builtin_strncmp (exp, target, mode);
6220 if (target)
6221 return target;
6222 break;
6223
6224 case BUILT_IN_BCMP:
6225 case BUILT_IN_MEMCMP:
6226 case BUILT_IN_MEMCMP_EQ:
6227 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6228 if (target)
6229 return target;
6230 if (fcode == BUILT_IN_MEMCMP_EQ)
6231 {
6232 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6233 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6234 }
6235 break;
6236
6237 case BUILT_IN_SETJMP:
6238 /* This should have been lowered to the builtins below. */
6239 gcc_unreachable ();
6240
6241 case BUILT_IN_SETJMP_SETUP:
6242 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6243 and the receiver label. */
6244 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6245 {
6246 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6247 VOIDmode, EXPAND_NORMAL);
6248 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6249 rtx_insn *label_r = label_rtx (label);
6250
6251 /* This is copied from the handling of non-local gotos. */
6252 expand_builtin_setjmp_setup (buf_addr, label_r);
6253 nonlocal_goto_handler_labels
6254 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6255 nonlocal_goto_handler_labels);
6256 /* ??? Do not let expand_label treat us as such since we would
6257 not want to be both on the list of non-local labels and on
6258 the list of forced labels. */
6259 FORCED_LABEL (label) = 0;
6260 return const0_rtx;
6261 }
6262 break;
6263
6264 case BUILT_IN_SETJMP_RECEIVER:
6265 /* __builtin_setjmp_receiver is passed the receiver label. */
6266 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6267 {
6268 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6269 rtx_insn *label_r = label_rtx (label);
6270
6271 expand_builtin_setjmp_receiver (label_r);
6272 return const0_rtx;
6273 }
6274 break;
6275
6276 /* __builtin_longjmp is passed a pointer to an array of five words.
6277 It's similar to the C library longjmp function but works with
6278 __builtin_setjmp above. */
6279 case BUILT_IN_LONGJMP:
6280 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6281 {
6282 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6283 VOIDmode, EXPAND_NORMAL);
6284 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6285
6286 if (value != const1_rtx)
6287 {
6288 error ("%<__builtin_longjmp%> second argument must be 1");
6289 return const0_rtx;
6290 }
6291
6292 expand_builtin_longjmp (buf_addr, value);
6293 return const0_rtx;
6294 }
6295 break;
6296
6297 case BUILT_IN_NONLOCAL_GOTO:
6298 target = expand_builtin_nonlocal_goto (exp);
6299 if (target)
6300 return target;
6301 break;
6302
6303 /* This updates the setjmp buffer that is its argument with the value
6304 of the current stack pointer. */
6305 case BUILT_IN_UPDATE_SETJMP_BUF:
6306 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6307 {
6308 rtx buf_addr
6309 = expand_normal (CALL_EXPR_ARG (exp, 0));
6310
6311 expand_builtin_update_setjmp_buf (buf_addr);
6312 return const0_rtx;
6313 }
6314 break;
6315
6316 case BUILT_IN_TRAP:
6317 expand_builtin_trap ();
6318 return const0_rtx;
6319
6320 case BUILT_IN_UNREACHABLE:
6321 expand_builtin_unreachable ();
6322 return const0_rtx;
6323
6324 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6325 case BUILT_IN_SIGNBITD32:
6326 case BUILT_IN_SIGNBITD64:
6327 case BUILT_IN_SIGNBITD128:
6328 target = expand_builtin_signbit (exp, target);
6329 if (target)
6330 return target;
6331 break;
6332
6333 /* Various hooks for the DWARF 2 __throw routine. */
6334 case BUILT_IN_UNWIND_INIT:
6335 expand_builtin_unwind_init ();
6336 return const0_rtx;
6337 case BUILT_IN_DWARF_CFA:
6338 return virtual_cfa_rtx;
6339 #ifdef DWARF2_UNWIND_INFO
6340 case BUILT_IN_DWARF_SP_COLUMN:
6341 return expand_builtin_dwarf_sp_column ();
6342 case BUILT_IN_INIT_DWARF_REG_SIZES:
6343 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6344 return const0_rtx;
6345 #endif
6346 case BUILT_IN_FROB_RETURN_ADDR:
6347 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6348 case BUILT_IN_EXTRACT_RETURN_ADDR:
6349 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6350 case BUILT_IN_EH_RETURN:
6351 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6352 CALL_EXPR_ARG (exp, 1));
6353 return const0_rtx;
6354 case BUILT_IN_EH_RETURN_DATA_REGNO:
6355 return expand_builtin_eh_return_data_regno (exp);
6356 case BUILT_IN_EXTEND_POINTER:
6357 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6358 case BUILT_IN_EH_POINTER:
6359 return expand_builtin_eh_pointer (exp);
6360 case BUILT_IN_EH_FILTER:
6361 return expand_builtin_eh_filter (exp);
6362 case BUILT_IN_EH_COPY_VALUES:
6363 return expand_builtin_eh_copy_values (exp);
6364
6365 case BUILT_IN_VA_START:
6366 return expand_builtin_va_start (exp);
6367 case BUILT_IN_VA_END:
6368 return expand_builtin_va_end (exp);
6369 case BUILT_IN_VA_COPY:
6370 return expand_builtin_va_copy (exp);
6371 case BUILT_IN_EXPECT:
6372 return expand_builtin_expect (exp, target);
6373 case BUILT_IN_ASSUME_ALIGNED:
6374 return expand_builtin_assume_aligned (exp, target);
6375 case BUILT_IN_PREFETCH:
6376 expand_builtin_prefetch (exp);
6377 return const0_rtx;
6378
6379 case BUILT_IN_INIT_TRAMPOLINE:
6380 return expand_builtin_init_trampoline (exp, true);
6381 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6382 return expand_builtin_init_trampoline (exp, false);
6383 case BUILT_IN_ADJUST_TRAMPOLINE:
6384 return expand_builtin_adjust_trampoline (exp);
6385
6386 case BUILT_IN_INIT_DESCRIPTOR:
6387 return expand_builtin_init_descriptor (exp);
6388 case BUILT_IN_ADJUST_DESCRIPTOR:
6389 return expand_builtin_adjust_descriptor (exp);
6390
6391 case BUILT_IN_FORK:
6392 case BUILT_IN_EXECL:
6393 case BUILT_IN_EXECV:
6394 case BUILT_IN_EXECLP:
6395 case BUILT_IN_EXECLE:
6396 case BUILT_IN_EXECVP:
6397 case BUILT_IN_EXECVE:
6398 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6399 if (target)
6400 return target;
6401 break;
6402
6403 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6404 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6405 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6406 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6407 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6409 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6415 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6416 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6417 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6418 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6420 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6421 if (target)
6422 return target;
6423 break;
6424
6425 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6426 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6427 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6428 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6429 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6431 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6437 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6438 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6439 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6440 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6442 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6443 if (target)
6444 return target;
6445 break;
6446
6447 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6448 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6449 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6450 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6451 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6453 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6454 if (target)
6455 return target;
6456 break;
6457
6458 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6459 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6460 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6461 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6462 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6464 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6470 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6471 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6472 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6473 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6475 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6481 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6482 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6483 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6484 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6486 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6492 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6493 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6494 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6495 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6496 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6497 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6498 if (target)
6499 return target;
6500 break;
6501
6502 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6503 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6504 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6505 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6506 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6507 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6508 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6509 if (target)
6510 return target;
6511 break;
6512
6513 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6514 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6515 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6516 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6517 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6518 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6519 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6520 if (target)
6521 return target;
6522 break;
6523
6524 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6525 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6526 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6527 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6528 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6529 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6530 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6531 if (target)
6532 return target;
6533 break;
6534
6535 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6536 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6537 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6538 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6539 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6540 if (mode == VOIDmode)
6541 mode = TYPE_MODE (boolean_type_node);
6542 if (!target || !register_operand (target, mode))
6543 target = gen_reg_rtx (mode);
6544
6545 mode = get_builtin_sync_mode
6546 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6547 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6548 if (target)
6549 return target;
6550 break;
6551
6552 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6553 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6554 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6555 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6556 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6557 mode = get_builtin_sync_mode
6558 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6559 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6560 if (target)
6561 return target;
6562 break;
6563
6564 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6565 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6566 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6567 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6568 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6569 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6570 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6571 if (target)
6572 return target;
6573 break;
6574
6575 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6576 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6577 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6578 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6579 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6580 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6581 expand_builtin_sync_lock_release (mode, exp);
6582 return const0_rtx;
6583
6584 case BUILT_IN_SYNC_SYNCHRONIZE:
6585 expand_builtin_sync_synchronize ();
6586 return const0_rtx;
6587
6588 case BUILT_IN_ATOMIC_EXCHANGE_1:
6589 case BUILT_IN_ATOMIC_EXCHANGE_2:
6590 case BUILT_IN_ATOMIC_EXCHANGE_4:
6591 case BUILT_IN_ATOMIC_EXCHANGE_8:
6592 case BUILT_IN_ATOMIC_EXCHANGE_16:
6593 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6594 target = expand_builtin_atomic_exchange (mode, exp, target);
6595 if (target)
6596 return target;
6597 break;
6598
6599 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6600 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6601 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6602 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6603 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6604 {
6605 unsigned int nargs, z;
6606 vec<tree, va_gc> *vec;
6607
6608 mode
6609 = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6610 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6611 if (target)
6612 return target;
6613
6614 /* If this is turned into an external library call, the weak parameter
6615 must be dropped to match the expected parameter list. */
6616 nargs = call_expr_nargs (exp);
6617 vec_alloc (vec, nargs - 1);
6618 for (z = 0; z < 3; z++)
6619 vec->quick_push (CALL_EXPR_ARG (exp, z));
6620 /* Skip the boolean weak parameter. */
6621 for (z = 4; z < 6; z++)
6622 vec->quick_push (CALL_EXPR_ARG (exp, z));
6623 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6624 break;
6625 }
6626
6627 case BUILT_IN_ATOMIC_LOAD_1:
6628 case BUILT_IN_ATOMIC_LOAD_2:
6629 case BUILT_IN_ATOMIC_LOAD_4:
6630 case BUILT_IN_ATOMIC_LOAD_8:
6631 case BUILT_IN_ATOMIC_LOAD_16:
6632 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6633 target = expand_builtin_atomic_load (mode, exp, target);
6634 if (target)
6635 return target;
6636 break;
6637
6638 case BUILT_IN_ATOMIC_STORE_1:
6639 case BUILT_IN_ATOMIC_STORE_2:
6640 case BUILT_IN_ATOMIC_STORE_4:
6641 case BUILT_IN_ATOMIC_STORE_8:
6642 case BUILT_IN_ATOMIC_STORE_16:
6643 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6644 target = expand_builtin_atomic_store (mode, exp);
6645 if (target)
6646 return const0_rtx;
6647 break;
6648
6649 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6650 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6651 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6652 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6653 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6654 {
6655 enum built_in_function lib;
6656 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6657 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6658 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6659 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6660 ignore, lib);
6661 if (target)
6662 return target;
6663 break;
6664 }
6665 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6666 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6667 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6668 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6669 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6670 {
6671 enum built_in_function lib;
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6673 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6674 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6675 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6676 ignore, lib);
6677 if (target)
6678 return target;
6679 break;
6680 }
6681 case BUILT_IN_ATOMIC_AND_FETCH_1:
6682 case BUILT_IN_ATOMIC_AND_FETCH_2:
6683 case BUILT_IN_ATOMIC_AND_FETCH_4:
6684 case BUILT_IN_ATOMIC_AND_FETCH_8:
6685 case BUILT_IN_ATOMIC_AND_FETCH_16:
6686 {
6687 enum built_in_function lib;
6688 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6689 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6690 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6691 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6692 ignore, lib);
6693 if (target)
6694 return target;
6695 break;
6696 }
6697 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6698 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6699 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6700 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6701 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6702 {
6703 enum built_in_function lib;
6704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6705 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6706 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6708 ignore, lib);
6709 if (target)
6710 return target;
6711 break;
6712 }
6713 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6714 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6715 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6716 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6717 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6718 {
6719 enum built_in_function lib;
6720 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6721 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6722 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6723 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6724 ignore, lib);
6725 if (target)
6726 return target;
6727 break;
6728 }
6729 case BUILT_IN_ATOMIC_OR_FETCH_1:
6730 case BUILT_IN_ATOMIC_OR_FETCH_2:
6731 case BUILT_IN_ATOMIC_OR_FETCH_4:
6732 case BUILT_IN_ATOMIC_OR_FETCH_8:
6733 case BUILT_IN_ATOMIC_OR_FETCH_16:
6734 {
6735 enum built_in_function lib;
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6737 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6738 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6739 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6740 ignore, lib);
6741 if (target)
6742 return target;
6743 break;
6744 }
6745 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6746 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6747 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6748 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6749 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6751 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6752 ignore, BUILT_IN_NONE);
6753 if (target)
6754 return target;
6755 break;
6756
6757 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6758 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6759 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6760 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6761 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6764 ignore, BUILT_IN_NONE);
6765 if (target)
6766 return target;
6767 break;
6768
6769 case BUILT_IN_ATOMIC_FETCH_AND_1:
6770 case BUILT_IN_ATOMIC_FETCH_AND_2:
6771 case BUILT_IN_ATOMIC_FETCH_AND_4:
6772 case BUILT_IN_ATOMIC_FETCH_AND_8:
6773 case BUILT_IN_ATOMIC_FETCH_AND_16:
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6776 ignore, BUILT_IN_NONE);
6777 if (target)
6778 return target;
6779 break;
6780
6781 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6782 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6783 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6784 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6785 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6788 ignore, BUILT_IN_NONE);
6789 if (target)
6790 return target;
6791 break;
6792
6793 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6794 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6795 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6796 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6797 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6798 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6799 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6800 ignore, BUILT_IN_NONE);
6801 if (target)
6802 return target;
6803 break;
6804
6805 case BUILT_IN_ATOMIC_FETCH_OR_1:
6806 case BUILT_IN_ATOMIC_FETCH_OR_2:
6807 case BUILT_IN_ATOMIC_FETCH_OR_4:
6808 case BUILT_IN_ATOMIC_FETCH_OR_8:
6809 case BUILT_IN_ATOMIC_FETCH_OR_16:
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6811 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6812 ignore, BUILT_IN_NONE);
6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_ATOMIC_TEST_AND_SET:
6818 return expand_builtin_atomic_test_and_set (exp, target);
6819
6820 case BUILT_IN_ATOMIC_CLEAR:
6821 return expand_builtin_atomic_clear (exp);
6822
6823 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6824 return expand_builtin_atomic_always_lock_free (exp);
6825
6826 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6827 target = expand_builtin_atomic_is_lock_free (exp);
6828 if (target)
6829 return target;
6830 break;
6831
6832 case BUILT_IN_ATOMIC_THREAD_FENCE:
6833 expand_builtin_atomic_thread_fence (exp);
6834 return const0_rtx;
6835
6836 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6837 expand_builtin_atomic_signal_fence (exp);
6838 return const0_rtx;
6839
6840 case BUILT_IN_OBJECT_SIZE:
6841 return expand_builtin_object_size (exp);
6842
6843 case BUILT_IN_MEMCPY_CHK:
6844 case BUILT_IN_MEMPCPY_CHK:
6845 case BUILT_IN_MEMMOVE_CHK:
6846 case BUILT_IN_MEMSET_CHK:
6847 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6848 if (target)
6849 return target;
6850 break;
6851
6852 case BUILT_IN_STRCPY_CHK:
6853 case BUILT_IN_STPCPY_CHK:
6854 case BUILT_IN_STRNCPY_CHK:
6855 case BUILT_IN_STPNCPY_CHK:
6856 case BUILT_IN_STRCAT_CHK:
6857 case BUILT_IN_STRNCAT_CHK:
6858 case BUILT_IN_SNPRINTF_CHK:
6859 case BUILT_IN_VSNPRINTF_CHK:
6860 maybe_emit_chk_warning (exp, fcode);
6861 break;
6862
6863 case BUILT_IN_SPRINTF_CHK:
6864 case BUILT_IN_VSPRINTF_CHK:
6865 maybe_emit_sprintf_chk_warning (exp, fcode);
6866 break;
6867
6868 case BUILT_IN_FREE:
6869 if (warn_free_nonheap_object)
6870 maybe_emit_free_warning (exp);
6871 break;
6872
6873 case BUILT_IN_THREAD_POINTER:
6874 return expand_builtin_thread_pointer (exp, target);
6875
6876 case BUILT_IN_SET_THREAD_POINTER:
6877 expand_builtin_set_thread_pointer (exp);
6878 return const0_rtx;
6879
6880 case BUILT_IN_CILK_DETACH:
6881 expand_builtin_cilk_detach (exp);
6882 return const0_rtx;
6883
6884 case BUILT_IN_CILK_POP_FRAME:
6885 expand_builtin_cilk_pop_frame (exp);
6886 return const0_rtx;
6887
6888 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6889 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6890 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6891 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6892 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6893 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6894 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6895 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6896 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6897 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6898 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6899 /* We allow user CHKP builtins if the Pointer Bounds
6900 Checker is off. */
6901 if (!chkp_function_instrumented_p (current_function_decl))
6902 {
6903 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6904 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6905 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6906 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6907 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6908 return expand_normal (CALL_EXPR_ARG (exp, 0));
6909 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6910 return expand_normal (size_zero_node);
6911 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6912 return expand_normal (size_int (-1));
6913 else
6914 return const0_rtx;
6915 }
6916 /* FALLTHROUGH */
6917
6918 case BUILT_IN_CHKP_BNDMK:
6919 case BUILT_IN_CHKP_BNDSTX:
6920 case BUILT_IN_CHKP_BNDCL:
6921 case BUILT_IN_CHKP_BNDCU:
6922 case BUILT_IN_CHKP_BNDLDX:
6923 case BUILT_IN_CHKP_BNDRET:
6924 case BUILT_IN_CHKP_INTERSECT:
6925 case BUILT_IN_CHKP_NARROW:
6926 case BUILT_IN_CHKP_EXTRACT_LOWER:
6927 case BUILT_IN_CHKP_EXTRACT_UPPER:
6928 /* A software implementation of the Pointer Bounds Checker is not
6929 yet implemented; target support is required. */
6930 error ("Your target platform does not support -fcheck-pointer-bounds");
6931 break;
6932
6933 case BUILT_IN_ACC_ON_DEVICE:
6934 /* Fall back to a library call if we failed to expand the builtin
6935 when folding. */
6936 break;
6937
6938 default: /* Just emit a library call for an unknown builtin. */
6939 break;
6940 }
6941
6942 /* The switch statement above can drop through to cause the function
6943 to be called normally. */
6944 return expand_call (exp, target, ignore);
6945 }
6946
6947 /* Similar to expand_builtin but is used for instrumented calls. */
6948
6949 rtx
6950 expand_builtin_with_bounds (tree exp, rtx target,
6951 rtx subtarget ATTRIBUTE_UNUSED,
6952 machine_mode mode, int ignore)
6953 {
6954 tree fndecl = get_callee_fndecl (exp);
6955 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6956
6957 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6958
6959 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6960 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6961
6962 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6963 && fcode < END_CHKP_BUILTINS);
6964
6965 switch (fcode)
6966 {
6967 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6968 target = expand_builtin_memcpy_with_bounds (exp, target);
6969 if (target)
6970 return target;
6971 break;
6972
6973 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6974 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6975 if (target)
6976 return target;
6977 break;
6978
6979 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6980 target = expand_builtin_memset_with_bounds (exp, target, mode);
6981 if (target)
6982 return target;
6983 break;
6984
6985 default:
6986 break;
6987 }
6988
6989 /* The switch statement above can drop through to cause the function
6990 to be called normally. */
6991 return expand_call (exp, target, ignore);
6992 }
6993
6994 /* Determine whether a tree node represents a call to a built-in
6995 function. If the tree T is a call to a built-in function with
6996 the right number of arguments of the appropriate types, return
6997 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6998 Otherwise the return value is END_BUILTINS. */
6999
7000 enum built_in_function
7001 builtin_mathfn_code (const_tree t)
7002 {
7003 const_tree fndecl, arg, parmlist;
7004 const_tree argtype, parmtype;
7005 const_call_expr_arg_iterator iter;
7006
7007 if (TREE_CODE (t) != CALL_EXPR
7008 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7009 return END_BUILTINS;
7010
7011 fndecl = get_callee_fndecl (t);
7012 if (fndecl == NULL_TREE
7013 || TREE_CODE (fndecl) != FUNCTION_DECL
7014 || ! DECL_BUILT_IN (fndecl)
7015 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7016 return END_BUILTINS;
7017
7018 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7019 init_const_call_expr_arg_iterator (t, &iter);
7020 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7021 {
7022 /* If a function doesn't take a variable number of arguments,
7023 the last element in the list will have type `void'. */
7024 parmtype = TREE_VALUE (parmlist);
7025 if (VOID_TYPE_P (parmtype))
7026 {
7027 if (more_const_call_expr_args_p (&iter))
7028 return END_BUILTINS;
7029 return DECL_FUNCTION_CODE (fndecl);
7030 }
7031
7032 if (! more_const_call_expr_args_p (&iter))
7033 return END_BUILTINS;
7034
7035 arg = next_const_call_expr_arg (&iter);
7036 argtype = TREE_TYPE (arg);
7037
7038 if (SCALAR_FLOAT_TYPE_P (parmtype))
7039 {
7040 if (! SCALAR_FLOAT_TYPE_P (argtype))
7041 return END_BUILTINS;
7042 }
7043 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7044 {
7045 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7046 return END_BUILTINS;
7047 }
7048 else if (POINTER_TYPE_P (parmtype))
7049 {
7050 if (! POINTER_TYPE_P (argtype))
7051 return END_BUILTINS;
7052 }
7053 else if (INTEGRAL_TYPE_P (parmtype))
7054 {
7055 if (! INTEGRAL_TYPE_P (argtype))
7056 return END_BUILTINS;
7057 }
7058 else
7059 return END_BUILTINS;
7060 }
7061
7062 /* Variable-length argument list. */
7063 return DECL_FUNCTION_CODE (fndecl);
7064 }
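
/* Editor's illustrative sketch, not part of the original sources: for a
   call sqrt (2.0) the loop above matches the single REAL_TYPE parameter
   against the REAL_TYPE argument and returns BUILT_IN_SQRT, while a call
   such as sqrt ("2"), or one with the wrong number of arguments, yields
   END_BUILTINS.  */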
7065
7066 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7067 evaluate to a constant. */
7068
7069 static tree
7070 fold_builtin_constant_p (tree arg)
7071 {
7072 /* We return 1 for a numeric type that's known to be a constant
7073 value at compile-time or for an aggregate type that's a
7074 literal constant. */
7075 STRIP_NOPS (arg);
7076
7077 /* If we know this is a constant, return the integer constant one. */
7078 if (CONSTANT_CLASS_P (arg)
7079 || (TREE_CODE (arg) == CONSTRUCTOR
7080 && TREE_CONSTANT (arg)))
7081 return integer_one_node;
7082 if (TREE_CODE (arg) == ADDR_EXPR)
7083 {
7084 tree op = TREE_OPERAND (arg, 0);
7085 if (TREE_CODE (op) == STRING_CST
7086 || (TREE_CODE (op) == ARRAY_REF
7087 && integer_zerop (TREE_OPERAND (op, 1))
7088 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7089 return integer_one_node;
7090 }
7091
7092 /* If this expression has side effects, show we don't know it to be a
7093 constant. Likewise if it's a pointer or aggregate type, since in
7094 those cases we only want literals: those are optimized only
7095 when generating RTL, not later.
7096 And finally, if we are compiling an initializer, not code, we
7097 need to return a definite result now; there's not going to be any
7098 more optimization done. */
7099 if (TREE_SIDE_EFFECTS (arg)
7100 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7101 || POINTER_TYPE_P (TREE_TYPE (arg))
7102 || cfun == 0
7103 || folding_initializer
7104 || force_folding_builtin_constant_p)
7105 return integer_zero_node;
7106
7107 return NULL_TREE;
7108 }
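
/* Editor's illustrative sketch, not part of the original sources:

     int a = __builtin_constant_p (42);      folds to 1
     int b = __builtin_constant_p ("abc");   folds to 1
     extern int x;
     int c = __builtin_constant_p (x);       stays NULL_TREE

   42 satisfies CONSTANT_CLASS_P and "abc" the ADDR_EXPR-of-STRING_CST
   test above, while X is left undecided so later passes may still prove
   it constant (fold_builtin_1 turns the undecided case into 0 when not
   optimizing).  */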
7109
7110 /* Create a call to builtin_expect with PRED and EXPECTED (and, if
7111 non-NULL, PREDICTOR) as its arguments and return it as a truthvalue. */
7112
7113 static tree
7114 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7115 tree predictor)
7116 {
7117 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7118
7119 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7120 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7121 ret_type = TREE_TYPE (TREE_TYPE (fn));
7122 pred_type = TREE_VALUE (arg_types);
7123 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7124
7125 pred = fold_convert_loc (loc, pred_type, pred);
7126 expected = fold_convert_loc (loc, expected_type, expected);
7127 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7128 predictor);
7129
7130 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7131 build_int_cst (ret_type, 0));
7132 }
7133
7134 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7135 NULL_TREE if no simplification is possible. */
7136
7137 tree
7138 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7139 {
7140 tree inner, fndecl, inner_arg0;
7141 enum tree_code code;
7142
7143 /* Distribute the expected value over short-circuiting operators.
7144 See through the cast from truthvalue_type_node to long. */
7145 inner_arg0 = arg0;
7146 while (CONVERT_EXPR_P (inner_arg0)
7147 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7148 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7149 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7150
7151 /* If this is a builtin_expect within a builtin_expect, keep the
7152 inner one. See through a comparison against a constant. It
7153 might have been added to create a truthvalue. */
7154 inner = inner_arg0;
7155
7156 if (COMPARISON_CLASS_P (inner)
7157 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7158 inner = TREE_OPERAND (inner, 0);
7159
7160 if (TREE_CODE (inner) == CALL_EXPR
7161 && (fndecl = get_callee_fndecl (inner))
7162 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7163 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7164 return arg0;
7165
7166 inner = inner_arg0;
7167 code = TREE_CODE (inner);
7168 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7169 {
7170 tree op0 = TREE_OPERAND (inner, 0);
7171 tree op1 = TREE_OPERAND (inner, 1);
7172
7173 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7174 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7175 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7176
7177 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7178 }
7179
7180 /* If the argument isn't invariant then there's nothing else we can do. */
7181 if (!TREE_CONSTANT (inner_arg0))
7182 return NULL_TREE;
7183
7184 /* If we expect that a comparison against the argument will fold to
7185 a constant, return the constant. In practice, this means a true
7186 constant or the address of a non-weak symbol. */
7187 inner = inner_arg0;
7188 STRIP_NOPS (inner);
7189 if (TREE_CODE (inner) == ADDR_EXPR)
7190 {
7191 do
7192 {
7193 inner = TREE_OPERAND (inner, 0);
7194 }
7195 while (TREE_CODE (inner) == COMPONENT_REF
7196 || TREE_CODE (inner) == ARRAY_REF);
7197 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7198 return NULL_TREE;
7199 }
7200
7201 /* Otherwise, ARG0 already has the proper type for the return value. */
7202 return arg0;
7203 }
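
/* Editor's illustrative sketch, not part of the original sources: the
   distribution step above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm keeps its own prediction, while a call like
   __builtin_expect ((long) &global, 0) folds to its first argument,
   since the address of a non-weak symbol is TREE_CONSTANT.  */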
7204
7205 /* Fold a call to __builtin_classify_type with argument ARG. */
7206
7207 static tree
7208 fold_builtin_classify_type (tree arg)
7209 {
7210 if (arg == 0)
7211 return build_int_cst (integer_type_node, no_type_class);
7212
7213 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7214 }
7215
7216 /* Fold a call to __builtin_strlen with argument ARG. */
7217
7218 static tree
7219 fold_builtin_strlen (location_t loc, tree type, tree arg)
7220 {
7221 if (!validate_arg (arg, POINTER_TYPE))
7222 return NULL_TREE;
7223 else
7224 {
7225 tree len = c_strlen (arg, 0);
7226
7227 if (len)
7228 return fold_convert_loc (loc, type, len);
7229
7230 return NULL_TREE;
7231 }
7232 }
7233
7234 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7235
7236 static tree
7237 fold_builtin_inf (location_t loc, tree type, int warn)
7238 {
7239 REAL_VALUE_TYPE real;
7240
7241 /* __builtin_inff is intended to be usable to define INFINITY on all
7242 targets. If an infinity is not available, INFINITY expands "to a
7243 positive constant of type float that overflows at translation
7244 time", footnote "In this case, using INFINITY will violate the
7245 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7246 Thus we pedwarn to ensure this constraint violation is
7247 diagnosed. */
7248 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7249 pedwarn (loc, 0, "target format does not support infinity");
7250
7251 real_inf (&real);
7252 return build_real (type, real);
7253 }
7254
7255 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7256 NULL_TREE if no simplification can be made. */
7257
7258 static tree
7259 fold_builtin_sincos (location_t loc,
7260 tree arg0, tree arg1, tree arg2)
7261 {
7262 tree type;
7263 tree fndecl, call = NULL_TREE;
7264
7265 if (!validate_arg (arg0, REAL_TYPE)
7266 || !validate_arg (arg1, POINTER_TYPE)
7267 || !validate_arg (arg2, POINTER_TYPE))
7268 return NULL_TREE;
7269
7270 type = TREE_TYPE (arg0);
7271
7272 /* Canonicalize sincos to cexpi; find the cexpi variant for TYPE. */
7273 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7274 if (fn == END_BUILTINS)
7275 return NULL_TREE;
7276
7277 /* Calculate the result directly when the argument is a constant. */
7278 if (TREE_CODE (arg0) == REAL_CST)
7279 {
7280 tree complex_type = build_complex_type (type);
7281 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7282 }
7283 if (!call)
7284 {
7285 if (!targetm.libc_has_function (function_c99_math_complex)
7286 || !builtin_decl_implicit_p (fn))
7287 return NULL_TREE;
7288 fndecl = builtin_decl_explicit (fn);
7289 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7290 call = builtin_save_expr (call);
7291 }
7292
7293 return build2 (COMPOUND_EXPR, void_type_node,
7294 build2 (MODIFY_EXPR, void_type_node,
7295 build_fold_indirect_ref_loc (loc, arg1),
7296 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7297 build2 (MODIFY_EXPR, void_type_node,
7298 build_fold_indirect_ref_loc (loc, arg2),
7299 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7300 }
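
/* Editor's illustrative sketch, not part of the original sources: when
   the C99 complex libm is available,

     sincos (x, &s, &c);

   is rewritten into approximately

     _Complex double t = cexpi (x);
     s = __imag__ t, c = __real__ t;

   and if X is a REAL_CST the cexpi value is computed at compile time,
   making both stores constant.  */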
7301
7302 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7303 Return NULL_TREE if no simplification can be made. */
7304
7305 static tree
7306 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7307 {
7308 if (!validate_arg (arg1, POINTER_TYPE)
7309 || !validate_arg (arg2, POINTER_TYPE)
7310 || !validate_arg (len, INTEGER_TYPE))
7311 return NULL_TREE;
7312
7313 /* If the LEN parameter is zero, return zero. */
7314 if (integer_zerop (len))
7315 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7316 arg1, arg2);
7317
7318 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7319 if (operand_equal_p (arg1, arg2, 0))
7320 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7321
7322 /* If the LEN parameter is one, return an expression corresponding to
7323 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7324 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7325 {
7326 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7327 tree cst_uchar_ptr_node
7328 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7329
7330 tree ind1
7331 = fold_convert_loc (loc, integer_type_node,
7332 build1 (INDIRECT_REF, cst_uchar_node,
7333 fold_convert_loc (loc,
7334 cst_uchar_ptr_node,
7335 arg1)));
7336 tree ind2
7337 = fold_convert_loc (loc, integer_type_node,
7338 build1 (INDIRECT_REF, cst_uchar_node,
7339 fold_convert_loc (loc,
7340 cst_uchar_ptr_node,
7341 arg2)));
7342 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7343 }
7344
7345 return NULL_TREE;
7346 }
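
/* Editor's illustrative sketch, not part of the original sources,
   matching the three cases above:

     memcmp (p, q, 0)  =>  0, keeping P and Q only for side effects
     memcmp (p, p, n)  =>  0, keeping N only for side effects
     memcmp (p, q, 1)  =>  *(const unsigned char *) p
                             - *(const unsigned char *) q  */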
7347
7348 /* Fold a call to builtin isascii with argument ARG. */
7349
7350 static tree
7351 fold_builtin_isascii (location_t loc, tree arg)
7352 {
7353 if (!validate_arg (arg, INTEGER_TYPE))
7354 return NULL_TREE;
7355 else
7356 {
7357 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7358 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7359 build_int_cst (integer_type_node,
7360 ~ (unsigned HOST_WIDE_INT) 0x7f));
7361 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7362 arg, integer_zero_node);
7363 }
7364 }
7365
7366 /* Fold a call to builtin toascii with argument ARG. */
7367
7368 static tree
7369 fold_builtin_toascii (location_t loc, tree arg)
7370 {
7371 if (!validate_arg (arg, INTEGER_TYPE))
7372 return NULL_TREE;
7373
7374 /* Transform toascii(c) -> (c & 0x7f). */
7375 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7376 build_int_cst (integer_type_node, 0x7f));
7377 }
7378
7379 /* Fold a call to builtin isdigit with argument ARG. */
7380
7381 static tree
7382 fold_builtin_isdigit (location_t loc, tree arg)
7383 {
7384 if (!validate_arg (arg, INTEGER_TYPE))
7385 return NULL_TREE;
7386 else
7387 {
7388 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7389 /* According to the C standard, isdigit is unaffected by locale.
7390 However, it definitely is affected by the target character set. */
7391 unsigned HOST_WIDE_INT target_digit0
7392 = lang_hooks.to_target_charset ('0');
7393
7394 if (target_digit0 == 0)
7395 return NULL_TREE;
7396
7397 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7398 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7399 build_int_cst (unsigned_type_node, target_digit0));
7400 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7401 build_int_cst (unsigned_type_node, 9));
7402 }
7403 }
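
/* Editor's illustrative sketch, not part of the original sources: on an
   ASCII target to_target_charset ('0') is 48, so isdigit (c) becomes

     (unsigned) c - 48 <= 9

   a single unsigned comparison in place of a library call.  */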
7404
7405 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7406
7407 static tree
7408 fold_builtin_fabs (location_t loc, tree arg, tree type)
7409 {
7410 if (!validate_arg (arg, REAL_TYPE))
7411 return NULL_TREE;
7412
7413 arg = fold_convert_loc (loc, type, arg);
7414 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7415 }
7416
7417 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7418
7419 static tree
7420 fold_builtin_abs (location_t loc, tree arg, tree type)
7421 {
7422 if (!validate_arg (arg, INTEGER_TYPE))
7423 return NULL_TREE;
7424
7425 arg = fold_convert_loc (loc, type, arg);
7426 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7427 }
7428
7429 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7430
7431 static tree
7432 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7433 {
7434 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7435 if (validate_arg (arg0, REAL_TYPE)
7436 && validate_arg (arg1, REAL_TYPE)
7437 && validate_arg (arg2, REAL_TYPE)
7438 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7439 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7440
7441 return NULL_TREE;
7442 }
7443
7444 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7445
7446 static tree
7447 fold_builtin_carg (location_t loc, tree arg, tree type)
7448 {
7449 if (validate_arg (arg, COMPLEX_TYPE)
7450 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7451 {
7452 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7453
7454 if (atan2_fn)
7455 {
7456 tree new_arg = builtin_save_expr (arg);
7457 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7458 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7459 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7460 }
7461 }
7462
7463 return NULL_TREE;
7464 }
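
/* Editor's illustrative sketch, not part of the original sources:

     carg (z)  =>  atan2 (__imag__ z, __real__ z)

   with Z wrapped by builtin_save_expr so it is evaluated only once.  */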
7465
7466 /* Fold a call to builtin frexp, we can assume the base is 2. */
7467
7468 static tree
7469 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7470 {
7471 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7472 return NULL_TREE;
7473
7474 STRIP_NOPS (arg0);
7475
7476 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7477 return NULL_TREE;
7478
7479 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7480
7481 /* Proceed if a valid pointer type was passed in. */
7482 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7483 {
7484 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7485 tree frac, exp;
7486
7487 switch (value->cl)
7488 {
7489 case rvc_zero:
7490 /* For +-0, return (*exp = 0, +-0). */
7491 exp = integer_zero_node;
7492 frac = arg0;
7493 break;
7494 case rvc_nan:
7495 case rvc_inf:
7496 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7497 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7498 case rvc_normal:
7499 {
7500 /* Since the frexp function always expects base 2, and in
7501 GCC normalized significands are already in the range
7502 [0.5, 1.0), we have exactly what frexp wants. */
7503 REAL_VALUE_TYPE frac_rvt = *value;
7504 SET_REAL_EXP (&frac_rvt, 0);
7505 frac = build_real (rettype, frac_rvt);
7506 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7507 }
7508 break;
7509 default:
7510 gcc_unreachable ();
7511 }
7512
7513 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7514 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7515 TREE_SIDE_EFFECTS (arg1) = 1;
7516 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7517 }
7518
7519 return NULL_TREE;
7520 }
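
/* Editor's illustrative sketch, not part of the original sources: with a
   constant argument,

     frexp (8.0, &e)  =>  (*e = 4, 0.5)

   since 8.0 == 0.5 * 2^4 and GCC already stores significands in
   [0.5, 1.0); for a NaN or infinity the call folds to the value itself,
   leaving *e unspecified.  */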
7521
7522 /* Fold a call to builtin modf. */
7523
7524 static tree
7525 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7526 {
7527 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7528 return NULL_TREE;
7529
7530 STRIP_NOPS (arg0);
7531
7532 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7533 return NULL_TREE;
7534
7535 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7536
7537 /* Proceed if a valid pointer type was passed in. */
7538 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7539 {
7540 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7541 REAL_VALUE_TYPE trunc, frac;
7542
7543 switch (value->cl)
7544 {
7545 case rvc_nan:
7546 case rvc_zero:
7547 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7548 trunc = frac = *value;
7549 break;
7550 case rvc_inf:
7551 /* For +-Inf, return (*arg1 = arg0, +-0). */
7552 frac = dconst0;
7553 frac.sign = value->sign;
7554 trunc = *value;
7555 break;
7556 case rvc_normal:
7557 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7558 real_trunc (&trunc, VOIDmode, value);
7559 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7560 /* If the original number was negative and already
7561 integral, then the fractional part is -0.0. */
7562 if (value->sign && frac.cl == rvc_zero)
7563 frac.sign = value->sign;
7564 break;
7565 }
7566
7567 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7568 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7569 build_real (rettype, trunc));
7570 TREE_SIDE_EFFECTS (arg1) = 1;
7571 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7572 build_real (rettype, frac));
7573 }
7574
7575 return NULL_TREE;
7576 }
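
/* Editor's illustrative sketch, not part of the original sources:

     modf (2.5, &i)   =>  (*i = 2.0, 0.5)
     modf (-7.0, &i)  =>  (*i = -7.0, -0.0)
     modf (-Inf, &i)  =>  (*i = -Inf, -0.0)

   note the fraction keeps the sign of a negative integral or infinite
   argument, per the rvc_normal and rvc_inf cases above.  */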
7577
7578 /* Given a location LOC, an interclass builtin function decl FNDECL
7579 and its single argument ARG, return a folded expression computing
7580 the same, or NULL_TREE if we either couldn't or didn't want to fold
7581 (the latter happens if there's an RTL instruction available). */
7582
7583 static tree
7584 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7585 {
7586 machine_mode mode;
7587
7588 if (!validate_arg (arg, REAL_TYPE))
7589 return NULL_TREE;
7590
7591 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7592 return NULL_TREE;
7593
7594 mode = TYPE_MODE (TREE_TYPE (arg));
7595
7596 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7597
7598 /* If there is no optab, try generic code. */
7599 switch (DECL_FUNCTION_CODE (fndecl))
7600 {
7601 tree result;
7602
7603 CASE_FLT_FN (BUILT_IN_ISINF):
7604 {
7605 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7606 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7607 tree type = TREE_TYPE (arg);
7608 REAL_VALUE_TYPE r;
7609 char buf[128];
7610
7611 if (is_ibm_extended)
7612 {
7613 /* NaN and Inf are encoded in the high-order double value
7614 only. The low-order value is not significant. */
7615 type = double_type_node;
7616 mode = DFmode;
7617 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7618 }
7619 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7620 real_from_string (&r, buf);
7621 result = build_call_expr (isgr_fn, 2,
7622 fold_build1_loc (loc, ABS_EXPR, type, arg),
7623 build_real (type, r));
7624 return result;
7625 }
7626 CASE_FLT_FN (BUILT_IN_FINITE):
7627 case BUILT_IN_ISFINITE:
7628 {
7629 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7630 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7631 tree type = TREE_TYPE (arg);
7632 REAL_VALUE_TYPE r;
7633 char buf[128];
7634
7635 if (is_ibm_extended)
7636 {
7637 /* NaN and Inf are encoded in the high-order double value
7638 only. The low-order value is not significant. */
7639 type = double_type_node;
7640 mode = DFmode;
7641 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7642 }
7643 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7644 real_from_string (&r, buf);
7645 result = build_call_expr (isle_fn, 2,
7646 fold_build1_loc (loc, ABS_EXPR, type, arg),
7647 build_real (type, r));
7648 /*result = fold_build2_loc (loc, UNGT_EXPR,
7649 TREE_TYPE (TREE_TYPE (fndecl)),
7650 fold_build1_loc (loc, ABS_EXPR, type, arg),
7651 build_real (type, r));
7652 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7653 TREE_TYPE (TREE_TYPE (fndecl)),
7654 result);*/
7655 return result;
7656 }
7657 case BUILT_IN_ISNORMAL:
7658 {
7659 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7660 islessequal(fabs(x),DBL_MAX). */
7661 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7662 tree type = TREE_TYPE (arg);
7663 tree orig_arg, max_exp, min_exp;
7664 machine_mode orig_mode = mode;
7665 REAL_VALUE_TYPE rmax, rmin;
7666 char buf[128];
7667
7668 orig_arg = arg = builtin_save_expr (arg);
7669 if (is_ibm_extended)
7670 {
7671 /* Use double to test the normal range of IBM extended
7672 precision. Emin for IBM extended precision is
7673 different to emin for IEEE double, being 53 higher
7674 since the low double exponent is at least 53 lower
7675 than the high double exponent. */
7676 type = double_type_node;
7677 mode = DFmode;
7678 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7679 }
7680 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7681
7682 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7683 real_from_string (&rmax, buf);
7684 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7685 real_from_string (&rmin, buf);
7686 max_exp = build_real (type, rmax);
7687 min_exp = build_real (type, rmin);
7688
7689 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7690 if (is_ibm_extended)
7691 {
7692 /* Testing the high end of the range is done just using
7693 the high double, using the same test as isfinite().
7694 For the subnormal end of the range we first test the
7695 high double, then if its magnitude is equal to the
7696 limit of 0x1p-969, we test whether the low double is
7697 non-zero and opposite sign to the high double. */
7698 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7699 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7700 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7701 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7702 arg, min_exp);
7703 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7704 complex_double_type_node, orig_arg);
7705 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7706 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7707 tree zero = build_real (type, dconst0);
7708 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7709 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7710 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7711 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7712 fold_build3 (COND_EXPR,
7713 integer_type_node,
7714 hilt, logt, lolt));
7715 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7716 eq_min, ok_lo);
7717 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7718 gt_min, eq_min);
7719 }
7720 else
7721 {
7722 tree const isge_fn
7723 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7724 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7725 }
7726 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7727 max_exp, min_exp);
7728 return result;
7729 }
7730 default:
7731 break;
7732 }
7733
7734 return NULL_TREE;
7735 }
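
/* Editor's illustrative sketch, not part of the original sources: for
   plain IEEE double (no optab, not IBM extended) the rewrites above are

     isinf (x)     =>  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  =>  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  =>  isgreaterequal (fabs (x), 0x1p-1022)
                       & islessequal (fabs (x), DBL_MAX)

   where DBL_MAX comes from get_max_float and 0x1p-1022 (DBL_MIN) from
   the "0x1p%d" string built with emin - 1.  */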
7736
7737 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7738 ARG is the argument for the call; BUILTIN_INDEX selects the check. */
7739
7740 static tree
7741 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7742 {
7743 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7744
7745 if (!validate_arg (arg, REAL_TYPE))
7746 return NULL_TREE;
7747
7748 switch (builtin_index)
7749 {
7750 case BUILT_IN_ISINF:
7751 if (!HONOR_INFINITIES (arg))
7752 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7753
7754 return NULL_TREE;
7755
7756 case BUILT_IN_ISINF_SIGN:
7757 {
7758 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7759 /* In a boolean context, GCC will fold the inner COND_EXPR to
7760 1. So e.g. "if (isinf_sign(x))" would be folded to just
7761 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7762 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7763 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7764 tree tmp = NULL_TREE;
7765
7766 arg = builtin_save_expr (arg);
7767
7768 if (signbit_fn && isinf_fn)
7769 {
7770 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7771 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7772
7773 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7774 signbit_call, integer_zero_node);
7775 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7776 isinf_call, integer_zero_node);
7777
7778 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7779 integer_minus_one_node, integer_one_node);
7780 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7781 isinf_call, tmp,
7782 integer_zero_node);
7783 }
7784
7785 return tmp;
7786 }
7787
7788 case BUILT_IN_ISFINITE:
7789 if (!HONOR_NANS (arg)
7790 && !HONOR_INFINITIES (arg))
7791 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7792
7793 return NULL_TREE;
7794
7795 case BUILT_IN_ISNAN:
7796 if (!HONOR_NANS (arg))
7797 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7798
7799 {
7800 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7801 if (is_ibm_extended)
7802 {
7803 /* NaN and Inf are encoded in the high-order double value
7804 only. The low-order value is not significant. */
7805 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7806 }
7807 }
7808 arg = builtin_save_expr (arg);
7809 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7810
7811 default:
7812 gcc_unreachable ();
7813 }
7814 }
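
/* Editor's illustrative sketch, not part of the original sources: under
   -ffinite-math-only HONOR_NANS and HONOR_INFINITIES are false, so
   isnan (x) and isinf (x) fold straight to 0 and isfinite (x) to 1;
   otherwise isnan (x) becomes the unordered self-comparison
   UNORDERED_EXPR (x, x), and isinf_sign (x) expands to

     isinf (x) ? (signbit (x) ? -1 : 1) : 0  */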
7815
7816 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7817 This builtin will generate code to return the appropriate floating
7818 point classification depending on the value of the floating point
7819 number passed in. The possible return values must be supplied as
7820 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7821 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7822 one floating-point argument, which is "type generic". */
7823
7824 static tree
7825 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7826 {
7827 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7828 arg, type, res, tmp;
7829 machine_mode mode;
7830 REAL_VALUE_TYPE r;
7831 char buf[128];
7832
7833 /* Verify the required arguments in the original call. */
7834 if (nargs != 6
7835 || !validate_arg (args[0], INTEGER_TYPE)
7836 || !validate_arg (args[1], INTEGER_TYPE)
7837 || !validate_arg (args[2], INTEGER_TYPE)
7838 || !validate_arg (args[3], INTEGER_TYPE)
7839 || !validate_arg (args[4], INTEGER_TYPE)
7840 || !validate_arg (args[5], REAL_TYPE))
7841 return NULL_TREE;
7842
7843 fp_nan = args[0];
7844 fp_infinite = args[1];
7845 fp_normal = args[2];
7846 fp_subnormal = args[3];
7847 fp_zero = args[4];
7848 arg = args[5];
7849 type = TREE_TYPE (arg);
7850 mode = TYPE_MODE (type);
7851 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7852
7853 /* fpclassify(x) ->
7854 isnan(x) ? FP_NAN :
7855 (fabs(x) == Inf ? FP_INFINITE :
7856 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7857 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7858
7859 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7860 build_real (type, dconst0));
7861 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7862 tmp, fp_zero, fp_subnormal);
7863
7864 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7865 real_from_string (&r, buf);
7866 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7867 arg, build_real (type, r));
7868 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7869
7870 if (HONOR_INFINITIES (mode))
7871 {
7872 real_inf (&r);
7873 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7874 build_real (type, r));
7875 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7876 fp_infinite, res);
7877 }
7878
7879 if (HONOR_NANS (mode))
7880 {
7881 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7882 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7883 }
7884
7885 return res;
7886 }
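
/* Editor's illustrative sketch, not part of the original sources: for a
   double argument the nested conditionals built above amount to, with
   ax = fabs (x) in a SAVE_EXPR,

     ordered (ax, ax)
       ? (ax == Inf ? FP_INFINITE
          : ax >= 0x1p-1022 ? FP_NORMAL
          : ax == 0.0 ? FP_ZERO
          : FP_SUBNORMAL)
       : FP_NAN

   where the FP_* values are the five integer arguments of the call.  */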
7887
7888 /* Fold a call to an unordered comparison function such as
7889 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7890 being called and ARG0 and ARG1 are the arguments for the call.
7891 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7892 the opposite of the desired result. UNORDERED_CODE is used
7893 for modes that can hold NaNs and ORDERED_CODE is used for
7894 the rest. */
7895
7896 static tree
7897 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7898 enum tree_code unordered_code,
7899 enum tree_code ordered_code)
7900 {
7901 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7902 enum tree_code code;
7903 tree type0, type1;
7904 enum tree_code code0, code1;
7905 tree cmp_type = NULL_TREE;
7906
7907 type0 = TREE_TYPE (arg0);
7908 type1 = TREE_TYPE (arg1);
7909
7910 code0 = TREE_CODE (type0);
7911 code1 = TREE_CODE (type1);
7912
7913 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7914 /* Choose the wider of two real types. */
7915 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7916 ? type0 : type1;
7917 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7918 cmp_type = type0;
7919 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7920 cmp_type = type1;
7921
7922 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7923 arg1 = fold_convert_loc (loc, cmp_type, arg1);
7924
7925 if (unordered_code == UNORDERED_EXPR)
7926 {
7927 if (!HONOR_NANS (arg0))
7928 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7929 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7930 }
7931
7932 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7933 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7934 fold_build2_loc (loc, code, type, arg0, arg1));
7935 }
7936 }

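/* Editor's illustrative sketch, not part of the original sources: since
   UNLE_EXPR is the exact inverse of "greater",

     isgreater (x, y)  =>  !(x unle y)

   i.e. TRUTH_NOT_EXPR of UNLE_EXPR; under -ffinite-math-only the
   ordered LE_EXPR is negated instead, and isunordered (x, y) folds to
   plain 0.  */
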
7937 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7938 arithmetic if it can never overflow, or into internal functions that
7939 return both the result of the arithmetic and an overflowed boolean
7940 flag in a complex integer result, or into some other overflow check.
7941 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7942 checking part of that. */
7943
7944 static tree
7945 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7946 tree arg0, tree arg1, tree arg2)
7947 {
7948 enum internal_fn ifn = IFN_LAST;
7949 /* The code of the expression corresponding to the type-generic
7950 built-in, or ERROR_MARK for the type-specific ones. */
7951 enum tree_code opcode = ERROR_MARK;
7952 bool ovf_only = false;
7953
7954 switch (fcode)
7955 {
7956 case BUILT_IN_ADD_OVERFLOW_P:
7957 ovf_only = true;
7958 /* FALLTHRU */
7959 case BUILT_IN_ADD_OVERFLOW:
7960 opcode = PLUS_EXPR;
7961 /* FALLTHRU */
7962 case BUILT_IN_SADD_OVERFLOW:
7963 case BUILT_IN_SADDL_OVERFLOW:
7964 case BUILT_IN_SADDLL_OVERFLOW:
7965 case BUILT_IN_UADD_OVERFLOW:
7966 case BUILT_IN_UADDL_OVERFLOW:
7967 case BUILT_IN_UADDLL_OVERFLOW:
7968 ifn = IFN_ADD_OVERFLOW;
7969 break;
7970 case BUILT_IN_SUB_OVERFLOW_P:
7971 ovf_only = true;
7972 /* FALLTHRU */
7973 case BUILT_IN_SUB_OVERFLOW:
7974 opcode = MINUS_EXPR;
7975 /* FALLTHRU */
7976 case BUILT_IN_SSUB_OVERFLOW:
7977 case BUILT_IN_SSUBL_OVERFLOW:
7978 case BUILT_IN_SSUBLL_OVERFLOW:
7979 case BUILT_IN_USUB_OVERFLOW:
7980 case BUILT_IN_USUBL_OVERFLOW:
7981 case BUILT_IN_USUBLL_OVERFLOW:
7982 ifn = IFN_SUB_OVERFLOW;
7983 break;
7984 case BUILT_IN_MUL_OVERFLOW_P:
7985 ovf_only = true;
7986 /* FALLTHRU */
7987 case BUILT_IN_MUL_OVERFLOW:
7988 opcode = MULT_EXPR;
7989 /* FALLTHRU */
7990 case BUILT_IN_SMUL_OVERFLOW:
7991 case BUILT_IN_SMULL_OVERFLOW:
7992 case BUILT_IN_SMULLL_OVERFLOW:
7993 case BUILT_IN_UMUL_OVERFLOW:
7994 case BUILT_IN_UMULL_OVERFLOW:
7995 case BUILT_IN_UMULLL_OVERFLOW:
7996 ifn = IFN_MUL_OVERFLOW;
7997 break;
7998 default:
7999 gcc_unreachable ();
8000 }
8001
8002 /* For the "generic" overloads, the first two arguments can have different
8003 types and the last argument determines the target type to use to check
8004 for overflow. The arguments of the other overloads all have the same
8005 type. */
8006 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8007
8008 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8009 arguments are constant, attempt to fold the built-in call into a constant
8010 expression indicating whether or not it detected an overflow. */
8011 if (ovf_only
8012 && TREE_CODE (arg0) == INTEGER_CST
8013 && TREE_CODE (arg1) == INTEGER_CST)
8014 /* Perform the computation in the target type and check for overflow. */
8015 return omit_one_operand_loc (loc, boolean_type_node,
8016 arith_overflowed_p (opcode, type, arg0, arg1)
8017 ? boolean_true_node : boolean_false_node,
8018 arg2);
8019
8020 tree ctype = build_complex_type (type);
8021 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8022 2, arg0, arg1);
8023 tree tgt = save_expr (call);
8024 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8025 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8026 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8027
8028 if (ovf_only)
8029 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8030
8031 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8032 tree store
8033 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8034 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8035 }
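
/* Editor's illustrative sketch, not part of the original sources:

     __builtin_add_overflow (a, b, &r)

   becomes, through IFN_ADD_OVERFLOW returning a complex integer
   (shown in GIMPLE dump syntax),

     tmp = .ADD_OVERFLOW (a, b);
     r = REALPART_EXPR (tmp);
     (bool) IMAGPART_EXPR (tmp)   is the overflow flag

   while __builtin_add_overflow_p (1, 1, (char) 0) folds at compile
   time to false, because 2 fits in char.  */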
8036
8037 /* Fold a call to __builtin_FILE to a constant string. */
8038
8039 static inline tree
8040 fold_builtin_FILE (location_t loc)
8041 {
8042 if (const char *fname = LOCATION_FILE (loc))
8043 return build_string_literal (strlen (fname) + 1, fname);
8044
8045 return build_string_literal (1, "");
8046 }
8047
8048 /* Fold a call to __builtin_FUNCTION to a constant string. */
8049
8050 static inline tree
8051 fold_builtin_FUNCTION ()
8052 {
8053 if (current_function_decl)
8054 {
8055 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8056 return build_string_literal (strlen (name) + 1, name);
8057 }
8058
8059 return build_string_literal (1, "");
8060 }
8061
8062 /* Fold a call to __builtin_LINE to an integer constant. */
8063
8064 static inline tree
8065 fold_builtin_LINE (location_t loc, tree type)
8066 {
8067 return build_int_cst (type, LOCATION_LINE (loc));
8068 }
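
/* Editor's illustrative sketch, not part of the original sources: for a
   call on line 42 of foo.c inside function bar,

     __builtin_LINE ()      =>  42
     __builtin_FILE ()      =>  "foo.c"
     __builtin_FUNCTION ()  =>  "bar"

   all taken from the call's location (or current_function_decl).  */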
8069
8070 /* Fold a call to built-in function FNDECL with 0 arguments.
8071 This function returns NULL_TREE if no simplification was possible. */
8072
8073 static tree
8074 fold_builtin_0 (location_t loc, tree fndecl)
8075 {
8076 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8077 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8078 switch (fcode)
8079 {
8080 case BUILT_IN_FILE:
8081 return fold_builtin_FILE (loc);
8082
8083 case BUILT_IN_FUNCTION:
8084 return fold_builtin_FUNCTION ();
8085
8086 case BUILT_IN_LINE:
8087 return fold_builtin_LINE (loc, type);
8088
8089 CASE_FLT_FN (BUILT_IN_INF):
8090 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8091 case BUILT_IN_INFD32:
8092 case BUILT_IN_INFD64:
8093 case BUILT_IN_INFD128:
8094 return fold_builtin_inf (loc, type, true);
8095
8096 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8097 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8098 return fold_builtin_inf (loc, type, false);
8099
8100 case BUILT_IN_CLASSIFY_TYPE:
8101 return fold_builtin_classify_type (NULL_TREE);
8102
8103 default:
8104 break;
8105 }
8106 return NULL_TREE;
8107 }
8108
8109 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8110 This function returns NULL_TREE if no simplification was possible. */
8111
8112 static tree
8113 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8114 {
8115 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8116 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8117
8118 if (TREE_CODE (arg0) == ERROR_MARK)
8119 return NULL_TREE;
8120
8121 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8122 return ret;
8123
8124 switch (fcode)
8125 {
8126 case BUILT_IN_CONSTANT_P:
8127 {
8128 tree val = fold_builtin_constant_p (arg0);
8129
8130 /* Gimplification will pull the CALL_EXPR for the builtin out of
8131 an if condition. When not optimizing, we'll not CSE it back.
8132 To avoid link-error regressions, return false now. */
8133 if (!val && !optimize)
8134 val = integer_zero_node;
8135
8136 return val;
8137 }
8138
8139 case BUILT_IN_CLASSIFY_TYPE:
8140 return fold_builtin_classify_type (arg0);
8141
8142 case BUILT_IN_STRLEN:
8143 return fold_builtin_strlen (loc, type, arg0);
8144
8145 CASE_FLT_FN (BUILT_IN_FABS):
8146 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8147 case BUILT_IN_FABSD32:
8148 case BUILT_IN_FABSD64:
8149 case BUILT_IN_FABSD128:
8150 return fold_builtin_fabs (loc, arg0, type);
8151
8152 case BUILT_IN_ABS:
8153 case BUILT_IN_LABS:
8154 case BUILT_IN_LLABS:
8155 case BUILT_IN_IMAXABS:
8156 return fold_builtin_abs (loc, arg0, type);
8157
8158 CASE_FLT_FN (BUILT_IN_CONJ):
8159 if (validate_arg (arg0, COMPLEX_TYPE)
8160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8161 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8162 break;
8163
8164 CASE_FLT_FN (BUILT_IN_CREAL):
8165 if (validate_arg (arg0, COMPLEX_TYPE)
8166 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8167 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8168 break;
8169
8170 CASE_FLT_FN (BUILT_IN_CIMAG):
8171 if (validate_arg (arg0, COMPLEX_TYPE)
8172 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8173 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8174 break;
8175
8176 CASE_FLT_FN (BUILT_IN_CARG):
8177 return fold_builtin_carg (loc, arg0, type);
8178
8179 case BUILT_IN_ISASCII:
8180 return fold_builtin_isascii (loc, arg0);
8181
8182 case BUILT_IN_TOASCII:
8183 return fold_builtin_toascii (loc, arg0);
8184
8185 case BUILT_IN_ISDIGIT:
8186 return fold_builtin_isdigit (loc, arg0);
8187
8188 CASE_FLT_FN (BUILT_IN_FINITE):
8189 case BUILT_IN_FINITED32:
8190 case BUILT_IN_FINITED64:
8191 case BUILT_IN_FINITED128:
8192 case BUILT_IN_ISFINITE:
8193 {
8194 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8195 if (ret)
8196 return ret;
8197 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8198 }
8199
8200 CASE_FLT_FN (BUILT_IN_ISINF):
8201 case BUILT_IN_ISINFD32:
8202 case BUILT_IN_ISINFD64:
8203 case BUILT_IN_ISINFD128:
8204 {
8205 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8206 if (ret)
8207 return ret;
8208 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8209 }
8210
8211 case BUILT_IN_ISNORMAL:
8212 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8213
8214 case BUILT_IN_ISINF_SIGN:
8215 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8216
8217 CASE_FLT_FN (BUILT_IN_ISNAN):
8218 case BUILT_IN_ISNAND32:
8219 case BUILT_IN_ISNAND64:
8220 case BUILT_IN_ISNAND128:
8221 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8222
8223 case BUILT_IN_FREE:
8224 if (integer_zerop (arg0))
8225 return build_empty_stmt (loc);
8226 break;
8227
8228 default:
8229 break;
8230 }
8231
8232 return NULL_TREE;
8234 }
8235
8236 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8237 This function returns NULL_TREE if no simplification was possible. */
8238
8239 static tree
8240 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8241 {
8242 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8243 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8244
8245 if (TREE_CODE (arg0) == ERROR_MARK
8246 || TREE_CODE (arg1) == ERROR_MARK)
8247 return NULL_TREE;
8248
8249 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8250 return ret;
8251
8252 switch (fcode)
8253 {
8254 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8255 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8256 if (validate_arg (arg0, REAL_TYPE)
8257 && validate_arg (arg1, POINTER_TYPE))
8258 return do_mpfr_lgamma_r (arg0, arg1, type);
8259 break;
8260
8261 CASE_FLT_FN (BUILT_IN_FREXP):
8262 return fold_builtin_frexp (loc, arg0, arg1, type);
8263
8264 CASE_FLT_FN (BUILT_IN_MODF):
8265 return fold_builtin_modf (loc, arg0, arg1, type);
8266
8267 case BUILT_IN_STRSTR:
8268 return fold_builtin_strstr (loc, arg0, arg1, type);
8269
8270 case BUILT_IN_STRSPN:
8271 return fold_builtin_strspn (loc, arg0, arg1);
8272
8273 case BUILT_IN_STRCSPN:
8274 return fold_builtin_strcspn (loc, arg0, arg1);
8275
8276 case BUILT_IN_STRPBRK:
8277 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8278
8279 case BUILT_IN_EXPECT:
8280 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8281
8282 case BUILT_IN_ISGREATER:
8283 return fold_builtin_unordered_cmp (loc, fndecl,
8284 arg0, arg1, UNLE_EXPR, LE_EXPR);
8285 case BUILT_IN_ISGREATEREQUAL:
8286 return fold_builtin_unordered_cmp (loc, fndecl,
8287 arg0, arg1, UNLT_EXPR, LT_EXPR);
8288 case BUILT_IN_ISLESS:
8289 return fold_builtin_unordered_cmp (loc, fndecl,
8290 arg0, arg1, UNGE_EXPR, GE_EXPR);
8291 case BUILT_IN_ISLESSEQUAL:
8292 return fold_builtin_unordered_cmp (loc, fndecl,
8293 arg0, arg1, UNGT_EXPR, GT_EXPR);
8294 case BUILT_IN_ISLESSGREATER:
8295 return fold_builtin_unordered_cmp (loc, fndecl,
8296 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8297 case BUILT_IN_ISUNORDERED:
8298 return fold_builtin_unordered_cmp (loc, fndecl,
8299 arg0, arg1, UNORDERED_EXPR,
8300 NOP_EXPR);
8301
8302 /* We do the folding for va_start in the expander. */
8303 case BUILT_IN_VA_START:
8304 break;
8305
8306 case BUILT_IN_OBJECT_SIZE:
8307 return fold_builtin_object_size (arg0, arg1);
8308
8309 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8310 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8311
8312 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8313 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8314
8315 default:
8316 break;
8317 }
8318 return NULL_TREE;
8319 }
8320
8321 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8322 and ARG2.
8323 This function returns NULL_TREE if no simplification was possible. */
8324
8325 static tree
8326 fold_builtin_3 (location_t loc, tree fndecl,
8327 tree arg0, tree arg1, tree arg2)
8328 {
8329 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8330 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8331
8332 if (TREE_CODE (arg0) == ERROR_MARK
8333 || TREE_CODE (arg1) == ERROR_MARK
8334 || TREE_CODE (arg2) == ERROR_MARK)
8335 return NULL_TREE;
8336
8337 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8338 arg0, arg1, arg2))
8339 return ret;
8340
8341 switch (fcode)
8342 {
8344 CASE_FLT_FN (BUILT_IN_SINCOS):
8345 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8346
8347 CASE_FLT_FN (BUILT_IN_FMA):
8348 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8349
8350 CASE_FLT_FN (BUILT_IN_REMQUO):
8351 if (validate_arg (arg0, REAL_TYPE)
8352 && validate_arg (arg1, REAL_TYPE)
8353 && validate_arg (arg2, POINTER_TYPE))
8354 return do_mpfr_remquo (arg0, arg1, arg2);
8355 break;
8356
8357 case BUILT_IN_BCMP:
8358 case BUILT_IN_MEMCMP:
8359 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8360
8361 case BUILT_IN_EXPECT:
8362 return fold_builtin_expect (loc, arg0, arg1, arg2);
8363
8364 case BUILT_IN_ADD_OVERFLOW:
8365 case BUILT_IN_SUB_OVERFLOW:
8366 case BUILT_IN_MUL_OVERFLOW:
8367 case BUILT_IN_ADD_OVERFLOW_P:
8368 case BUILT_IN_SUB_OVERFLOW_P:
8369 case BUILT_IN_MUL_OVERFLOW_P:
8370 case BUILT_IN_SADD_OVERFLOW:
8371 case BUILT_IN_SADDL_OVERFLOW:
8372 case BUILT_IN_SADDLL_OVERFLOW:
8373 case BUILT_IN_SSUB_OVERFLOW:
8374 case BUILT_IN_SSUBL_OVERFLOW:
8375 case BUILT_IN_SSUBLL_OVERFLOW:
8376 case BUILT_IN_SMUL_OVERFLOW:
8377 case BUILT_IN_SMULL_OVERFLOW:
8378 case BUILT_IN_SMULLL_OVERFLOW:
8379 case BUILT_IN_UADD_OVERFLOW:
8380 case BUILT_IN_UADDL_OVERFLOW:
8381 case BUILT_IN_UADDLL_OVERFLOW:
8382 case BUILT_IN_USUB_OVERFLOW:
8383 case BUILT_IN_USUBL_OVERFLOW:
8384 case BUILT_IN_USUBLL_OVERFLOW:
8385 case BUILT_IN_UMUL_OVERFLOW:
8386 case BUILT_IN_UMULL_OVERFLOW:
8387 case BUILT_IN_UMULLL_OVERFLOW:
8388 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8389
8390 default:
8391 break;
8392 }
8393 return NULL_TREE;
8394 }
8395
8396 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8397 arguments. IGNORE is true if the result of the
8398 function call is ignored. This function returns NULL_TREE if no
8399 simplification was possible. */
8400
8401 tree
8402 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8403 {
8404 tree ret = NULL_TREE;
8405
8406 switch (nargs)
8407 {
8408 case 0:
8409 ret = fold_builtin_0 (loc, fndecl);
8410 break;
8411 case 1:
8412 ret = fold_builtin_1 (loc, fndecl, args[0]);
8413 break;
8414 case 2:
8415 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8416 break;
8417 case 3:
8418 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8419 break;
8420 default:
8421 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8422 break;
8423 }
8424 if (ret)
8425 {
8426 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8427 SET_EXPR_LOCATION (ret, loc);
8428 TREE_NO_WARNING (ret) = 1;
8429 return ret;
8430 }
8431 return NULL_TREE;
8432 }
8433
8434 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8435 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8436 of arguments in ARGS to be omitted. OLDNARGS is the number of
8437 elements in ARGS. */
8438
8439 static tree
8440 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8441 int skip, tree fndecl, int n, va_list newargs)
8442 {
8443 int nargs = oldnargs - skip + n;
8444 tree *buffer;
8445
8446 if (n > 0)
8447 {
8448 int i, j;
8449
8450 buffer = XALLOCAVEC (tree, nargs);
8451 for (i = 0; i < n; i++)
8452 buffer[i] = va_arg (newargs, tree);
8453 for (j = skip; j < oldnargs; j++, i++)
8454 buffer[i] = args[j];
8455 }
8456 else
8457 buffer = args + skip;
8458
8459 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8460 }
8461
8462 /* Return true if FNDECL shouldn't be folded right now.
8463 If a built-in function has an always_inline attribute wrapper,
8464 defer folding it until after always_inline functions have been
8465 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8466 might not be performed. */
8467
8468 bool
8469 avoid_folding_inline_builtin (tree fndecl)
8470 {
8471 return (DECL_DECLARED_INLINE_P (fndecl)
8472 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8473 && cfun
8474 && !cfun->always_inline_functions_inlined
8475 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8476 }
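
/* Editor's illustrative sketch, not part of the original sources:
   glibc's fortified headers declare roughly

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *dest, const char *src)
     { return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1)); }

   so a strcpy call must not be folded before this wrapper is inlined,
   or the _chk object-size check would be skipped.  */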
8477
8478 /* A wrapper function for builtin folding that prevents warnings for
8479 "statement without effect" and the like, caused by removing the
8480 call node earlier than the warning is generated. */
8481
8482 tree
8483 fold_call_expr (location_t loc, tree exp, bool ignore)
8484 {
8485 tree ret = NULL_TREE;
8486 tree fndecl = get_callee_fndecl (exp);
8487 if (fndecl
8488 && TREE_CODE (fndecl) == FUNCTION_DECL
8489 && DECL_BUILT_IN (fndecl)
8490 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8491 yet. Defer folding until we see all the arguments
8492 (after inlining). */
8493 && !CALL_EXPR_VA_ARG_PACK (exp))
8494 {
8495 int nargs = call_expr_nargs (exp);
8496
8497 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8498 instead the last argument is __builtin_va_arg_pack (). Defer folding
8499 even in that case, until arguments are finalized. */
8500 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8501 {
8502 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8503 if (fndecl2
8504 && TREE_CODE (fndecl2) == FUNCTION_DECL
8505 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8506 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8507 return NULL_TREE;
8508 }
8509
8510 if (avoid_folding_inline_builtin (fndecl))
8511 return NULL_TREE;
8512
8513 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8514 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8515 CALL_EXPR_ARGP (exp), ignore);
8516 else
8517 {
8518 tree *args = CALL_EXPR_ARGP (exp);
8519 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8520 if (ret)
8521 return ret;
8522 }
8523 }
8524 return NULL_TREE;
8525 }
8526
8527 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8528 N arguments are passed in the array ARGARRAY. Return a folded
8529 expression or NULL_TREE if no simplification was possible. */
8530
8531 tree
8532 fold_builtin_call_array (location_t loc, tree,
8533 tree fn,
8534 int n,
8535 tree *argarray)
8536 {
8537 if (TREE_CODE (fn) != ADDR_EXPR)
8538 return NULL_TREE;
8539
8540 tree fndecl = TREE_OPERAND (fn, 0);
8541 if (TREE_CODE (fndecl) == FUNCTION_DECL
8542 && DECL_BUILT_IN (fndecl))
8543 {
8544 /* If last argument is __builtin_va_arg_pack (), arguments to this
8545 function are not finalized yet. Defer folding until they are. */
8546 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8547 {
8548 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8549 if (fndecl2
8550 && TREE_CODE (fndecl2) == FUNCTION_DECL
8551 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8552 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8553 return NULL_TREE;
8554 }
8555 if (avoid_folding_inline_builtin (fndecl))
8556 return NULL_TREE;
8557 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8558 return targetm.fold_builtin (fndecl, n, argarray, false);
8559 else
8560 return fold_builtin_n (loc, fndecl, argarray, n, false);
8561 }
8562
8563 return NULL_TREE;
8564 }
8565
8566 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8567 along with N new arguments specified as the "..." parameters. SKIP
8568 is the number of arguments in EXP to be omitted. This function is used
8569 to do varargs-to-varargs transformations. */
8570
8571 static tree
8572 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8573 {
8574 va_list ap;
8575 tree t;
8576
8577 va_start (ap, n);
8578 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8579 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8580 va_end (ap);
8581
8582 return t;
8583 }
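/* A sketch of how rewrite_call_expr composes the new argument list
   (hypothetical decls, for illustration only): given EXP representing
   foo (a, b, x, y), the call

     rewrite_call_expr (loc, exp, 2, bar_decl, 1, c);

   skips the first two arguments of EXP, prepends the one new argument,
   and builds bar (c, x, y).  */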
8584
8585 /* Validate a single argument ARG against a tree code CODE representing
8586 a type. */
8587
8588 static bool
8589 validate_arg (const_tree arg, enum tree_code code)
8590 {
8591 if (!arg)
8592 return false;
8593 else if (code == POINTER_TYPE)
8594 return POINTER_TYPE_P (TREE_TYPE (arg));
8595 else if (code == INTEGER_TYPE)
8596 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8597 return code == TREE_CODE (TREE_TYPE (arg));
8598 }
8599
8600 /* This function validates the types of a function call argument list
8601 against a specified list of tree_codes. If the last specifier is a 0,
8602 that represents an ellipsis; otherwise the last specifier must be a
8603 VOID_TYPE.
8604
8605 This is the GIMPLE version of validate_arglist. Eventually we want to
8606 completely convert builtins.c to work from GIMPLEs and the tree based
8607 validate_arglist will then be removed. */
8608
8609 bool
8610 validate_gimple_arglist (const gcall *call, ...)
8611 {
8612 enum tree_code code;
8613 bool res = false;
8614 va_list ap;
8615 const_tree arg;
8616 size_t i;
8617
8618 va_start (ap, call);
8619 i = 0;
8620
8621 do
8622 {
8623 code = (enum tree_code) va_arg (ap, int);
8624 switch (code)
8625 {
8626 case 0:
8627 /* This signifies an ellipsis; any further arguments are all ok. */
8628 res = true;
8629 goto end;
8630 case VOID_TYPE:
8631 /* This signifies the end of the list: if no arguments remain,
8632 return true, otherwise return false. */
8633 res = (i == gimple_call_num_args (call));
8634 goto end;
8635 default:
8636 /* If no parameters remain or the parameter's code does not
8637 match the specified code, return false. Otherwise continue
8638 checking any remaining arguments. */
8639 arg = gimple_call_arg (call, i++);
8640 if (!validate_arg (arg, code))
8641 goto end;
8642 break;
8643 }
8644 }
8645 while (1);
8646
8647 /* We need gotos here since we can only call va_end once in a
8648 function. */
8649 end: ;
8650 va_end (ap);
8651
8652 return res;
8653 }
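/* Usage sketch: a builtin taking exactly (pointer, integer) arguments
   would be checked with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE);

   while a builtin taking a pointer followed by arbitrary trailing
   arguments would use the 0 (ellipsis) terminator:

     validate_gimple_arglist (call, POINTER_TYPE, 0);  */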
8654
8655 /* Default target-specific builtin expander that does nothing. */
8656
8657 rtx
8658 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8659 rtx target ATTRIBUTE_UNUSED,
8660 rtx subtarget ATTRIBUTE_UNUSED,
8661 machine_mode mode ATTRIBUTE_UNUSED,
8662 int ignore ATTRIBUTE_UNUSED)
8663 {
8664 return NULL_RTX;
8665 }
8666
8667 /* Returns true if EXP represents data that would potentially reside
8668 in a readonly section. */
8669
8670 bool
8671 readonly_data_expr (tree exp)
8672 {
8673 STRIP_NOPS (exp);
8674
8675 if (TREE_CODE (exp) != ADDR_EXPR)
8676 return false;
8677
8678 exp = get_base_address (TREE_OPERAND (exp, 0));
8679 if (!exp)
8680 return false;
8681
8682 /* Make sure we call decl_readonly_section only for trees it
8683 can handle (since it returns true for everything it doesn't
8684 understand). */
8685 if (TREE_CODE (exp) == STRING_CST
8686 || TREE_CODE (exp) == CONSTRUCTOR
8687 || (VAR_P (exp) && TREE_STATIC (exp)))
8688 return decl_readonly_section (exp, 0);
8689 else
8690 return false;
8691 }
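/* For example, assuming the target categorizes such data into a
   read-only section, readonly_data_expr holds for the address of a
   string literal ("xyz", a STRING_CST) and for &table[0] where

     static const char table[] = "abc";

   (a TREE_STATIC VAR_DECL), but not for the address of an automatic
   or writable variable.  */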
8692
8693 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8694 to the call, and TYPE is its return type.
8695
8696 Return NULL_TREE if no simplification was possible, otherwise return the
8697 simplified form of the call as a tree.
8698
8699 The simplified form may be a constant or other expression which
8700 computes the same value, but in a more efficient manner (including
8701 calls to other builtin functions).
8702
8703 The call may contain arguments which need to be evaluated, but
8704 which are not useful to determine the result of the call. In
8705 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8706 COMPOUND_EXPR will be an argument which must be evaluated.
8707 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8708 COMPOUND_EXPR in the chain will contain the tree for the simplified
8709 form of the builtin function call. */
8710
8711 static tree
8712 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8713 {
8714 if (!validate_arg (s1, POINTER_TYPE)
8715 || !validate_arg (s2, POINTER_TYPE))
8716 return NULL_TREE;
8717 else
8718 {
8719 tree fn;
8720 const char *p1, *p2;
8721
8722 p2 = c_getstr (s2);
8723 if (p2 == NULL)
8724 return NULL_TREE;
8725
8726 p1 = c_getstr (s1);
8727 if (p1 != NULL)
8728 {
8729 const char *r = strstr (p1, p2);
8730 tree tem;
8731
8732 if (r == NULL)
8733 return build_int_cst (TREE_TYPE (s1), 0);
8734
8735 /* Return an offset into the constant string argument. */
8736 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8737 return fold_convert_loc (loc, type, tem);
8738 }
8739
8740 /* The argument is const char *, and the result is char *, so we need
8741 a type conversion here to avoid a warning. */
8742 if (p2[0] == '\0')
8743 return fold_convert_loc (loc, type, s1);
8744
8745 if (p2[1] != '\0')
8746 return NULL_TREE;
8747
8748 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8749 if (!fn)
8750 return NULL_TREE;
8751
8752 /* New argument list transforming strstr(s1, s2) to
8753 strchr(s1, s2[0]). */
8754 return build_call_expr_loc (loc, fn, 2, s1,
8755 build_int_cst (integer_type_node, p2[0]));
8756 }
8757 }
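/* Examples of the strstr folds above, assuming the usual library
   semantics:

     strstr (s, "")          ->  (char *) s
     strstr (s, "l")         ->  strchr (s, 'l')
     strstr ("hello", "lo")  ->  "hello" + 3    (constant offset)
     strstr ("hello", "xy")  ->  (char *) 0     (not found)  */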
8758
8759 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8760 to the call, and TYPE is its return type.
8761
8762 Return NULL_TREE if no simplification was possible, otherwise return the
8763 simplified form of the call as a tree.
8764
8765 The simplified form may be a constant or other expression which
8766 computes the same value, but in a more efficient manner (including
8767 calls to other builtin functions).
8768
8769 The call may contain arguments which need to be evaluated, but
8770 which are not useful to determine the result of the call. In
8771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8772 COMPOUND_EXPR will be an argument which must be evaluated.
8773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8774 COMPOUND_EXPR in the chain will contain the tree for the simplified
8775 form of the builtin function call. */
8776
8777 static tree
8778 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8779 {
8780 if (!validate_arg (s1, POINTER_TYPE)
8781 || !validate_arg (s2, POINTER_TYPE))
8782 return NULL_TREE;
8783 else
8784 {
8785 tree fn;
8786 const char *p1, *p2;
8787
8788 p2 = c_getstr (s2);
8789 if (p2 == NULL)
8790 return NULL_TREE;
8791
8792 p1 = c_getstr (s1);
8793 if (p1 != NULL)
8794 {
8795 const char *r = strpbrk (p1, p2);
8796 tree tem;
8797
8798 if (r == NULL)
8799 return build_int_cst (TREE_TYPE (s1), 0);
8800
8801 /* Return an offset into the constant string argument. */
8802 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8803 return fold_convert_loc (loc, type, tem);
8804 }
8805
8806 if (p2[0] == '\0')
8807 /* strpbrk(x, "") == NULL.
8808 Evaluate and ignore s1 in case it had side-effects. */
8809 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8810
8811 if (p2[1] != '\0')
8812 return NULL_TREE; /* Really call strpbrk. */
8813
8814 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8815 if (!fn)
8816 return NULL_TREE;
8817
8818 /* New argument list transforming strpbrk(s1, s2) to
8819 strchr(s1, s2[0]). */
8820 return build_call_expr_loc (loc, fn, 2, s1,
8821 build_int_cst (integer_type_node, p2[0]));
8822 }
8823 }
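/* Examples of the strpbrk folds above:

     strpbrk (s, "")      ->  NULL (s is still evaluated for
				    side-effects)
     strpbrk (s, "c")     ->  strchr (s, 'c')
     strpbrk ("ab", "b")  ->  "ab" + 1    (constant offset)  */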
8824
8825 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8826 to the call.
8827
8828 Return NULL_TREE if no simplification was possible, otherwise return the
8829 simplified form of the call as a tree.
8830
8831 The simplified form may be a constant or other expression which
8832 computes the same value, but in a more efficient manner (including
8833 calls to other builtin functions).
8834
8835 The call may contain arguments which need to be evaluated, but
8836 which are not useful to determine the result of the call. In
8837 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8838 COMPOUND_EXPR will be an argument which must be evaluated.
8839 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8840 COMPOUND_EXPR in the chain will contain the tree for the simplified
8841 form of the builtin function call. */
8842
8843 static tree
8844 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8845 {
8846 if (!validate_arg (s1, POINTER_TYPE)
8847 || !validate_arg (s2, POINTER_TYPE))
8848 return NULL_TREE;
8849 else
8850 {
8851 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8852
8853 /* If either argument is "", return NULL_TREE. */
8854 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8855 /* Evaluate and ignore both arguments in case either one has
8856 side-effects. */
8857 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8858 s1, s2);
8859 return NULL_TREE;
8860 }
8861 }
8862
8863 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8864 to the call.
8865
8866 Return NULL_TREE if no simplification was possible, otherwise return the
8867 simplified form of the call as a tree.
8868
8869 The simplified form may be a constant or other expression which
8870 computes the same value, but in a more efficient manner (including
8871 calls to other builtin functions).
8872
8873 The call may contain arguments which need to be evaluated, but
8874 which are not useful to determine the result of the call. In
8875 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8876 COMPOUND_EXPR will be an argument which must be evaluated.
8877 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8878 COMPOUND_EXPR in the chain will contain the tree for the simplified
8879 form of the builtin function call. */
8880
8881 static tree
8882 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8883 {
8884 if (!validate_arg (s1, POINTER_TYPE)
8885 || !validate_arg (s2, POINTER_TYPE))
8886 return NULL_TREE;
8887 else
8888 {
8889 /* If the first argument is "", return NULL_TREE. */
8890 const char *p1 = c_getstr (s1);
8891 if (p1 && *p1 == '\0')
8892 {
8893 /* Evaluate and ignore argument s2 in case it has
8894 side-effects. */
8895 return omit_one_operand_loc (loc, size_type_node,
8896 size_zero_node, s2);
8897 }
8898
8899 /* If the second argument is "", return __builtin_strlen(s1). */
8900 const char *p2 = c_getstr (s2);
8901 if (p2 && *p2 == '\0')
8902 {
8903 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8904
8905 /* If the replacement _DECL isn't initialized, don't do the
8906 transformation. */
8907 if (!fn)
8908 return NULL_TREE;
8909
8910 return build_call_expr_loc (loc, fn, 1, s1);
8911 }
8912 return NULL_TREE;
8913 }
8914 }
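/* Examples of the strspn/strcspn folds above:

     strspn ("", s)   ->  0          strspn (s, "")   ->  0
     strcspn ("", s)  ->  0          strcspn (s, "")  ->  strlen (s)

   In each case arguments with side-effects are still evaluated via
   the omit_*_operand* helpers.  */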
8915
8916 /* Fold the next_arg or va_start call EXP. Return true if an error or
8917 warning was produced, false otherwise. This is done so that we don't
8918 output the same diagnostic more than once. */
8919
8920 bool
8921 fold_builtin_next_arg (tree exp, bool va_start_p)
8922 {
8923 tree fntype = TREE_TYPE (current_function_decl);
8924 int nargs = call_expr_nargs (exp);
8925 tree arg;
8926 /* There is a good chance the current input_location points inside the
8927 definition of the va_start macro (perhaps on the token for the
8928 builtin) in a system header, so warnings would not be emitted.
8929 Use the location in real source code. */
8930 source_location current_location =
8931 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8932 NULL);
8933
8934 if (!stdarg_p (fntype))
8935 {
8936 error ("%<va_start%> used in function with fixed args");
8937 return true;
8938 }
8939
8940 if (va_start_p)
8941 {
8942 if (nargs != 2)
8943 {
8944 error ("wrong number of arguments to function %<va_start%>");
8945 return true;
8946 }
8947 arg = CALL_EXPR_ARG (exp, 1);
8948 }
8949 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
8950 we have checked the arguments and, if needed, issued a warning. */
8951 else
8952 {
8953 if (nargs == 0)
8954 {
8955 /* Evidently an out of date version of <stdarg.h>; can't validate
8956 va_start's second argument, but can still work as intended. */
8957 warning_at (current_location,
8958 OPT_Wvarargs,
8959 "%<__builtin_next_arg%> called without an argument");
8960 return true;
8961 }
8962 else if (nargs > 1)
8963 {
8964 error ("wrong number of arguments to function %<__builtin_next_arg%>");
8965 return true;
8966 }
8967 arg = CALL_EXPR_ARG (exp, 0);
8968 }
8969
8970 if (TREE_CODE (arg) == SSA_NAME)
8971 arg = SSA_NAME_VAR (arg);
8972
8973 /* We destructively modify the call to be __builtin_va_start (ap, 0)
8974 or __builtin_next_arg (0) the first time we see it, after checking
8975 the arguments and if needed issuing a warning. */
8976 if (!integer_zerop (arg))
8977 {
8978 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8979
8980 /* Strip off all nops for the sake of the comparison. This
8981 is not quite the same as STRIP_NOPS. It does more.
8982 We must also strip off INDIRECT_EXPR for C++ reference
8983 parameters. */
8984 while (CONVERT_EXPR_P (arg)
8985 || TREE_CODE (arg) == INDIRECT_REF)
8986 arg = TREE_OPERAND (arg, 0);
8987 if (arg != last_parm)
8988 {
8989 /* FIXME: Sometimes with the tree optimizers we end up with an
8990 argument that is not the last one even though the user wrote
8991 the last argument. We just warn and treat the argument as if
8992 it were the last one, which can result in wrong code being
8993 generated. */
8994 warning_at (current_location,
8995 OPT_Wvarargs,
8996 "second parameter of %<va_start%> not last named argument");
8997 }
8998
8999 /* Undefined by C99 7.15.1.4p4 (va_start):
9000 "If the parameter parmN is declared with the register storage
9001 class, with a function or array type, or with a type that is
9002 not compatible with the type that results after application of
9003 the default argument promotions, the behavior is undefined."
9004 */
9005 else if (DECL_REGISTER (arg))
9006 {
9007 warning_at (current_location,
9008 OPT_Wvarargs,
9009 "undefined behavior when second parameter of "
9010 "%<va_start%> is declared with %<register%> storage");
9011 }
9012
9013 /* We want to verify the second parameter just once before the tree
9014 optimizers are run and then avoid keeping it in the tree,
9015 as otherwise we could warn even for correct code like:
9016 void foo (int i, ...)
9017 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9018 if (va_start_p)
9019 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9020 else
9021 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9022 }
9023 return false;
9024 }
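/* The diagnostics above fire on code such as (illustrative):

     #include <stdarg.h>
     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   (warns: second parameter of va_start not
			    last named argument)
       va_end (ap);
     }

   and va_start in a function without an ellipsis is rejected with
   "va_start used in function with fixed args".  */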
9025
9026
9027 /* Expand a call EXP to __builtin_object_size. */
9028
9029 static rtx
9030 expand_builtin_object_size (tree exp)
9031 {
9032 tree ost;
9033 int object_size_type;
9034 tree fndecl = get_callee_fndecl (exp);
9035
9036 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9037 {
9038 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9039 exp, fndecl);
9040 expand_builtin_trap ();
9041 return const0_rtx;
9042 }
9043
9044 ost = CALL_EXPR_ARG (exp, 1);
9045 STRIP_NOPS (ost);
9046
9047 if (TREE_CODE (ost) != INTEGER_CST
9048 || tree_int_cst_sgn (ost) < 0
9049 || compare_tree_int (ost, 3) > 0)
9050 {
9051 error ("%Klast argument of %D is not integer constant between 0 and 3",
9052 exp, fndecl);
9053 expand_builtin_trap ();
9054 return const0_rtx;
9055 }
9056
9057 object_size_type = tree_to_shwi (ost);
9058
9059 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9060 }
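/* So, when the object size could not be determined by earlier passes,
   the expansion yields the documented defaults, e.g.:

     __builtin_object_size (p, 0)  ->  (size_t) -1    (types 0 and 1)
     __builtin_object_size (p, 2)  ->  (size_t) 0     (types 2 and 3)  */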
9061
9062 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9063 FCODE is the BUILT_IN_* to use.
9064 Return NULL_RTX if we failed; the caller should emit a normal call,
9065 otherwise try to get the result in TARGET, if convenient (and in
9066 mode MODE if that's convenient). */
9067
9068 static rtx
9069 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9070 enum built_in_function fcode)
9071 {
9072 tree dest, src, len, size;
9073
9074 if (!validate_arglist (exp,
9075 POINTER_TYPE,
9076 fcode == BUILT_IN_MEMSET_CHK
9077 ? INTEGER_TYPE : POINTER_TYPE,
9078 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9079 return NULL_RTX;
9080
9081 dest = CALL_EXPR_ARG (exp, 0);
9082 src = CALL_EXPR_ARG (exp, 1);
9083 len = CALL_EXPR_ARG (exp, 2);
9084 size = CALL_EXPR_ARG (exp, 3);
9085
9086 if (! tree_fits_uhwi_p (size))
9087 return NULL_RTX;
9088
9089 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9090 {
9091 tree fn;
9092
9093 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9094 {
9095 warning_at (tree_nonartificial_location (exp),
9096 0, "%Kcall to %D will always overflow destination buffer",
9097 exp, get_callee_fndecl (exp));
9098 return NULL_RTX;
9099 }
9100
9101 fn = NULL_TREE;
9102 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9103 mem{cpy,pcpy,move,set} is available. */
9104 switch (fcode)
9105 {
9106 case BUILT_IN_MEMCPY_CHK:
9107 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9108 break;
9109 case BUILT_IN_MEMPCPY_CHK:
9110 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9111 break;
9112 case BUILT_IN_MEMMOVE_CHK:
9113 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9114 break;
9115 case BUILT_IN_MEMSET_CHK:
9116 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9117 break;
9118 default:
9119 break;
9120 }
9121
9122 if (! fn)
9123 return NULL_RTX;
9124
9125 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9126 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9127 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9128 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9129 }
9130 else if (fcode == BUILT_IN_MEMSET_CHK)
9131 return NULL_RTX;
9132 else
9133 {
9134 unsigned int dest_align = get_pointer_alignment (dest);
9135
9136 /* If DEST is not a pointer type, call the normal function. */
9137 if (dest_align == 0)
9138 return NULL_RTX;
9139
9140 /* If SRC and DEST are the same (and not volatile), do nothing. */
9141 if (operand_equal_p (src, dest, 0))
9142 {
9143 tree expr;
9144
9145 if (fcode != BUILT_IN_MEMPCPY_CHK)
9146 {
9147 /* Evaluate and ignore LEN in case it has side-effects. */
9148 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9149 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9150 }
9151
9152 expr = fold_build_pointer_plus (dest, len);
9153 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9154 }
9155
9156 /* __memmove_chk special case. */
9157 if (fcode == BUILT_IN_MEMMOVE_CHK)
9158 {
9159 unsigned int src_align = get_pointer_alignment (src);
9160
9161 if (src_align == 0)
9162 return NULL_RTX;
9163
9164 /* If SRC is categorized into a read-only section, we can use the
9165 normal __memcpy_chk. */
9166 if (readonly_data_expr (src))
9167 {
9168 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9169 if (!fn)
9170 return NULL_RTX;
9171 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9172 dest, src, len, size);
9173 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9174 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9175 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9176 }
9177 }
9178 return NULL_RTX;
9179 }
9180 }
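/* For example (illustrative):

     char buf[16];
     __builtin___memcpy_chk (buf, src, 8,
			     __builtin_object_size (buf, 0));

   has a known length (8) no larger than the known size (16), so it
   expands as a plain memcpy; with length 32 instead, the expander
   warns that the call will always overflow the destination buffer and
   leaves the checking library call to be emitted.  */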
9181
9182 /* Emit a warning if a buffer overflow is detected at compile time. */
9183
9184 static void
9185 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9186 {
9187 int is_strlen = 0;
9188 tree len, size;
9189 location_t loc = tree_nonartificial_location (exp);
9190
9191 switch (fcode)
9192 {
9193 case BUILT_IN_STRCPY_CHK:
9194 case BUILT_IN_STPCPY_CHK:
9195 /* For __strcat_chk the warning will be emitted only if overflowing
9196 by at least strlen (dest) + 1 bytes. */
9197 case BUILT_IN_STRCAT_CHK:
9198 len = CALL_EXPR_ARG (exp, 1);
9199 size = CALL_EXPR_ARG (exp, 2);
9200 is_strlen = 1;
9201 break;
9202 case BUILT_IN_STRNCAT_CHK:
9203 case BUILT_IN_STRNCPY_CHK:
9204 case BUILT_IN_STPNCPY_CHK:
9205 len = CALL_EXPR_ARG (exp, 2);
9206 size = CALL_EXPR_ARG (exp, 3);
9207 break;
9208 case BUILT_IN_SNPRINTF_CHK:
9209 case BUILT_IN_VSNPRINTF_CHK:
9210 len = CALL_EXPR_ARG (exp, 1);
9211 size = CALL_EXPR_ARG (exp, 3);
9212 break;
9213 default:
9214 gcc_unreachable ();
9215 }
9216
9217 if (!len || !size)
9218 return;
9219
9220 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9221 return;
9222
9223 if (is_strlen)
9224 {
9225 len = c_strlen (len, 1);
9226 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9227 return;
9228 }
9229 else if (fcode == BUILT_IN_STRNCAT_CHK)
9230 {
9231 tree src = CALL_EXPR_ARG (exp, 1);
9232 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9233 return;
9234 src = c_strlen (src, 1);
9235 if (! src || ! tree_fits_uhwi_p (src))
9236 {
9237 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9238 exp, get_callee_fndecl (exp));
9239 return;
9240 }
9241 else if (tree_int_cst_lt (src, size))
9242 return;
9243 }
9244 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9245 return;
9246
9247 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9248 exp, get_callee_fndecl (exp));
9249 }
9250
9251 /* Emit a warning if a buffer overflow is detected at compile time
9252 in __sprintf_chk/__vsprintf_chk calls. */
9253
9254 static void
9255 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9256 {
9257 tree size, len, fmt;
9258 const char *fmt_str;
9259 int nargs = call_expr_nargs (exp);
9260
9261 /* Verify the required arguments in the original call. */
9262
9263 if (nargs < 4)
9264 return;
9265 size = CALL_EXPR_ARG (exp, 2);
9266 fmt = CALL_EXPR_ARG (exp, 3);
9267
9268 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9269 return;
9270
9271 /* Check whether the format is a literal string constant. */
9272 fmt_str = c_getstr (fmt);
9273 if (fmt_str == NULL)
9274 return;
9275
9276 if (!init_target_chars ())
9277 return;
9278
9279 /* If the format doesn't contain % args or %%, we know its size. */
9280 if (strchr (fmt_str, target_percent) == 0)
9281 len = build_int_cstu (size_type_node, strlen (fmt_str));
9282 /* If the format is "%s" and the first ... argument is a string
9283 literal, we know its size too. */
9284 else if (fcode == BUILT_IN_SPRINTF_CHK
9285 && strcmp (fmt_str, target_percent_s) == 0)
9286 {
9287 tree arg;
9288
9289 if (nargs < 5)
9290 return;
9291 arg = CALL_EXPR_ARG (exp, 4);
9292 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9293 return;
9294
9295 len = c_strlen (arg, 1);
9296 if (!len || ! tree_fits_uhwi_p (len))
9297 return;
9298 }
9299 else
9300 return;
9301
9302 if (! tree_int_cst_lt (len, size))
9303 warning_at (tree_nonartificial_location (exp),
9304 0, "%Kcall to %D will always overflow destination buffer",
9305 exp, get_callee_fndecl (exp));
9306 }
9307
9308 /* Emit a warning if free is called with the address of a variable. */
9309
9310 static void
9311 maybe_emit_free_warning (tree exp)
9312 {
9313 tree arg = CALL_EXPR_ARG (exp, 0);
9314
9315 STRIP_NOPS (arg);
9316 if (TREE_CODE (arg) != ADDR_EXPR)
9317 return;
9318
9319 arg = get_base_address (TREE_OPERAND (arg, 0));
9320 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9321 return;
9322
9323 if (SSA_VAR_P (arg))
9324 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9325 "%Kattempt to free a non-heap object %qD", exp, arg);
9326 else
9327 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9328 "%Kattempt to free a non-heap object", exp);
9329 }
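/* For example (illustrative):

     int x;
     free (&x);   triggers -Wfree-nonheap-object:
		  "attempt to free a non-heap object 'x'"

   while freeing a pointer whose base address cannot be traced back to
   a declared variable is left alone.  */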
9330
9331 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9332 if possible. */
9333
9334 static tree
9335 fold_builtin_object_size (tree ptr, tree ost)
9336 {
9337 unsigned HOST_WIDE_INT bytes;
9338 int object_size_type;
9339
9340 if (!validate_arg (ptr, POINTER_TYPE)
9341 || !validate_arg (ost, INTEGER_TYPE))
9342 return NULL_TREE;
9343
9344 STRIP_NOPS (ost);
9345
9346 if (TREE_CODE (ost) != INTEGER_CST
9347 || tree_int_cst_sgn (ost) < 0
9348 || compare_tree_int (ost, 3) > 0)
9349 return NULL_TREE;
9350
9351 object_size_type = tree_to_shwi (ost);
9352
9353 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9354 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9355 and (size_t) 0 for types 2 and 3. */
9356 if (TREE_SIDE_EFFECTS (ptr))
9357 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9358
9359 if (TREE_CODE (ptr) == ADDR_EXPR)
9360 {
9361 compute_builtin_object_size (ptr, object_size_type, &bytes);
9362 if (wi::fits_to_tree_p (bytes, size_type_node))
9363 return build_int_cstu (size_type_node, bytes);
9364 }
9365 else if (TREE_CODE (ptr) == SSA_NAME)
9366 {
9367 /* If object size is not known yet, delay folding until
9368 later. Maybe subsequent passes will help determine
9369 it. */
9370 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9371 && wi::fits_to_tree_p (bytes, size_type_node))
9372 return build_int_cstu (size_type_node, bytes);
9373 }
9374
9375 return NULL_TREE;
9376 }
9377
9378 /* Builtins with folding operations that operate on "..." arguments
9379 need special handling; we need to store the arguments in a convenient
9380 data structure before attempting any folding. Fortunately there are
9381 only a few builtins that fall into this category. FNDECL is the
9382 function, EXP is the CALL_EXPR for the call. */
9383
9384 static tree
9385 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9386 {
9387 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9388 tree ret = NULL_TREE;
9389
9390 switch (fcode)
9391 {
9392 case BUILT_IN_FPCLASSIFY:
9393 ret = fold_builtin_fpclassify (loc, args, nargs);
9394 break;
9395
9396 default:
9397 break;
9398 }
9399 if (ret)
9400 {
9401 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9402 SET_EXPR_LOCATION (ret, loc);
9403 TREE_NO_WARNING (ret) = 1;
9404 return ret;
9405 }
9406 return NULL_TREE;
9407 }
9408
9409 /* Initialize format string characters in the target charset. */
9410
9411 bool
9412 init_target_chars (void)
9413 {
9414 static bool init;
9415 if (!init)
9416 {
9417 target_newline = lang_hooks.to_target_charset ('\n');
9418 target_percent = lang_hooks.to_target_charset ('%');
9419 target_c = lang_hooks.to_target_charset ('c');
9420 target_s = lang_hooks.to_target_charset ('s');
9421 if (target_newline == 0 || target_percent == 0 || target_c == 0
9422 || target_s == 0)
9423 return false;
9424
9425 target_percent_c[0] = target_percent;
9426 target_percent_c[1] = target_c;
9427 target_percent_c[2] = '\0';
9428
9429 target_percent_s[0] = target_percent;
9430 target_percent_s[1] = target_s;
9431 target_percent_s[2] = '\0';
9432
9433 target_percent_s_newline[0] = target_percent;
9434 target_percent_s_newline[1] = target_s;
9435 target_percent_s_newline[2] = target_newline;
9436 target_percent_s_newline[3] = '\0';
9437
9438 init = true;
9439 }
9440 return true;
9441 }
9442
9443 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9444 and no overflow/underflow occurred. INEXACT is true if M was not
9445 exactly calculated. TYPE is the tree type for the result. This
9446 function assumes that you cleared the MPFR flags before
9447 calculating M, so that any flag set since then can be detected
9448 on entry here. Return NULL_TREE if any checks fail. */
9449
9450 static tree
9451 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9452 {
9453 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9454 overflow/underflow occurred. If -frounding-math, proceed iff the
9455 result of calling FUNC was exact. */
9456 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9457 && (!flag_rounding_math || !inexact))
9458 {
9459 REAL_VALUE_TYPE rr;
9460
9461 real_from_mpfr (&rr, m, type, GMP_RNDN);
9462 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9463 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9464 but the mpfr_t is not, then we underflowed in the
9465 conversion. */
9466 if (real_isfinite (&rr)
9467 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9468 {
9469 REAL_VALUE_TYPE rmode;
9470
9471 real_convert (&rmode, TYPE_MODE (type), &rr);
9472 /* Proceed iff the specified mode can hold the value. */
9473 if (real_identical (&rmode, &rr))
9474 return build_real (type, rmode);
9475 }
9476 }
9477 return NULL_TREE;
9478 }
9479
9480 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9481 number and no overflow/underflow occurred. INEXACT is true if M
9482 was not exactly calculated. TYPE is the tree type for the result.
9483 This function assumes that you cleared the MPFR flags before
9484 calculating M, so that any flag set since then can be detected
9485 on entry here. Return NULL_TREE if any checks fail; if
9486 FORCE_CONVERT is true, the checks are bypassed. */
9487
9488 static tree
9489 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9490 {
9491 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9492 overflow/underflow occurred. If -frounding-math, proceed iff the
9493 result of calling FUNC was exact. */
9494 if (force_convert
9495 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9496 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9497 && (!flag_rounding_math || !inexact)))
9498 {
9499 REAL_VALUE_TYPE re, im;
9500
9501 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9502 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9503 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9504 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9505 but the mpfr_t is not, then we underflowed in the
9506 conversion. */
9507 if (force_convert
9508 || (real_isfinite (&re) && real_isfinite (&im)
9509 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9510 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9511 {
9512 REAL_VALUE_TYPE re_mode, im_mode;
9513
9514 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9515 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9516 /* Proceed iff the specified mode can hold the value. */
9517 if (force_convert
9518 || (real_identical (&re_mode, &re)
9519 && real_identical (&im_mode, &im)))
9520 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9521 build_real (TREE_TYPE (type), im_mode));
9522 }
9523 }
9524 return NULL_TREE;
9525 }
9526
9527 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9528 the pointer *(ARG_QUO) and return the result. The type is taken
9529 from the type of ARG0 and is used for setting the precision of the
9530 calculation and results. */
9531
9532 static tree
9533 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9534 {
9535 tree const type = TREE_TYPE (arg0);
9536 tree result = NULL_TREE;
9537
9538 STRIP_NOPS (arg0);
9539 STRIP_NOPS (arg1);
9540
9541 /* To proceed, MPFR must exactly represent the target floating point
9542 format, which only happens when the target base equals two. */
9543 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9544 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9545 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9546 {
9547 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9548 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9549
9550 if (real_isfinite (ra0) && real_isfinite (ra1))
9551 {
9552 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9553 const int prec = fmt->p;
9554 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9555 tree result_rem;
9556 long integer_quo;
9557 mpfr_t m0, m1;
9558
9559 mpfr_inits2 (prec, m0, m1, NULL);
9560 mpfr_from_real (m0, ra0, GMP_RNDN);
9561 mpfr_from_real (m1, ra1, GMP_RNDN);
9562 mpfr_clear_flags ();
9563 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9564 /* Remquo is independent of the rounding mode, so pass
9565 inexact=0 to do_mpfr_ckconv(). */
9566 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9567 mpfr_clears (m0, m1, NULL);
9568 if (result_rem)
9569 {
9570 /* MPFR calculates quo in the host's long so it may
9571 return more bits in quo than the target int can hold
9572 if sizeof(host long) > sizeof(target int). This can
9573 happen even for native compilers in LP64 mode. In
9574 these cases, reduce the quo value modulo the largest
9575 number that the target int can hold, leaving one
9576 bit for the sign. */
9577 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9578 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9579
9580 /* Dereference the quo pointer argument. */
9581 arg_quo = build_fold_indirect_ref (arg_quo);
9582 /* Proceed iff a valid pointer type was passed in. */
9583 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9584 {
9585 /* Set the value. */
9586 tree result_quo
9587 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9588 build_int_cst (TREE_TYPE (arg_quo),
9589 integer_quo));
9590 TREE_SIDE_EFFECTS (result_quo) = 1;
9591 /* Combine the quo assignment with the rem. */
9592 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9593 result_quo, result_rem));
9594 }
9595 }
9596 }
9597 }
9598 return result;
9599 }
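/* For example (illustrative), with constant arguments

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   the fold yields r == -1.0 with *q set to 2, since the quotient 5/3
   rounds to the nearest integer 2 and 5 - 2*3 == -1; the result is a
   COMPOUND_EXPR performing the *q assignment before producing the
   remainder.  */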
9600
9601 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9602 resulting value as a tree with type TYPE. The mpfr precision is
9603 set to the precision of TYPE. We assume that this mpfr function
9604 returns zero if the result could be calculated exactly within the
9605 requested precision. In addition, the integer pointer represented
9606 by ARG_SG will be dereferenced and set to the appropriate signgam
9607 (-1,1) value. */
9608
9609 static tree
9610 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9611 {
9612 tree result = NULL_TREE;
9613
9614 STRIP_NOPS (arg);
9615
9616 /* To proceed, MPFR must exactly represent the target floating point
9617 format, which only happens when the target base equals two. Also
9618 verify ARG is a constant and that ARG_SG is an int pointer. */
9619 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9620 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9621 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9622 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9623 {
9624 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9625
9626 /* In addition to NaN and Inf, the argument cannot be zero or a
9627 negative integer. */
9628 if (real_isfinite (ra)
9629 && ra->cl != rvc_zero
9630 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9631 {
9632 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9633 const int prec = fmt->p;
9634 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9635 int inexact, sg;
9636 mpfr_t m;
9637 tree result_lg;
9638
9639 mpfr_init2 (m, prec);
9640 mpfr_from_real (m, ra, GMP_RNDN);
9641 mpfr_clear_flags ();
9642 inexact = mpfr_lgamma (m, &sg, m, rnd);
9643 result_lg = do_mpfr_ckconv (m, type, inexact);
9644 mpfr_clear (m);
9645 if (result_lg)
9646 {
9647 tree result_sg;
9648
9649 /* Dereference the arg_sg pointer argument. */
9650 arg_sg = build_fold_indirect_ref (arg_sg);
9651 /* Assign the signgam value into *arg_sg. */
9652 result_sg = fold_build2 (MODIFY_EXPR,
9653 TREE_TYPE (arg_sg), arg_sg,
9654 build_int_cst (TREE_TYPE (arg_sg), sg));
9655 TREE_SIDE_EFFECTS (result_sg) = 1;
9656 /* Combine the signgam assignment with the lgamma result. */
9657 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9658 result_sg, result_lg));
9659 }
9660 }
9661 }
9662
9663 return result;
9664 }
9665
9666 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9667 mpc function FUNC on it and return the resulting value as a tree
9668 with type TYPE. The mpfr precision is set to the precision of
9669 TYPE. We assume that function FUNC returns zero if the result
9670 could be calculated exactly within the requested precision. If
9671 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9672 in the arguments and/or results. */
9673
9674 tree
9675 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9676 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9677 {
9678 tree result = NULL_TREE;
9679
9680 STRIP_NOPS (arg0);
9681 STRIP_NOPS (arg1);
9682
9683 /* To proceed, MPFR must exactly represent the target floating point
9684 format, which only happens when the target base equals two. */
9685 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9686 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9687 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9689 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9690 {
9691 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9692 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9693 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9694 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9695
9696 if (do_nonfinite
9697 || (real_isfinite (re0) && real_isfinite (im0)
9698 && real_isfinite (re1) && real_isfinite (im1)))
9699 {
9700 const struct real_format *const fmt =
9701 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9702 const int prec = fmt->p;
9703 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9704 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9705 int inexact;
9706 mpc_t m0, m1;
9707
9708 mpc_init2 (m0, prec);
9709 mpc_init2 (m1, prec);
9710 mpfr_from_real (mpc_realref (m0), re0, rnd);
9711 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9712 mpfr_from_real (mpc_realref (m1), re1, rnd);
9713 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9714 mpfr_clear_flags ();
9715 inexact = func (m0, m0, m1, crnd);
9716 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9717 mpc_clear (m0);
9718 mpc_clear (m1);
9719 }
9720 }
9721
9722 return result;
9723 }
9724
9725 /* A wrapper function for builtin folding that prevents warnings for
9726 "statement without effect" and the like, caused by removing the
9727 call node earlier than the warning is generated. */
9728
9729 tree
9730 fold_call_stmt (gcall *stmt, bool ignore)
9731 {
9732 tree ret = NULL_TREE;
9733 tree fndecl = gimple_call_fndecl (stmt);
9734 location_t loc = gimple_location (stmt);
9735 if (fndecl
9736 && TREE_CODE (fndecl) == FUNCTION_DECL
9737 && DECL_BUILT_IN (fndecl)
9738 && !gimple_call_va_arg_pack_p (stmt))
9739 {
9740 int nargs = gimple_call_num_args (stmt);
9741 tree *args = (nargs > 0
9742 ? gimple_call_arg_ptr (stmt, 0)
9743 : &error_mark_node);
9744
9745 if (avoid_folding_inline_builtin (fndecl))
9746 return NULL_TREE;
9747 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9748 {
9749 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9750 }
9751 else
9752 {
9753 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9754 if (ret)
9755 {
9756 /* Propagate location information from original call to
9757 expansion of builtin. Otherwise things like
9758 maybe_emit_chk_warning, that operate on the expansion
9759 of a builtin, will use the wrong location information. */
9760 if (gimple_has_location (stmt))
9761 {
9762 tree realret = ret;
9763 if (TREE_CODE (ret) == NOP_EXPR)
9764 realret = TREE_OPERAND (ret, 0);
9765 if (CAN_HAVE_LOCATION_P (realret)
9766 && !EXPR_HAS_LOCATION (realret))
9767 SET_EXPR_LOCATION (realret, loc);
9768 return realret;
9769 }
9770 return ret;
9771 }
9772 }
9773 }
9774 return NULL_TREE;
9775 }
9776
9777 /* Look up the function in builtin_decl that corresponds to DECL
9778 and set ASMSPEC as its user assembler name. DECL must be a
9779 function decl that declares a builtin. */
9780
9781 void
9782 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9783 {
9784 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9785 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9786 && asmspec != 0);
9787
9788 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9789 set_user_assembler_name (builtin, asmspec);
9790
9791 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9792 && INT_TYPE_SIZE < BITS_PER_WORD)
9793 {
9794 set_user_assembler_libfunc ("ffs", asmspec);
9795 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9796 "ffs");
9797 }
9798 }
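/* For example (illustrative), a translation unit containing

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("special_memcpy");

   routes compiler-generated uses of the memcpy builtin (e.g. from
   block moves) to the assembler name special_memcpy instead of
   "memcpy".  */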
9799
9800 /* Return true if DECL is a builtin that expands to a constant or similarly
9801 simple code. */
9802 bool
9803 is_simple_builtin (tree decl)
9804 {
9805 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9806 switch (DECL_FUNCTION_CODE (decl))
9807 {
9808 /* Builtins that expand to constants. */
9809 case BUILT_IN_CONSTANT_P:
9810 case BUILT_IN_EXPECT:
9811 case BUILT_IN_OBJECT_SIZE:
9812 case BUILT_IN_UNREACHABLE:
9813 /* Simple register moves or loads from stack. */
9814 case BUILT_IN_ASSUME_ALIGNED:
9815 case BUILT_IN_RETURN_ADDRESS:
9816 case BUILT_IN_EXTRACT_RETURN_ADDR:
9817 case BUILT_IN_FROB_RETURN_ADDR:
9818 case BUILT_IN_RETURN:
9819 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9820 case BUILT_IN_FRAME_ADDRESS:
9821 case BUILT_IN_VA_END:
9822 case BUILT_IN_STACK_SAVE:
9823 case BUILT_IN_STACK_RESTORE:
9824 /* Exception state returns or moves registers around. */
9825 case BUILT_IN_EH_FILTER:
9826 case BUILT_IN_EH_POINTER:
9827 case BUILT_IN_EH_COPY_VALUES:
9828 return true;
9829
9830 default:
9831 return false;
9832 }
9833
9834 return false;
9835 }
9836
9837 /* Return true if DECL is a builtin that is not expensive, i.e. one
9838 that is most probably expanded inline into reasonably simple code.
9839 This is a superset of is_simple_builtin. */
9840 bool
9841 is_inexpensive_builtin (tree decl)
9842 {
9843 if (!decl)
9844 return false;
9845 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9846 return true;
9847 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9848 switch (DECL_FUNCTION_CODE (decl))
9849 {
9850 case BUILT_IN_ABS:
9851 case BUILT_IN_ALLOCA:
9852 case BUILT_IN_ALLOCA_WITH_ALIGN:
9853 case BUILT_IN_BSWAP16:
9854 case BUILT_IN_BSWAP32:
9855 case BUILT_IN_BSWAP64:
9856 case BUILT_IN_CLZ:
9857 case BUILT_IN_CLZIMAX:
9858 case BUILT_IN_CLZL:
9859 case BUILT_IN_CLZLL:
9860 case BUILT_IN_CTZ:
9861 case BUILT_IN_CTZIMAX:
9862 case BUILT_IN_CTZL:
9863 case BUILT_IN_CTZLL:
9864 case BUILT_IN_FFS:
9865 case BUILT_IN_FFSIMAX:
9866 case BUILT_IN_FFSL:
9867 case BUILT_IN_FFSLL:
9868 case BUILT_IN_IMAXABS:
9869 case BUILT_IN_FINITE:
9870 case BUILT_IN_FINITEF:
9871 case BUILT_IN_FINITEL:
9872 case BUILT_IN_FINITED32:
9873 case BUILT_IN_FINITED64:
9874 case BUILT_IN_FINITED128:
9875 case BUILT_IN_FPCLASSIFY:
9876 case BUILT_IN_ISFINITE:
9877 case BUILT_IN_ISINF_SIGN:
9878 case BUILT_IN_ISINF:
9879 case BUILT_IN_ISINFF:
9880 case BUILT_IN_ISINFL:
9881 case BUILT_IN_ISINFD32:
9882 case BUILT_IN_ISINFD64:
9883 case BUILT_IN_ISINFD128:
9884 case BUILT_IN_ISNAN:
9885 case BUILT_IN_ISNANF:
9886 case BUILT_IN_ISNANL:
9887 case BUILT_IN_ISNAND32:
9888 case BUILT_IN_ISNAND64:
9889 case BUILT_IN_ISNAND128:
9890 case BUILT_IN_ISNORMAL:
9891 case BUILT_IN_ISGREATER:
9892 case BUILT_IN_ISGREATEREQUAL:
9893 case BUILT_IN_ISLESS:
9894 case BUILT_IN_ISLESSEQUAL:
9895 case BUILT_IN_ISLESSGREATER:
9896 case BUILT_IN_ISUNORDERED:
9897 case BUILT_IN_VA_ARG_PACK:
9898 case BUILT_IN_VA_ARG_PACK_LEN:
9899 case BUILT_IN_VA_COPY:
9900 case BUILT_IN_TRAP:
9901 case BUILT_IN_SAVEREGS:
9902 case BUILT_IN_POPCOUNTL:
9903 case BUILT_IN_POPCOUNTLL:
9904 case BUILT_IN_POPCOUNTIMAX:
9905 case BUILT_IN_POPCOUNT:
9906 case BUILT_IN_PARITYL:
9907 case BUILT_IN_PARITYLL:
9908 case BUILT_IN_PARITYIMAX:
9909 case BUILT_IN_PARITY:
9910 case BUILT_IN_LABS:
9911 case BUILT_IN_LLABS:
9912 case BUILT_IN_PREFETCH:
9913 case BUILT_IN_ACC_ON_DEVICE:
9914 return true;
9915
9916 default:
9917 return is_simple_builtin (decl);
9918 }
9919
9920 return false;
9921 }
9922
9923 /* Return true if T is a constant and the value cast to a target char
9924 can be represented by a host char.
9925 Store the cast char constant in *P if so. */
9926
9927 bool
9928 target_char_cst_p (tree t, char *p)
9929 {
9930 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
9931 return false;
9932
9933 *p = (char)tree_to_uhwi (t);
9934 return true;
9935 }