Add internal bitcount functions
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "diagnostic-core.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "fold-const-call.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "varasm.h"
47 #include "tree-object-size.h"
48 #include "realmpfr.h"
49 #include "cfgrtl.h"
50 #include "except.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "libfuncs.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "langhooks.h"
59 #include "value-prof.h"
60 #include "builtins.h"
61 #include "asan.h"
62 #include "cilk.h"
63 #include "tree-chkp.h"
64 #include "rtl-chkp.h"
65 #include "internal-fn.h"
66
67
68 struct target_builtins default_target_builtins;
69 #if SWITCHABLE_TARGET
70 struct target_builtins *this_target_builtins = &default_target_builtins;
71 #endif
72
73 /* Define the names of the builtin function types and codes. */
74 const char *const built_in_class_names[BUILT_IN_LAST]
75 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
76
77 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
78 const char * built_in_names[(int) END_BUILTINS] =
79 {
80 #include "builtins.def"
81 };
82
83 /* Set up an array of builtin_info_type, making sure each element's decl is
84 initialized to NULL_TREE. */
85 builtin_info_type builtin_info[(int)END_BUILTINS];
86
87 /* Non-zero if __builtin_constant_p should be folded right away. */
88 bool force_folding_builtin_constant_p;
89
90 static rtx c_readstr (const char *, machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree, tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 static rtx result_vector (int, rtx);
96 static void expand_builtin_prefetch (tree);
97 static rtx expand_builtin_apply_args (void);
98 static rtx expand_builtin_apply_args_1 (void);
99 static rtx expand_builtin_apply (rtx, rtx, rtx);
100 static void expand_builtin_return (rtx);
101 static enum type_class type_to_class (tree);
102 static rtx expand_builtin_classify_type (tree);
103 static void expand_errno_check (tree, rtx);
104 static rtx expand_builtin_mathfn (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_next_arg (void);
114 static rtx expand_builtin_va_start (tree);
115 static rtx expand_builtin_va_end (tree);
116 static rtx expand_builtin_va_copy (tree);
117 static rtx expand_builtin_strcmp (tree, rtx);
118 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
120 static rtx expand_builtin_memcpy (tree, rtx);
121 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
122 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
123 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
124 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 machine_mode, int, tree);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, machine_mode);
133 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_strchr (location_t, tree, tree, tree);
151 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
153 static tree fold_builtin_strcmp (location_t, tree, tree);
154 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
155 static tree fold_builtin_isascii (location_t, tree);
156 static tree fold_builtin_toascii (location_t, tree);
157 static tree fold_builtin_isdigit (location_t, tree);
158 static tree fold_builtin_fabs (location_t, tree, tree);
159 static tree fold_builtin_abs (location_t, tree, tree);
160 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
161 enum tree_code);
162 static tree fold_builtin_0 (location_t, tree);
163 static tree fold_builtin_1 (location_t, tree, tree);
164 static tree fold_builtin_2 (location_t, tree, tree, tree);
165 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_varargs (location_t, tree, tree*, int);
167
168 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
169 static tree fold_builtin_strstr (location_t, tree, tree, tree);
170 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
171 static tree fold_builtin_strspn (location_t, tree, tree);
172 static tree fold_builtin_strcspn (location_t, tree, tree);
173
174 static rtx expand_builtin_object_size (tree);
175 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
176 enum built_in_function);
177 static void maybe_emit_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_free_warning (tree);
180 static tree fold_builtin_object_size (tree, tree);
181
182 unsigned HOST_WIDE_INT target_newline;
183 unsigned HOST_WIDE_INT target_percent;
184 static unsigned HOST_WIDE_INT target_c;
185 static unsigned HOST_WIDE_INT target_s;
186 char target_percent_c[3];
187 char target_percent_s[3];
188 char target_percent_s_newline[4];
189 static tree do_mpfr_remquo (tree, tree, tree);
190 static tree do_mpfr_lgamma_r (tree, tree, tree);
191 static void expand_builtin_sync_synchronize (void);
192
193 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_ (or names a Cilk Plus __cilkrts_ entry point when Cilk Plus is enabled). */
194
195 static bool
196 is_builtin_name (const char *name)
197 {
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
204 if (flag_cilkplus
205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
208 return false;
209 }
210
211
212 /* Return true if DECL is a function symbol representing a built-in. */
213
214 bool
215 is_builtin_fn (tree decl)
216 {
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218 }
219
220 /* Return true if NODE should be considered for inline expansion regardless
221 of the optimization level. This means whenever a function is invoked with
222 its "internal" name, which normally contains the prefix "__builtin". */
223
224 static bool
225 called_as_built_in (tree node)
226 {
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
232 }
233
234 /* Compute values M and N such that M divides (address of EXP - N) and such
235 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
236 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
237 *ALIGNP and any bit-offset to *BITPOSP.
238
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
244 whereas foo() itself starts on an even address.
245
246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
248
249 static bool
250 get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
252 {
253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
255 machine_mode mode;
256 int unsignedp, reversep, volatilep;
257 unsigned int align = BITS_PER_UNIT;
258 bool known_alignment = false;
259
260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
263 &unsignedp, &reversep, &volatilep, true);
264
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
267 if (TREE_CODE (exp) == FUNCTION_DECL)
268 {
269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
275 }
276 else if (TREE_CODE (exp) == LABEL_DECL)
277 ;
278 else if (TREE_CODE (exp) == CONST_DECL)
279 {
280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
282 align = TYPE_ALIGN (TREE_TYPE (exp));
283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
285
286 known_alignment = true;
287 }
288 else if (DECL_P (exp))
289 {
290 align = DECL_ALIGN (exp);
291 known_alignment = true;
292 }
293 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
294 {
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 }
297 else if (TREE_CODE (exp) == INDIRECT_REF
298 || TREE_CODE (exp) == MEM_REF
299 || TREE_CODE (exp) == TARGET_MEM_REF)
300 {
301 tree addr = TREE_OPERAND (exp, 0);
302 unsigned ptr_align;
303 unsigned HOST_WIDE_INT ptr_bitpos;
304 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
305
306 /* If the address is explicitly aligned, handle that. */
307 if (TREE_CODE (addr) == BIT_AND_EXPR
308 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
309 {
310 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
311 ptr_bitmask *= BITS_PER_UNIT;
312 align = ptr_bitmask & -ptr_bitmask;
313 addr = TREE_OPERAND (addr, 0);
314 }
315
316 known_alignment
317 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
318 align = MAX (ptr_align, align);
319
320 /* Re-apply explicit alignment to the bitpos. */
321 ptr_bitpos &= ptr_bitmask;
322
323 /* The alignment of the pointer operand in a TARGET_MEM_REF
324 has to take the variable offset parts into account. */
325 if (TREE_CODE (exp) == TARGET_MEM_REF)
326 {
327 if (TMR_INDEX (exp))
328 {
329 unsigned HOST_WIDE_INT step = 1;
330 if (TMR_STEP (exp))
331 step = TREE_INT_CST_LOW (TMR_STEP (exp));
332 align = MIN (align, (step & -step) * BITS_PER_UNIT);
333 }
334 if (TMR_INDEX2 (exp))
335 align = BITS_PER_UNIT;
336 known_alignment = false;
337 }
338
339 /* When EXP is an actual memory reference then we can use
340 TYPE_ALIGN of a pointer indirection to derive alignment.
341 Do so only if get_pointer_alignment_1 did not reveal absolute
342 alignment knowledge and if using that alignment would
343 improve the situation. */
344 if (!addr_p && !known_alignment
345 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
346 align = TYPE_ALIGN (TREE_TYPE (exp));
347 else
348 {
349 /* Else adjust bitpos accordingly. */
350 bitpos += ptr_bitpos;
351 if (TREE_CODE (exp) == MEM_REF
352 || TREE_CODE (exp) == TARGET_MEM_REF)
353 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
354 }
355 }
356 else if (TREE_CODE (exp) == STRING_CST)
357 {
358 /* STRING_CSTs are the only constant objects we allow not to be
359 wrapped inside a CONST_DECL. */
360 align = TYPE_ALIGN (TREE_TYPE (exp));
361 if (CONSTANT_CLASS_P (exp))
362 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
363
364 known_alignment = true;
365 }
366
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
369 if (offset)
370 {
371 unsigned int trailing_zeros = tree_ctz (offset);
372 if (trailing_zeros < HOST_BITS_PER_INT)
373 {
374 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
375 if (inner)
376 align = MIN (align, inner);
377 }
378 }
379
380 *alignp = align;
381 *bitposp = bitpos & (*alignp - 1);
382 return known_alignment;
383 }
384
385 /* For a memory reference expression EXP compute values M and N such that M
386 divides (&EXP - N) and such that N < M. If these numbers can be determined,
387 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
388 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
389
390 bool
391 get_object_alignment_1 (tree exp, unsigned int *alignp,
392 unsigned HOST_WIDE_INT *bitposp)
393 {
394 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 }
396
397 /* Return the alignment in bits of EXP, an object. */
398
399 unsigned int
400 get_object_alignment (tree exp)
401 {
402 unsigned HOST_WIDE_INT bitpos = 0;
403 unsigned int align;
404
405 get_object_alignment_1 (exp, &align, &bitpos);
406
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
409
410 if (bitpos != 0)
411 align = (bitpos & -bitpos);
412 return align;
413 }
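
/* For illustration (a hypothetical example, not taken from the sources above):
   given

     struct S { char c; int i; } __attribute__ ((aligned (16))) s;

   the reference s.i names a field at byte offset 4 of a 16-byte-aligned
   object, so get_object_alignment_1 would set *ALIGNP to 128 and *BITPOSP
   to 32, and get_object_alignment would return 32, i.e. a guaranteed
   4-byte alignment for &s.i.  */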
414
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
417 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
418 the results are just a conservative approximation.
419
420 If EXP is not a pointer, false is returned too. */
421
422 bool
423 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 unsigned HOST_WIDE_INT *bitposp)
425 {
426 STRIP_NOPS (exp);
427
428 if (TREE_CODE (exp) == ADDR_EXPR)
429 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 alignp, bitposp, true);
431 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 {
433 unsigned int align;
434 unsigned HOST_WIDE_INT bitpos;
435 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 &align, &bitpos);
437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 else
440 {
441 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 if (trailing_zeros < HOST_BITS_PER_INT)
443 {
444 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 if (inner)
446 align = MIN (align, inner);
447 }
448 }
449 *alignp = align;
450 *bitposp = bitpos & (align - 1);
451 return res;
452 }
453 else if (TREE_CODE (exp) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 {
456 unsigned int ptr_align, ptr_misalign;
457 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458
459 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 {
461 *bitposp = ptr_misalign * BITS_PER_UNIT;
462 *alignp = ptr_align * BITS_PER_UNIT;
463 /* We cannot really tell whether this result is an approximation. */
464 return true;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = (bitpos & -bitpos);
506
507 return align;
508 }
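
/* For illustration (a hypothetical example): for a constant pointer such as

     int *p = (int *) 0x104;

   the INTEGER_CST case above records the misalignment of 0x104 * BITS_PER_UNIT
   modulo BIGGEST_ALIGNMENT, so get_pointer_alignment on that constant reports
   32 bits (4 bytes), since 0x104 is divisible by 4 but not by 8.  */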
509
510 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
511 way, because the string could contain a zero byte in the middle.
512 TREE_STRING_LENGTH is the size of the character array, not the string.
513
514 ONLY_VALUE should be nonzero if the result is not going to be emitted
515 into the instruction stream and zero if it is going to be expanded.
516 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
517 is returned, otherwise NULL, since
518 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
519 evaluate the side-effects.
520
521 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
522 accesses. Note that this implies the result is not going to be emitted
523 into the instruction stream.
524
525 The value returned is of type `ssizetype'.
526
527 Unfortunately, string_constant can't access the values of const char
528 arrays with initializers, so neither can we here. */
529
530 tree
531 c_strlen (tree src, int only_value)
532 {
533 tree offset_node;
534 HOST_WIDE_INT offset;
535 int max;
536 const char *ptr;
537 location_t loc;
538
539 STRIP_NOPS (src);
540 if (TREE_CODE (src) == COND_EXPR
541 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
542 {
543 tree len1, len2;
544
545 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
546 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
547 if (tree_int_cst_equal (len1, len2))
548 return len1;
549 }
550
551 if (TREE_CODE (src) == COMPOUND_EXPR
552 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
553 return c_strlen (TREE_OPERAND (src, 1), only_value);
554
555 loc = EXPR_LOC_OR_LOC (src, input_location);
556
557 src = string_constant (src, &offset_node);
558 if (src == 0)
559 return NULL_TREE;
560
561 max = TREE_STRING_LENGTH (src) - 1;
562 ptr = TREE_STRING_POINTER (src);
563
564 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
565 {
566 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
567 compute the offset to the following null if we don't know where to
568 start searching for it. */
569 int i;
570
571 for (i = 0; i < max; i++)
572 if (ptr[i] == 0)
573 return NULL_TREE;
574
575 /* We don't know the starting offset, but we do know that the string
576 has no internal zero bytes. We can assume that the offset falls
577 within the bounds of the string; otherwise, the programmer deserves
578 what he gets. Subtract the offset from the length of the string,
579 and return that. This would perhaps not be valid if we were dealing
580 with named arrays in addition to literal string constants. */
581
582 return size_diffop_loc (loc, size_int (max), offset_node);
583 }
584
585 /* We have a known offset into the string. Start searching there for
586 a null character if we can represent it as a single HOST_WIDE_INT. */
587 if (offset_node == 0)
588 offset = 0;
589 else if (! tree_fits_shwi_p (offset_node))
590 offset = -1;
591 else
592 offset = tree_to_shwi (offset_node);
593
594 /* If the offset is known to be out of bounds, warn, and call strlen at
595 runtime. */
596 if (offset < 0 || offset > max)
597 {
598 /* Suppress multiple warnings for propagated constant strings. */
599 if (only_value != 2
600 && !TREE_NO_WARNING (src))
601 {
602 warning_at (loc, 0, "offset outside bounds of constant string");
603 TREE_NO_WARNING (src) = 1;
604 }
605 return NULL_TREE;
606 }
607
608 /* Use strlen to search for the first zero byte. Since any strings
609 constructed with build_string will have nulls appended, we win even
610 if we get handed something like (char[4])"abcd".
611
612 Since OFFSET is our starting index into the string, no further
613 calculation is needed. */
614 return ssize_int (strlen (ptr + offset));
615 }
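
/* For illustration: for the string constant "foo\0bar", TREE_STRING_LENGTH
   is 8 and MAX above is 7, so

     c_strlen with a zero offset yields ssizetype 3,
     c_strlen with a constant offset of 4 yields ssizetype 3 as well
       (the length of the trailing "bar"),
     c_strlen with a non-constant offset yields NULL_TREE, because the
       embedded zero byte makes the result depend on the offset.  */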
616
617 /* Return a constant integer corresponding to target reading
618 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
619
620 static rtx
621 c_readstr (const char *str, machine_mode mode)
622 {
623 HOST_WIDE_INT ch;
624 unsigned int i, j;
625 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
626
627 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
628 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
629 / HOST_BITS_PER_WIDE_INT;
630
631 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
632 for (i = 0; i < len; i++)
633 tmp[i] = 0;
634
635 ch = 1;
636 for (i = 0; i < GET_MODE_SIZE (mode); i++)
637 {
638 j = i;
639 if (WORDS_BIG_ENDIAN)
640 j = GET_MODE_SIZE (mode) - i - 1;
641 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
642 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
643 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
644 j *= BITS_PER_UNIT;
645
646 if (ch)
647 ch = (unsigned char) str[i];
648 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
649 }
650
651 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
652 return immed_wide_int_const (c, mode);
653 }
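
/* For illustration (assuming 8-bit bytes): c_readstr ("ab", SImode) stops
   reading at the terminating zero byte and produces the SImode constant
   0x00006261 on a little-endian target ('a' in the lowest byte) or
   0x61620000 on a big-endian one.  */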
654
655 /* Cast a target constant CST to target CHAR and if that value fits into
656 host char type, return zero and put that value into variable pointed to by
657 P. */
658
659 static int
660 target_char_cast (tree cst, char *p)
661 {
662 unsigned HOST_WIDE_INT val, hostval;
663
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
666 return 1;
667
668 /* Do not care if it fits or not right here. */
669 val = TREE_INT_CST_LOW (cst);
670
671 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
672 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
673
674 hostval = val;
675 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
676 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
677
678 if (val != hostval)
679 return 1;
680
681 *p = hostval;
682 return 0;
683 }
684
685 /* Similar to save_expr, but assumes that arbitrary code is not executed
686 in between the multiple evaluations. In particular, we assume that a
687 non-addressable local variable will not be modified. */
688
689 static tree
690 builtin_save_expr (tree exp)
691 {
692 if (TREE_CODE (exp) == SSA_NAME
693 || (TREE_ADDRESSABLE (exp) == 0
694 && (TREE_CODE (exp) == PARM_DECL
695 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
696 return exp;
697
698 return save_expr (exp);
699 }
700
701 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
702 times to get the address of either a higher stack frame, or a return
703 address located within it (depending on FNDECL_CODE). */
704
705 static rtx
706 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
707 {
708 int i;
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
710 if (tem == NULL_RTX)
711 {
712 /* For a zero count with __builtin_return_address, we don't care what
713 frame address we return, because target-specific definitions will
714 override us. Therefore frame pointer elimination is OK, and using
715 the soft frame pointer is OK.
716
717 For a nonzero count, or a zero count with __builtin_frame_address,
718 we require a stable offset from the current frame pointer to the
719 previous one, so we must use the hard frame pointer, and
720 we must disable frame pointer elimination. */
721 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
722 tem = frame_pointer_rtx;
723 else
724 {
725 tem = hard_frame_pointer_rtx;
726
727 /* Tell reload not to eliminate the frame pointer. */
728 crtl->accesses_prior_frames = 1;
729 }
730 }
731
732 if (count > 0)
733 SETUP_FRAME_ADDRESSES ();
734
735 /* On the SPARC, the return address is not in the frame, it is in a
736 register. There is no way to access it off of the current frame
737 pointer, but it can be accessed off the previous frame pointer by
738 reading the value from the register window save area. */
739 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
740 count--;
741
742 /* Scan back COUNT frames to the specified frame. */
743 for (i = 0; i < count; i++)
744 {
745 /* Assume the dynamic chain pointer is in the word that the
746 frame address points to, unless otherwise specified. */
747 tem = DYNAMIC_CHAIN_ADDRESS (tem);
748 tem = memory_address (Pmode, tem);
749 tem = gen_frame_mem (Pmode, tem);
750 tem = copy_to_reg (tem);
751 }
752
753 /* For __builtin_frame_address, return what we've got. But, on
754 the SPARC for example, we may have to add a bias. */
755 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
756 return FRAME_ADDR_RTX (tem);
757
758 /* For __builtin_return_address, get the return address from that frame. */
759 #ifdef RETURN_ADDR_RTX
760 tem = RETURN_ADDR_RTX (count, tem);
761 #else
762 tem = memory_address (Pmode,
763 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
764 tem = gen_frame_mem (Pmode, tem);
765 #endif
766 return tem;
767 }
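
/* For illustration: a source-level use such as

     void *ra = __builtin_return_address (0);

   reaches this function with FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and
   COUNT == 0, so no frames are walked and the result is RETURN_ADDR_RTX
   (or the generic frame-memory fallback) applied to the current frame.  */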
768
769 /* Alias set used for setjmp buffer. */
770 static alias_set_type setjmp_alias_set = -1;
771
772 /* Construct the leading half of a __builtin_setjmp call. Control will
773 return to RECEIVER_LABEL. This is also called directly by the SJLJ
774 exception handling code. */
775
776 void
777 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
778 {
779 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
780 rtx stack_save;
781 rtx mem;
782
783 if (setjmp_alias_set == -1)
784 setjmp_alias_set = new_alias_set ();
785
786 buf_addr = convert_memory_address (Pmode, buf_addr);
787
788 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
789
790 /* We store the frame pointer and the address of receiver_label in
791 the buffer and use the rest of it for the stack save area, which
792 is machine-dependent. */
793
794 mem = gen_rtx_MEM (Pmode, buf_addr);
795 set_mem_alias_set (mem, setjmp_alias_set);
796 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
797
798 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
799 GET_MODE_SIZE (Pmode))),
800 set_mem_alias_set (mem, setjmp_alias_set);
801
802 emit_move_insn (validize_mem (mem),
803 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
804
805 stack_save = gen_rtx_MEM (sa_mode,
806 plus_constant (Pmode, buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (stack_save, setjmp_alias_set);
809 emit_stack_save (SAVE_NONLOCAL, &stack_save);
810
811 /* If there is further processing to do, do it. */
812 if (targetm.have_builtin_setjmp_setup ())
813 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
814
815 /* We have a nonlocal label. */
816 cfun->has_nonlocal_label = 1;
817 }
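
/* In other words, the buffer set up above is laid out as:
     word 0          the value of targetm.builtin_setjmp_frame_value ()
     word 1          the address of RECEIVER_LABEL
     word 2 onward   the nonlocal stack save area filled by emit_stack_save.
   expand_builtin_longjmp below reads the buffer back with the same layout.  */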
818
819 /* Construct the trailing part of a __builtin_setjmp call. This is
820 also called directly by the SJLJ exception handling code.
821 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
822
823 void
824 expand_builtin_setjmp_receiver (rtx receiver_label)
825 {
826 rtx chain;
827
828 /* Mark the FP as used when we get here, so we have to make sure it's
829 marked as used by this function. */
830 emit_use (hard_frame_pointer_rtx);
831
832 /* Mark the static chain as clobbered here so life information
833 doesn't get messed up for it. */
834 chain = targetm.calls.static_chain (current_function_decl, true);
835 if (chain && REG_P (chain))
836 emit_clobber (chain);
837
838 /* Now put in the code to restore the frame pointer, and argument
839 pointer, if needed. */
840 if (! targetm.have_nonlocal_goto ())
841 {
842 /* First adjust our frame pointer to its actual value. It was
843 previously set to the start of the virtual area corresponding to
844 the stacked variables when we branched here and now needs to be
845 adjusted to the actual hardware fp value.
846
847 Assignments to virtual registers are converted by
848 instantiate_virtual_regs into the corresponding assignment
849 to the underlying register (fp in this case) that makes
850 the original assignment true.
851 So the following insn will actually be decrementing fp by
852 STARTING_FRAME_OFFSET. */
853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
854
855 /* Restoring the frame pointer also modifies the hard frame pointer.
856 Mark it used (so that the previous assignment remains live once
857 the frame pointer is eliminated) and clobbered (to represent the
858 implicit update from the assignment). */
859 emit_use (hard_frame_pointer_rtx);
860 emit_clobber (hard_frame_pointer_rtx);
861 }
862
863 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
864 {
865 #ifdef ELIMINABLE_REGS
866 /* If the argument pointer can be eliminated in favor of the
867 frame pointer, we don't need to restore it. We assume here
868 that if such an elimination is present, it can always be used.
869 This is the case on all known machines; if we don't make this
870 assumption, we do unnecessary saving on many machines. */
871 size_t i;
872 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
873
874 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
875 if (elim_regs[i].from == ARG_POINTER_REGNUM
876 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
877 break;
878
879 if (i == ARRAY_SIZE (elim_regs))
880 #endif
881 {
882 /* Now restore our arg pointer from the address at which it
883 was saved in our stack frame. */
884 emit_move_insn (crtl->args.internal_arg_pointer,
885 copy_to_reg (get_arg_pointer_save_area ()));
886 }
887 }
888
889 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
890 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
891 else if (targetm.have_nonlocal_goto_receiver ())
892 emit_insn (targetm.gen_nonlocal_goto_receiver ());
893 else
894 { /* Nothing */ }
895
896 /* We must not allow the code we just generated to be reordered by
897 scheduling. Specifically, the update of the frame pointer must
898 happen immediately, not later. */
899 emit_insn (gen_blockage ());
900 }
901
902 /* __builtin_longjmp is passed a pointer to an array of five words (not
903 all will be used on all machines). It operates similarly to the C
904 library function of the same name, but is more efficient. Much of
905 the code below is copied from the handling of non-local gotos. */
906
907 static void
908 expand_builtin_longjmp (rtx buf_addr, rtx value)
909 {
910 rtx fp, lab, stack;
911 rtx_insn *insn, *last;
912 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
913
914 /* DRAP is needed for stack realign if longjmp is expanded to current
915 function */
916 if (SUPPORTS_STACK_ALIGNMENT)
917 crtl->need_drap = true;
918
919 if (setjmp_alias_set == -1)
920 setjmp_alias_set = new_alias_set ();
921
922 buf_addr = convert_memory_address (Pmode, buf_addr);
923
924 buf_addr = force_reg (Pmode, buf_addr);
925
926 /* We require that the user pass a second argument of 1, because
927 that is what builtin_setjmp will return. */
928 gcc_assert (value == const1_rtx);
929
930 last = get_last_insn ();
931 if (targetm.have_builtin_longjmp ())
932 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
933 else
934 {
935 fp = gen_rtx_MEM (Pmode, buf_addr);
936 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
937 GET_MODE_SIZE (Pmode)));
938
939 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
940 2 * GET_MODE_SIZE (Pmode)));
941 set_mem_alias_set (fp, setjmp_alias_set);
942 set_mem_alias_set (lab, setjmp_alias_set);
943 set_mem_alias_set (stack, setjmp_alias_set);
944
945 /* Pick up FP, label, and SP from the block and jump. This code is
946 from expand_goto in stmt.c; see there for detailed comments. */
947 if (targetm.have_nonlocal_goto ())
948 /* We have to pass a value to the nonlocal_goto pattern that will
949 get copied into the static_chain pointer, but it does not matter
950 what that value is, because builtin_setjmp does not use it. */
951 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
952 else
953 {
954 lab = copy_to_reg (lab);
955
956 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
957 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
958
959 emit_move_insn (hard_frame_pointer_rtx, fp);
960 emit_stack_restore (SAVE_NONLOCAL, stack);
961
962 emit_use (hard_frame_pointer_rtx);
963 emit_use (stack_pointer_rtx);
964 emit_indirect_jump (lab);
965 }
966 }
967
968 /* Search backwards and mark the jump insn as a non-local goto.
969 Note that this precludes the use of __builtin_longjmp to a
970 __builtin_setjmp target in the same function. However, we've
971 already cautioned the user that these functions are for
972 internal exception handling use only. */
973 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
974 {
975 gcc_assert (insn != last);
976
977 if (JUMP_P (insn))
978 {
979 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
980 break;
981 }
982 else if (CALL_P (insn))
983 break;
984 }
985 }
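
/* For illustration, the intended (internal, exception-handling only) pairing
   looks roughly like:

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       ... eventually __builtin_longjmp (buf, 1) ...
     else
       ... control resumes here after the longjmp ...

   The second argument to __builtin_longjmp must be the literal constant 1,
   as asserted above.  */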
986
987 static inline bool
988 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
989 {
990 return (iter->i < iter->n);
991 }
992
993 /* This function validates the types of a function call argument list
994 against a specified list of tree_codes. If the last specifier is a 0,
995 that represents an ellipsis; otherwise the last specifier must be a
996 VOID_TYPE. */
997
998 static bool
999 validate_arglist (const_tree callexpr, ...)
1000 {
1001 enum tree_code code;
1002 bool res = 0;
1003 va_list ap;
1004 const_call_expr_arg_iterator iter;
1005 const_tree arg;
1006
1007 va_start (ap, callexpr);
1008 init_const_call_expr_arg_iterator (callexpr, &iter);
1009
1010 do
1011 {
1012 code = (enum tree_code) va_arg (ap, int);
1013 switch (code)
1014 {
1015 case 0:
1016 /* This signifies an ellipsis; any further arguments are all OK. */
1017 res = true;
1018 goto end;
1019 case VOID_TYPE:
1020 /* This signifies an endlink, if no arguments remain, return
1021 true, otherwise return false. */
1022 res = !more_const_call_expr_args_p (&iter);
1023 goto end;
1024 default:
1025 /* If no parameters remain or the parameter's code does not
1026 match the specified code, return false. Otherwise continue
1027 checking any remaining arguments. */
1028 arg = next_const_call_expr_arg (&iter);
1029 if (!validate_arg (arg, code))
1030 goto end;
1031 break;
1032 }
1033 }
1034 while (1);
1035
1036 /* We need gotos here since we can only have one VA_CLOSE in a
1037 function. */
1038 end: ;
1039 va_end (ap);
1040
1041 return res;
1042 }
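
/* For example, the calls used elsewhere in this file behave as follows:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts a pointer followed by any number of further arguments.  */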
1043
1044 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1045 and the address of the save area. */
1046
1047 static rtx
1048 expand_builtin_nonlocal_goto (tree exp)
1049 {
1050 tree t_label, t_save_area;
1051 rtx r_label, r_save_area, r_fp, r_sp;
1052 rtx_insn *insn;
1053
1054 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1055 return NULL_RTX;
1056
1057 t_label = CALL_EXPR_ARG (exp, 0);
1058 t_save_area = CALL_EXPR_ARG (exp, 1);
1059
1060 r_label = expand_normal (t_label);
1061 r_label = convert_memory_address (Pmode, r_label);
1062 r_save_area = expand_normal (t_save_area);
1063 r_save_area = convert_memory_address (Pmode, r_save_area);
1064 /* Copy the address of the save location to a register just in case it was
1065 based on the frame pointer. */
1066 r_save_area = copy_to_reg (r_save_area);
1067 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1068 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1069 plus_constant (Pmode, r_save_area,
1070 GET_MODE_SIZE (Pmode)));
1071
1072 crtl->has_nonlocal_goto = 1;
1073
1074 /* ??? We no longer need to pass the static chain value, afaik. */
1075 if (targetm.have_nonlocal_goto ())
1076 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1077 else
1078 {
1079 r_label = copy_to_reg (r_label);
1080
1081 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1082 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1083
1084 /* Restore frame pointer for containing function. */
1085 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1086 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1087
1088 /* USE of hard_frame_pointer_rtx added for consistency;
1089 not clear if really needed. */
1090 emit_use (hard_frame_pointer_rtx);
1091 emit_use (stack_pointer_rtx);
1092
1093 /* If the architecture is using a GP register, we must
1094 conservatively assume that the target function makes use of it.
1095 The prologue of functions with nonlocal gotos must therefore
1096 initialize the GP register to the appropriate value, and we
1097 must then make sure that this value is live at the point
1098 of the jump. (Note that this doesn't necessarily apply
1099 to targets with a nonlocal_goto pattern; they are free
1100 to implement it in their own way. Note also that this is
1101 a no-op if the GP register is a global invariant.) */
1102 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1103 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1104 emit_use (pic_offset_table_rtx);
1105
1106 emit_indirect_jump (r_label);
1107 }
1108
1109 /* Search backwards to the jump insn and mark it as a
1110 non-local goto. */
1111 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1112 {
1113 if (JUMP_P (insn))
1114 {
1115 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1116 break;
1117 }
1118 else if (CALL_P (insn))
1119 break;
1120 }
1121
1122 return const0_rtx;
1123 }
1124
1125 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1126 (not all will be used on all machines) that was passed to __builtin_setjmp.
1127 It updates the stack pointer in that block to the current value. This is
1128 also called directly by the SJLJ exception handling code. */
1129
1130 void
1131 expand_builtin_update_setjmp_buf (rtx buf_addr)
1132 {
1133 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1134 rtx stack_save
1135 = gen_rtx_MEM (sa_mode,
1136 memory_address
1137 (sa_mode,
1138 plus_constant (Pmode, buf_addr,
1139 2 * GET_MODE_SIZE (Pmode))));
1140
1141 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1142 }
1143
1144 /* Expand a call to __builtin_prefetch. For a target that does not support
1145 data prefetch, evaluate the memory address argument in case it has side
1146 effects. */
1147
1148 static void
1149 expand_builtin_prefetch (tree exp)
1150 {
1151 tree arg0, arg1, arg2;
1152 int nargs;
1153 rtx op0, op1, op2;
1154
1155 if (!validate_arglist (exp, POINTER_TYPE, 0))
1156 return;
1157
1158 arg0 = CALL_EXPR_ARG (exp, 0);
1159
1160 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1161 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1162 locality). */
1163 nargs = call_expr_nargs (exp);
1164 if (nargs > 1)
1165 arg1 = CALL_EXPR_ARG (exp, 1);
1166 else
1167 arg1 = integer_zero_node;
1168 if (nargs > 2)
1169 arg2 = CALL_EXPR_ARG (exp, 2);
1170 else
1171 arg2 = integer_three_node;
1172
1173 /* Argument 0 is an address. */
1174 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1175
1176 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1177 if (TREE_CODE (arg1) != INTEGER_CST)
1178 {
1179 error ("second argument to %<__builtin_prefetch%> must be a constant");
1180 arg1 = integer_zero_node;
1181 }
1182 op1 = expand_normal (arg1);
1183 /* Argument 1 must be either zero or one. */
1184 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1185 {
1186 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1187 " using zero");
1188 op1 = const0_rtx;
1189 }
1190
1191 /* Argument 2 (locality) must be a compile-time constant int. */
1192 if (TREE_CODE (arg2) != INTEGER_CST)
1193 {
1194 error ("third argument to %<__builtin_prefetch%> must be a constant");
1195 arg2 = integer_zero_node;
1196 }
1197 op2 = expand_normal (arg2);
1198 /* Argument 2 must be 0, 1, 2, or 3. */
1199 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1200 {
1201 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1202 op2 = const0_rtx;
1203 }
1204
1205 if (targetm.have_prefetch ())
1206 {
1207 struct expand_operand ops[3];
1208
1209 create_address_operand (&ops[0], op0);
1210 create_integer_operand (&ops[1], INTVAL (op1));
1211 create_integer_operand (&ops[2], INTVAL (op2));
1212 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1213 return;
1214 }
1215
1216 /* Don't do anything with direct references to volatile memory, but
1217 generate code to handle other side effects. */
1218 if (!MEM_P (op0) && side_effects_p (op0))
1219 emit_insn (op0);
1220 }
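
/* For illustration: a source-level call such as

     __builtin_prefetch (&a[i], 1, 1);

   arrives here with ARG1 == 1 (prefetch for a write) and ARG2 == 1 (low
   temporal locality); when omitted they default to 0 (read) and 3 (high
   locality) as handled above.  */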
1221
1222 /* Get a MEM rtx for expression EXP which is the address of an operand
1223 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1224 the maximum length of the block of memory that might be accessed or
1225 NULL if unknown. */
1226
1227 static rtx
1228 get_memory_rtx (tree exp, tree len)
1229 {
1230 tree orig_exp = exp;
1231 rtx addr, mem;
1232
1233 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1234 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1235 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1236 exp = TREE_OPERAND (exp, 0);
1237
1238 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1239 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1240
1241 /* Get an expression we can use to find the attributes to assign to MEM.
1242 First remove any nops. */
1243 while (CONVERT_EXPR_P (exp)
1244 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1245 exp = TREE_OPERAND (exp, 0);
1246
1247 /* Build a MEM_REF representing the whole accessed area as a byte blob
1248 (as builtin stringops may alias with anything). */
1249 exp = fold_build2 (MEM_REF,
1250 build_array_type (char_type_node,
1251 build_range_type (sizetype,
1252 size_one_node, len)),
1253 exp, build_int_cst (ptr_type_node, 0));
1254
1255 /* If the MEM_REF has no acceptable address, try to get the base object
1256 from the original address we got, and build an all-aliasing
1257 unknown-sized access to that one. */
1258 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1259 set_mem_attributes (mem, exp, 0);
1260 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1261 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1262 0))))
1263 {
1264 exp = build_fold_addr_expr (exp);
1265 exp = fold_build2 (MEM_REF,
1266 build_array_type (char_type_node,
1267 build_range_type (sizetype,
1268 size_zero_node,
1269 NULL)),
1270 exp, build_int_cst (ptr_type_node, 0));
1271 set_mem_attributes (mem, exp, 0);
1272 }
1273 set_mem_alias_set (mem, 0);
1274 return mem;
1275 }
1276 \f
1277 /* Built-in functions to perform an untyped call and return. */
1278
1279 #define apply_args_mode \
1280 (this_target_builtins->x_apply_args_mode)
1281 #define apply_result_mode \
1282 (this_target_builtins->x_apply_result_mode)
1283
1284 /* Return the size required for the block returned by __builtin_apply_args,
1285 and initialize apply_args_mode. */
1286
1287 static int
1288 apply_args_size (void)
1289 {
1290 static int size = -1;
1291 int align;
1292 unsigned int regno;
1293 machine_mode mode;
1294
1295 /* The values computed by this function never change. */
1296 if (size < 0)
1297 {
1298 /* The first value is the incoming arg-pointer. */
1299 size = GET_MODE_SIZE (Pmode);
1300
1301 /* The second value is the structure value address unless this is
1302 passed as an "invisible" first argument. */
1303 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1304 size += GET_MODE_SIZE (Pmode);
1305
1306 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1307 if (FUNCTION_ARG_REGNO_P (regno))
1308 {
1309 mode = targetm.calls.get_raw_arg_mode (regno);
1310
1311 gcc_assert (mode != VOIDmode);
1312
1313 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1314 if (size % align != 0)
1315 size = CEIL (size, align) * align;
1316 size += GET_MODE_SIZE (mode);
1317 apply_args_mode[regno] = mode;
1318 }
1319 else
1320 {
1321 apply_args_mode[regno] = VOIDmode;
1322 }
1323 }
1324 return size;
1325 }
1326
1327 /* Return the size required for the block returned by __builtin_apply,
1328 and initialize apply_result_mode. */
1329
1330 static int
1331 apply_result_size (void)
1332 {
1333 static int size = -1;
1334 int align, regno;
1335 machine_mode mode;
1336
1337 /* The values computed by this function never change. */
1338 if (size < 0)
1339 {
1340 size = 0;
1341
1342 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1343 if (targetm.calls.function_value_regno_p (regno))
1344 {
1345 mode = targetm.calls.get_raw_result_mode (regno);
1346
1347 gcc_assert (mode != VOIDmode);
1348
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 size += GET_MODE_SIZE (mode);
1353 apply_result_mode[regno] = mode;
1354 }
1355 else
1356 apply_result_mode[regno] = VOIDmode;
1357
1358 /* Allow targets that use untyped_call and untyped_return to override
1359 the size so that machine-specific information can be stored here. */
1360 #ifdef APPLY_RESULT_SIZE
1361 size = APPLY_RESULT_SIZE;
1362 #endif
1363 }
1364 return size;
1365 }
1366
1367 /* Create a vector describing the result block RESULT. If SAVEP is true,
1368 the result block is used to save the values; otherwise it is used to
1369 restore the values. */
1370
1371 static rtx
1372 result_vector (int savep, rtx result)
1373 {
1374 int regno, size, align, nelts;
1375 machine_mode mode;
1376 rtx reg, mem;
1377 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1378
1379 size = nelts = 0;
1380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1381 if ((mode = apply_result_mode[regno]) != VOIDmode)
1382 {
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
1386 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1387 mem = adjust_address (result, mode, size);
1388 savevec[nelts++] = (savep
1389 ? gen_rtx_SET (mem, reg)
1390 : gen_rtx_SET (reg, mem));
1391 size += GET_MODE_SIZE (mode);
1392 }
1393 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1394 }
1395
1396 /* Save the state required to perform an untyped call with the same
1397 arguments as were passed to the current function. */
1398
1399 static rtx
1400 expand_builtin_apply_args_1 (void)
1401 {
1402 rtx registers, tem;
1403 int size, align, regno;
1404 machine_mode mode;
1405 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1406
1407 /* Create a block where the arg-pointer, structure value address,
1408 and argument registers can be saved. */
1409 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1410
1411 /* Walk past the arg-pointer and structure value address. */
1412 size = GET_MODE_SIZE (Pmode);
1413 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1414 size += GET_MODE_SIZE (Pmode);
1415
1416 /* Save each register used in calling a function to the block. */
1417 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1418 if ((mode = apply_args_mode[regno]) != VOIDmode)
1419 {
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1423
1424 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1425
1426 emit_move_insn (adjust_address (registers, mode, size), tem);
1427 size += GET_MODE_SIZE (mode);
1428 }
1429
1430 /* Save the arg pointer to the block. */
1431 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1432 /* We need the arg pointer as the caller actually passed the arguments to us,
1433 not as we might have pretended they were passed. Make sure it's a valid
1434 operand, as emit_move_insn isn't expected to handle a PLUS. */
1435 if (STACK_GROWS_DOWNWARD)
1436 tem
1437 = force_operand (plus_constant (Pmode, tem,
1438 crtl->args.pretend_args_size),
1439 NULL_RTX);
1440 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1441
1442 size = GET_MODE_SIZE (Pmode);
1443
1444 /* Save the structure value address unless this is passed as an
1445 "invisible" first argument. */
1446 if (struct_incoming_value)
1447 {
1448 emit_move_insn (adjust_address (registers, Pmode, size),
1449 copy_to_reg (struct_incoming_value));
1450 size += GET_MODE_SIZE (Pmode);
1451 }
1452
1453 /* Return the address of the block. */
1454 return copy_addr_to_reg (XEXP (registers, 0));
1455 }
1456
1457 /* __builtin_apply_args returns a block of memory allocated on
1458 the stack into which is stored the arg pointer, structure
1459 value address, static chain, and all the registers that might
1460 possibly be used in performing a function call. The code is
1461 moved to the start of the function so the incoming values are
1462 saved. */
1463
1464 static rtx
1465 expand_builtin_apply_args (void)
1466 {
1467 /* Don't do __builtin_apply_args more than once in a function.
1468 Save the result of the first call and reuse it. */
1469 if (apply_args_value != 0)
1470 return apply_args_value;
1471 {
1472 /* When this function is called, it means that registers must be
1473 saved on entry to this function. So we migrate the
1474 call to the first insn of this function. */
1475 rtx temp;
1476
1477 start_sequence ();
1478 temp = expand_builtin_apply_args_1 ();
1479 rtx_insn *seq = get_insns ();
1480 end_sequence ();
1481
1482 apply_args_value = temp;
1483
1484 /* Put the insns after the NOTE that starts the function.
1485 If this is inside a start_sequence, make the outer-level insn
1486 chain current, so the code is placed at the start of the
1487 function. If internal_arg_pointer is a non-virtual pseudo,
1488 it needs to be placed after the function that initializes
1489 that pseudo. */
1490 push_topmost_sequence ();
1491 if (REG_P (crtl->args.internal_arg_pointer)
1492 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1493 emit_insn_before (seq, parm_birth_insn);
1494 else
1495 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1496 pop_topmost_sequence ();
1497 return temp;
1498 }
1499 }
1500
1501 /* Perform an untyped call and save the state required to perform an
1502 untyped return of whatever value was returned by the given function. */
1503
1504 static rtx
1505 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1506 {
1507 int size, align, regno;
1508 machine_mode mode;
1509 rtx incoming_args, result, reg, dest, src;
1510 rtx_call_insn *call_insn;
1511 rtx old_stack_level = 0;
1512 rtx call_fusage = 0;
1513 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1514
1515 arguments = convert_memory_address (Pmode, arguments);
1516
1517 /* Create a block where the return registers can be saved. */
1518 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1519
1520 /* Fetch the arg pointer from the ARGUMENTS block. */
1521 incoming_args = gen_reg_rtx (Pmode);
1522 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1523 if (!STACK_GROWS_DOWNWARD)
1524 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1525 incoming_args, 0, OPTAB_LIB_WIDEN);
1526
1527 /* Push a new argument block and copy the arguments. Do not allow
1528 the (potential) memcpy call below to interfere with our stack
1529 manipulations. */
1530 do_pending_stack_adjust ();
1531 NO_DEFER_POP;
1532
1533 /* Save the stack with nonlocal if available. */
1534 if (targetm.have_save_stack_nonlocal ())
1535 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1536 else
1537 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1538
1539 /* Allocate a block of memory onto the stack and copy the memory
1540 arguments to the outgoing arguments address. We can pass TRUE
1541 as the 4th argument because we just saved the stack pointer
1542 and will restore it right after the call. */
1543 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1544
1545 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1546 may have already set current_function_calls_alloca to true.
1547 current_function_calls_alloca won't be set if argsize is zero,
1548 so we have to guarantee need_drap is true here. */
1549 if (SUPPORTS_STACK_ALIGNMENT)
1550 crtl->need_drap = true;
1551
1552 dest = virtual_outgoing_args_rtx;
1553 if (!STACK_GROWS_DOWNWARD)
1554 {
1555 if (CONST_INT_P (argsize))
1556 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1557 else
1558 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1559 }
1560 dest = gen_rtx_MEM (BLKmode, dest);
1561 set_mem_align (dest, PARM_BOUNDARY);
1562 src = gen_rtx_MEM (BLKmode, incoming_args);
1563 set_mem_align (src, PARM_BOUNDARY);
1564 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1565
1566 /* Refer to the argument block. */
1567 apply_args_size ();
1568 arguments = gen_rtx_MEM (BLKmode, arguments);
1569 set_mem_align (arguments, PARM_BOUNDARY);
1570
1571 /* Walk past the arg-pointer and structure value address. */
1572 size = GET_MODE_SIZE (Pmode);
1573 if (struct_value)
1574 size += GET_MODE_SIZE (Pmode);
1575
1576 /* Restore each of the registers previously saved. Make USE insns
1577 for each of these registers for use in making the call. */
1578 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1579 if ((mode = apply_args_mode[regno]) != VOIDmode)
1580 {
1581 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1582 if (size % align != 0)
1583 size = CEIL (size, align) * align;
1584 reg = gen_rtx_REG (mode, regno);
1585 emit_move_insn (reg, adjust_address (arguments, mode, size));
1586 use_reg (&call_fusage, reg);
1587 size += GET_MODE_SIZE (mode);
1588 }
1589
1590 /* Restore the structure value address unless this is passed as an
1591 "invisible" first argument. */
1592 size = GET_MODE_SIZE (Pmode);
1593 if (struct_value)
1594 {
1595 rtx value = gen_reg_rtx (Pmode);
1596 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1597 emit_move_insn (struct_value, value);
1598 if (REG_P (struct_value))
1599 use_reg (&call_fusage, struct_value);
1600 size += GET_MODE_SIZE (Pmode);
1601 }
1602
1603 /* All arguments and registers used for the call are set up by now! */
1604 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1605
1606 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1607 and we don't want to load it into a register as an optimization,
1608 because prepare_call_address already did it if it should be done. */
1609 if (GET_CODE (function) != SYMBOL_REF)
1610 function = memory_address (FUNCTION_MODE, function);
1611
1612 /* Generate the actual call instruction and save the return value. */
1613 if (targetm.have_untyped_call ())
1614 {
1615 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1616 emit_call_insn (targetm.gen_untyped_call (mem, result,
1617 result_vector (1, result)));
1618 }
1619 else if (targetm.have_call_value ())
1620 {
1621 rtx valreg = 0;
1622
1623 /* Locate the unique return register. It is not possible to
1624 express a call that sets more than one return register using
1625 call_value; use untyped_call for that. In fact, untyped_call
1626 only needs to save the return registers in the given block. */
1627 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1628 if ((mode = apply_result_mode[regno]) != VOIDmode)
1629 {
1630 gcc_assert (!valreg); /* have_untyped_call required. */
1631
1632 valreg = gen_rtx_REG (mode, regno);
1633 }
1634
1635 emit_insn (targetm.gen_call_value (valreg,
1636 gen_rtx_MEM (FUNCTION_MODE, function),
1637 const0_rtx, NULL_RTX, const0_rtx));
1638
1639 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1640 }
1641 else
1642 gcc_unreachable ();
1643
1644 /* Find the CALL insn we just emitted, and attach the register usage
1645 information. */
1646 call_insn = last_call_insn ();
1647 add_function_usage_to (call_insn, call_fusage);
1648
1649 /* Restore the stack. */
1650 if (targetm.have_save_stack_nonlocal ())
1651 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1652 else
1653 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1654 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1655
1656 OK_DEFER_POP;
1657
1658 /* Return the address of the result block. */
1659 result = copy_addr_to_reg (XEXP (result, 0));
1660 return convert_memory_address (ptr_mode, result);
1661 }
1662
1663 /* Perform an untyped return. */
1664
1665 static void
1666 expand_builtin_return (rtx result)
1667 {
1668 int size, align, regno;
1669 machine_mode mode;
1670 rtx reg;
1671 rtx_insn *call_fusage = 0;
1672
1673 result = convert_memory_address (Pmode, result);
1674
1675 apply_result_size ();
1676 result = gen_rtx_MEM (BLKmode, result);
1677
1678 if (targetm.have_untyped_return ())
1679 {
1680 rtx vector = result_vector (0, result);
1681 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1682 emit_barrier ();
1683 return;
1684 }
1685
1686 /* Restore the return value and note that each value is used. */
1687 size = 0;
1688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1689 if ((mode = apply_result_mode[regno]) != VOIDmode)
1690 {
1691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1692 if (size % align != 0)
1693 size = CEIL (size, align) * align;
1694 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1695 emit_move_insn (reg, adjust_address (result, mode, size));
1696
1697 push_to_sequence (call_fusage);
1698 emit_use (reg);
1699 call_fusage = get_insns ();
1700 end_sequence ();
1701 size += GET_MODE_SIZE (mode);
1702 }
1703
1704 /* Put the USE insns before the return. */
1705 emit_insn (call_fusage);
1706
1707 /* Return whatever values were restored by jumping directly to the end
1708 of the function. */
1709 expand_naked_return ();
1710 }
1711
1712 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1713
1714 static enum type_class
1715 type_to_class (tree type)
1716 {
1717 switch (TREE_CODE (type))
1718 {
1719 case VOID_TYPE: return void_type_class;
1720 case INTEGER_TYPE: return integer_type_class;
1721 case ENUMERAL_TYPE: return enumeral_type_class;
1722 case BOOLEAN_TYPE: return boolean_type_class;
1723 case POINTER_TYPE: return pointer_type_class;
1724 case REFERENCE_TYPE: return reference_type_class;
1725 case OFFSET_TYPE: return offset_type_class;
1726 case REAL_TYPE: return real_type_class;
1727 case COMPLEX_TYPE: return complex_type_class;
1728 case FUNCTION_TYPE: return function_type_class;
1729 case METHOD_TYPE: return method_type_class;
1730 case RECORD_TYPE: return record_type_class;
1731 case UNION_TYPE:
1732 case QUAL_UNION_TYPE: return union_type_class;
1733 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1734 ? string_type_class : array_type_class);
1735 case LANG_TYPE: return lang_type_class;
1736 default: return no_type_class;
1737 }
1738 }
1739
1740 /* Expand a call EXP to __builtin_classify_type. */
1741
1742 static rtx
1743 expand_builtin_classify_type (tree exp)
1744 {
1745 if (call_expr_nargs (exp))
1746 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1747 return GEN_INT (no_type_class);
1748 }
1749
1750 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1751 determines which among a set of three builtin math functions is
1752 appropriate for a given type mode. The `F' and `L' cases are
1753 automatically generated from the `double' case. */
1754 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1755 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1756 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1757 fcodel = BUILT_IN_MATHFN##L ; break;
1758 /* Similar to above, but appends _R after any F/L suffix. */
1759 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1760 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1761 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1762 fcodel = BUILT_IN_MATHFN##L_R ; break;
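/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to
     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF; fcodel = BUILT_IN_SINL; break;
   so each case records the double, float and long double variants of the
   function being looked up.  */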
1763
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on built-in function codes; it does not
1767 guarantee that the target actually has an implementation of the
1768 function. */
1769
1770 static built_in_function
1771 mathfn_built_in_2 (tree type, built_in_function fn)
1772 {
1773 built_in_function fcode, fcodef, fcodel;
1774
1775 switch (fn)
1776 {
1777 CASE_MATHFN (BUILT_IN_ACOS)
1778 CASE_MATHFN (BUILT_IN_ACOSH)
1779 CASE_MATHFN (BUILT_IN_ASIN)
1780 CASE_MATHFN (BUILT_IN_ASINH)
1781 CASE_MATHFN (BUILT_IN_ATAN)
1782 CASE_MATHFN (BUILT_IN_ATAN2)
1783 CASE_MATHFN (BUILT_IN_ATANH)
1784 CASE_MATHFN (BUILT_IN_CBRT)
1785 CASE_MATHFN (BUILT_IN_CEIL)
1786 CASE_MATHFN (BUILT_IN_CEXPI)
1787 CASE_MATHFN (BUILT_IN_COPYSIGN)
1788 CASE_MATHFN (BUILT_IN_COS)
1789 CASE_MATHFN (BUILT_IN_COSH)
1790 CASE_MATHFN (BUILT_IN_DREM)
1791 CASE_MATHFN (BUILT_IN_ERF)
1792 CASE_MATHFN (BUILT_IN_ERFC)
1793 CASE_MATHFN (BUILT_IN_EXP)
1794 CASE_MATHFN (BUILT_IN_EXP10)
1795 CASE_MATHFN (BUILT_IN_EXP2)
1796 CASE_MATHFN (BUILT_IN_EXPM1)
1797 CASE_MATHFN (BUILT_IN_FABS)
1798 CASE_MATHFN (BUILT_IN_FDIM)
1799 CASE_MATHFN (BUILT_IN_FLOOR)
1800 CASE_MATHFN (BUILT_IN_FMA)
1801 CASE_MATHFN (BUILT_IN_FMAX)
1802 CASE_MATHFN (BUILT_IN_FMIN)
1803 CASE_MATHFN (BUILT_IN_FMOD)
1804 CASE_MATHFN (BUILT_IN_FREXP)
1805 CASE_MATHFN (BUILT_IN_GAMMA)
1806 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1807 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1808 CASE_MATHFN (BUILT_IN_HYPOT)
1809 CASE_MATHFN (BUILT_IN_ILOGB)
1810 CASE_MATHFN (BUILT_IN_ICEIL)
1811 CASE_MATHFN (BUILT_IN_IFLOOR)
1812 CASE_MATHFN (BUILT_IN_INF)
1813 CASE_MATHFN (BUILT_IN_IRINT)
1814 CASE_MATHFN (BUILT_IN_IROUND)
1815 CASE_MATHFN (BUILT_IN_ISINF)
1816 CASE_MATHFN (BUILT_IN_J0)
1817 CASE_MATHFN (BUILT_IN_J1)
1818 CASE_MATHFN (BUILT_IN_JN)
1819 CASE_MATHFN (BUILT_IN_LCEIL)
1820 CASE_MATHFN (BUILT_IN_LDEXP)
1821 CASE_MATHFN (BUILT_IN_LFLOOR)
1822 CASE_MATHFN (BUILT_IN_LGAMMA)
1823 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1824 CASE_MATHFN (BUILT_IN_LLCEIL)
1825 CASE_MATHFN (BUILT_IN_LLFLOOR)
1826 CASE_MATHFN (BUILT_IN_LLRINT)
1827 CASE_MATHFN (BUILT_IN_LLROUND)
1828 CASE_MATHFN (BUILT_IN_LOG)
1829 CASE_MATHFN (BUILT_IN_LOG10)
1830 CASE_MATHFN (BUILT_IN_LOG1P)
1831 CASE_MATHFN (BUILT_IN_LOG2)
1832 CASE_MATHFN (BUILT_IN_LOGB)
1833 CASE_MATHFN (BUILT_IN_LRINT)
1834 CASE_MATHFN (BUILT_IN_LROUND)
1835 CASE_MATHFN (BUILT_IN_MODF)
1836 CASE_MATHFN (BUILT_IN_NAN)
1837 CASE_MATHFN (BUILT_IN_NANS)
1838 CASE_MATHFN (BUILT_IN_NEARBYINT)
1839 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1840 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1841 CASE_MATHFN (BUILT_IN_POW)
1842 CASE_MATHFN (BUILT_IN_POWI)
1843 CASE_MATHFN (BUILT_IN_POW10)
1844 CASE_MATHFN (BUILT_IN_REMAINDER)
1845 CASE_MATHFN (BUILT_IN_REMQUO)
1846 CASE_MATHFN (BUILT_IN_RINT)
1847 CASE_MATHFN (BUILT_IN_ROUND)
1848 CASE_MATHFN (BUILT_IN_SCALB)
1849 CASE_MATHFN (BUILT_IN_SCALBLN)
1850 CASE_MATHFN (BUILT_IN_SCALBN)
1851 CASE_MATHFN (BUILT_IN_SIGNBIT)
1852 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1853 CASE_MATHFN (BUILT_IN_SIN)
1854 CASE_MATHFN (BUILT_IN_SINCOS)
1855 CASE_MATHFN (BUILT_IN_SINH)
1856 CASE_MATHFN (BUILT_IN_SQRT)
1857 CASE_MATHFN (BUILT_IN_TAN)
1858 CASE_MATHFN (BUILT_IN_TANH)
1859 CASE_MATHFN (BUILT_IN_TGAMMA)
1860 CASE_MATHFN (BUILT_IN_TRUNC)
1861 CASE_MATHFN (BUILT_IN_Y0)
1862 CASE_MATHFN (BUILT_IN_Y1)
1863 CASE_MATHFN (BUILT_IN_YN)
1864
1865 default:
1866 return END_BUILTINS;
1867 }
1868
1869 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1870 return fcode;
1871 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1872 return fcodef;
1873 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1874 return fcodel;
1875 else
1876 return END_BUILTINS;
1877 }
1878
1879 /* Return the mathematical function equivalent to FN but operating directly
1880 on TYPE, if available. If IMPLICIT_P is true, use the implicit builtin
1881 declaration, otherwise use the explicit declaration. If we can't do the
1882 conversion, return NULL_TREE. */
1883
1884 static tree
1885 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1886 {
1887 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1888 if (fcode2 == END_BUILTINS)
1889 return NULL_TREE;
1890
1891 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1892 return NULL_TREE;
1893
1894 return builtin_decl_explicit (fcode2);
1895 }
1896
1897 /* Like mathfn_built_in_1 (), but always use the implicit builtin declarations. */
1898
1899 tree
1900 mathfn_built_in (tree type, enum built_in_function fn)
1901 {
1902 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1903 }
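/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the
   declaration of sinf, assuming the target makes sinf implicitly available;
   otherwise it yields NULL_TREE.  */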
1904
1905 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1906 return its code, otherwise return IFN_LAST. Note that this function
1907 only tests whether the function is defined in internal-fn.def, not whether
1908 it is actually available on the target. */
1909
1910 internal_fn
1911 associated_internal_fn (tree fndecl)
1912 {
1913 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1914 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1915 switch (DECL_FUNCTION_CODE (fndecl))
1916 {
1917 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1918 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1919 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1920 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1921 #include "internal-fn.def"
1922
1923 CASE_FLT_FN (BUILT_IN_POW10):
1924 return IFN_EXP10;
1925
1926 CASE_FLT_FN (BUILT_IN_DREM):
1927 return IFN_REMAINDER;
1928
1929 CASE_FLT_FN (BUILT_IN_SCALBN):
1930 CASE_FLT_FN (BUILT_IN_SCALBLN):
1931 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1932 return IFN_LDEXP;
1933 return IFN_LAST;
1934
1935 default:
1936 return IFN_LAST;
1937 }
1938 }
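/* For example, __builtin_sqrt maps to IFN_SQRT via the DEF_INTERNAL_FLT_FN
   entries from internal-fn.def, while __builtin_pow10 maps to IFN_EXP10 via
   the explicit case above.  Whether the target can actually expand the
   internal function is checked separately (see replacement_internal_fn).  */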
1939
1940 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1941 on the current target by a call to an internal function, return the
1942 code of that internal function, otherwise return IFN_LAST. The caller
1943 is responsible for ensuring that any side-effects of the built-in
1944 call are dealt with correctly. E.g. if CALL sets errno, the caller
1945 must decide that the errno result isn't needed or make it available
1946 in some other way. */
1947
1948 internal_fn
1949 replacement_internal_fn (gcall *call)
1950 {
1951 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1952 {
1953 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1954 if (ifn != IFN_LAST)
1955 {
1956 tree_pair types = direct_internal_fn_types (ifn, call);
1957 if (direct_internal_fn_supported_p (ifn, types))
1958 return ifn;
1959 }
1960 }
1961 return IFN_LAST;
1962 }
1963
1964 /* If errno must be maintained, expand the RTL to check if the result,
1965 TARGET, of a built-in function call, EXP, is NaN, and if so set
1966 errno to EDOM. */
1967
1968 static void
1969 expand_errno_check (tree exp, rtx target)
1970 {
1971 rtx_code_label *lab = gen_label_rtx ();
1972
1973 /* Test the result; if it is NaN, set errno=EDOM because
1974 the argument was not in the domain. */
1975 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1976 NULL_RTX, NULL, lab,
1977 /* The jump is very likely. */
1978 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
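  /* Only a NaN compares unequal to itself, so the branch to LAB is taken
     for every non-NaN result and the errno store below is skipped.  */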
1979
1980 #ifdef TARGET_EDOM
1981 /* If this built-in doesn't throw an exception, set errno directly. */
1982 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1983 {
1984 #ifdef GEN_ERRNO_RTX
1985 rtx errno_rtx = GEN_ERRNO_RTX;
1986 #else
1987 rtx errno_rtx
1988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1989 #endif
1990 emit_move_insn (errno_rtx,
1991 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1992 emit_label (lab);
1993 return;
1994 }
1995 #endif
1996
1997 /* Make sure the library call isn't expanded as a tail call. */
1998 CALL_EXPR_TAILCALL (exp) = 0;
1999
2000 /* We can't set errno=EDOM directly; let the library call do it.
2001 Pop the arguments right away in case the call gets deleted. */
2002 NO_DEFER_POP;
2003 expand_call (exp, target, 0);
2004 OK_DEFER_POP;
2005 emit_label (lab);
2006 }
2007
2008 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2009 Return NULL_RTX if a normal call should be emitted rather than expanding
2010 the function in-line. EXP is the expression that is a call to the builtin
2011 function; if convenient, the result should be placed in TARGET.
2012 SUBTARGET may be used as the target for computing one of EXP's operands. */
2013
2014 static rtx
2015 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2016 {
2017 optab builtin_optab;
2018 rtx op0;
2019 rtx_insn *insns;
2020 tree fndecl = get_callee_fndecl (exp);
2021 machine_mode mode;
2022 bool errno_set = false;
2023 bool try_widening = false;
2024 tree arg;
2025
2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
2028
2029 arg = CALL_EXPR_ARG (exp, 0);
2030
2031 switch (DECL_FUNCTION_CODE (fndecl))
2032 {
2033 CASE_FLT_FN (BUILT_IN_SQRT):
2034 errno_set = ! tree_expr_nonnegative_p (arg);
2035 try_widening = true;
2036 builtin_optab = sqrt_optab;
2037 break;
2038 CASE_FLT_FN (BUILT_IN_EXP):
2039 errno_set = true; builtin_optab = exp_optab; break;
2040 CASE_FLT_FN (BUILT_IN_EXP10):
2041 CASE_FLT_FN (BUILT_IN_POW10):
2042 errno_set = true; builtin_optab = exp10_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXP2):
2044 errno_set = true; builtin_optab = exp2_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXPM1):
2046 errno_set = true; builtin_optab = expm1_optab; break;
2047 CASE_FLT_FN (BUILT_IN_LOGB):
2048 errno_set = true; builtin_optab = logb_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOG):
2050 errno_set = true; builtin_optab = log_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG10):
2052 errno_set = true; builtin_optab = log10_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG2):
2054 errno_set = true; builtin_optab = log2_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG1P):
2056 errno_set = true; builtin_optab = log1p_optab; break;
2057 CASE_FLT_FN (BUILT_IN_ASIN):
2058 builtin_optab = asin_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ACOS):
2060 builtin_optab = acos_optab; break;
2061 CASE_FLT_FN (BUILT_IN_TAN):
2062 builtin_optab = tan_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ATAN):
2064 builtin_optab = atan_optab; break;
2065 CASE_FLT_FN (BUILT_IN_FLOOR):
2066 builtin_optab = floor_optab; break;
2067 CASE_FLT_FN (BUILT_IN_CEIL):
2068 builtin_optab = ceil_optab; break;
2069 CASE_FLT_FN (BUILT_IN_TRUNC):
2070 builtin_optab = btrunc_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ROUND):
2072 builtin_optab = round_optab; break;
2073 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2074 builtin_optab = nearbyint_optab;
2075 if (flag_trapping_math)
2076 break;
2077 /* Else fallthrough and expand as rint. */
2078 CASE_FLT_FN (BUILT_IN_RINT):
2079 builtin_optab = rint_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2081 builtin_optab = significand_optab; break;
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
2088
2089 if (! flag_errno_math || ! HONOR_NANS (mode))
2090 errno_set = false;
2091
2092 /* Before working hard, check whether the instruction is available, but try
2093 to widen the mode for specific operations. */
2094 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2095 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2096 && (!errno_set || !optimize_insn_for_size_p ()))
2097 {
2098 rtx result = gen_reg_rtx (mode);
2099
2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
2102 side-effects more than once. */
2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2104
2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2106
2107 start_sequence ();
2108
2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 result = expand_unop (mode, builtin_optab, op0, result, 0);
2112
2113 if (result != 0)
2114 {
2115 if (errno_set)
2116 expand_errno_check (exp, result);
2117
2118 /* Output the entire sequence. */
2119 insns = get_insns ();
2120 end_sequence ();
2121 emit_insn (insns);
2122 return result;
2123 }
2124
2125 /* If we were unable to expand via the builtin, stop the sequence
2126 (without outputting the insns) and call the library function
2127 with the stabilized argument list. */
2128 end_sequence ();
2129 }
2130
2131 return expand_call (exp, target, target == const0_rtx);
2132 }
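/* As an illustration, a call to sqrtf normally goes through sqrt_optab in
   SFmode here; if the target defines the corresponding pattern (typically
   named sqrtsf2), the call is expanded inline, otherwise the function falls
   back to expand_call and a normal call to sqrtf is emitted.  */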
2133
2134 /* Expand a call to the builtin binary math functions (pow and atan2).
2135 Return NULL_RTX if a normal call should be emitted rather than expanding the
2136 function in-line. EXP is the expression that is a call to the builtin
2137 function; if convenient, the result should be placed in TARGET.
2138 SUBTARGET may be used as the target for computing one of EXP's
2139 operands. */
2140
2141 static rtx
2142 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2143 {
2144 optab builtin_optab;
2145 rtx op0, op1, result;
2146 rtx_insn *insns;
2147 int op1_type = REAL_TYPE;
2148 tree fndecl = get_callee_fndecl (exp);
2149 tree arg0, arg1;
2150 machine_mode mode;
2151 bool errno_set = true;
2152
2153 switch (DECL_FUNCTION_CODE (fndecl))
2154 {
2155 CASE_FLT_FN (BUILT_IN_SCALBN):
2156 CASE_FLT_FN (BUILT_IN_SCALBLN):
2157 CASE_FLT_FN (BUILT_IN_LDEXP):
2158 op1_type = INTEGER_TYPE;
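      /* Fall through to the default break: only the expected type of the
         second argument differs for these functions.  */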
2159 default:
2160 break;
2161 }
2162
2163 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2164 return NULL_RTX;
2165
2166 arg0 = CALL_EXPR_ARG (exp, 0);
2167 arg1 = CALL_EXPR_ARG (exp, 1);
2168
2169 switch (DECL_FUNCTION_CODE (fndecl))
2170 {
2171 CASE_FLT_FN (BUILT_IN_POW):
2172 builtin_optab = pow_optab; break;
2173 CASE_FLT_FN (BUILT_IN_ATAN2):
2174 builtin_optab = atan2_optab; break;
2175 CASE_FLT_FN (BUILT_IN_SCALB):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2177 return 0;
2178 builtin_optab = scalb_optab; break;
2179 CASE_FLT_FN (BUILT_IN_SCALBN):
2180 CASE_FLT_FN (BUILT_IN_SCALBLN):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 /* Fall through... */
2184 CASE_FLT_FN (BUILT_IN_LDEXP):
2185 builtin_optab = ldexp_optab; break;
2186 CASE_FLT_FN (BUILT_IN_FMOD):
2187 builtin_optab = fmod_optab; break;
2188 CASE_FLT_FN (BUILT_IN_REMAINDER):
2189 CASE_FLT_FN (BUILT_IN_DREM):
2190 builtin_optab = remainder_optab; break;
2191 default:
2192 gcc_unreachable ();
2193 }
2194
2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
2197
2198 /* Before working hard, check whether the instruction is available. */
2199 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2200 return NULL_RTX;
2201
2202 result = gen_reg_rtx (mode);
2203
2204 if (! flag_errno_math || ! HONOR_NANS (mode))
2205 errno_set = false;
2206
2207 if (errno_set && optimize_insn_for_size_p ())
2208 return 0;
2209
2210 /* Always stabilize the argument list. */
2211 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2212 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2213
2214 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2215 op1 = expand_normal (arg1);
2216
2217 start_sequence ();
2218
2219 /* Compute into RESULT.
2220 Set RESULT to wherever the result comes back. */
2221 result = expand_binop (mode, builtin_optab, op0, op1,
2222 result, 0, OPTAB_DIRECT);
2223
2224 /* If we were unable to expand via the builtin, stop the sequence
2225 (without outputting the insns) and call the library function
2226 with the stabilized argument list. */
2227 if (result == 0)
2228 {
2229 end_sequence ();
2230 return expand_call (exp, target, target == const0_rtx);
2231 }
2232
2233 if (errno_set)
2234 expand_errno_check (exp, result);
2235
2236 /* Output the entire sequence. */
2237 insns = get_insns ();
2238 end_sequence ();
2239 emit_insn (insns);
2240
2241 return result;
2242 }
2243
2244 /* Expand a call to the builtin trinary math functions (fma).
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function; if convenient, the result should be placed in TARGET.
2248 SUBTARGET may be used as the target for computing one of EXP's
2249 operands. */
2250
2251 static rtx
2252 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2253 {
2254 optab builtin_optab;
2255 rtx op0, op1, op2, result;
2256 rtx_insn *insns;
2257 tree fndecl = get_callee_fndecl (exp);
2258 tree arg0, arg1, arg2;
2259 machine_mode mode;
2260
2261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2263
2264 arg0 = CALL_EXPR_ARG (exp, 0);
2265 arg1 = CALL_EXPR_ARG (exp, 1);
2266 arg2 = CALL_EXPR_ARG (exp, 2);
2267
2268 switch (DECL_FUNCTION_CODE (fndecl))
2269 {
2270 CASE_FLT_FN (BUILT_IN_FMA):
2271 builtin_optab = fma_optab; break;
2272 default:
2273 gcc_unreachable ();
2274 }
2275
2276 /* Make a suitable register to place result in. */
2277 mode = TYPE_MODE (TREE_TYPE (exp));
2278
2279 /* Before working hard, check whether the instruction is available. */
2280 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2281 return NULL_RTX;
2282
2283 result = gen_reg_rtx (mode);
2284
2285 /* Always stabilize the argument list. */
2286 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2287 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2288 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2289
2290 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2291 op1 = expand_normal (arg1);
2292 op2 = expand_normal (arg2);
2293
2294 start_sequence ();
2295
2296 /* Compute into RESULT.
2297 Set RESULT to wherever the result comes back. */
2298 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2299 result, 0);
2300
2301 /* If we were unable to expand via the builtin, stop the sequence
2302 (without outputting the insns) and call the library function
2303 with the stabilized argument list. */
2304 if (result == 0)
2305 {
2306 end_sequence ();
2307 return expand_call (exp, target, target == const0_rtx);
2308 }
2309
2310 /* Output the entire sequence. */
2311 insns = get_insns ();
2312 end_sequence ();
2313 emit_insn (insns);
2314
2315 return result;
2316 }
2317
2318 /* Expand a call to the builtin sin and cos math functions.
2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET.
2322 SUBTARGET may be used as the target for computing one of EXP's
2323 operands. */
2324
2325 static rtx
2326 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2327 {
2328 optab builtin_optab;
2329 rtx op0;
2330 rtx_insn *insns;
2331 tree fndecl = get_callee_fndecl (exp);
2332 machine_mode mode;
2333 tree arg;
2334
2335 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2336 return NULL_RTX;
2337
2338 arg = CALL_EXPR_ARG (exp, 0);
2339
2340 switch (DECL_FUNCTION_CODE (fndecl))
2341 {
2342 CASE_FLT_FN (BUILT_IN_SIN):
2343 CASE_FLT_FN (BUILT_IN_COS):
2344 builtin_optab = sincos_optab; break;
2345 default:
2346 gcc_unreachable ();
2347 }
2348
2349 /* Make a suitable register to place result in. */
2350 mode = TYPE_MODE (TREE_TYPE (exp));
2351
2352 /* Check if the sincos insn is available; otherwise fall back
2353 to the sin or cos insn. */
2354 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2355 switch (DECL_FUNCTION_CODE (fndecl))
2356 {
2357 CASE_FLT_FN (BUILT_IN_SIN):
2358 builtin_optab = sin_optab; break;
2359 CASE_FLT_FN (BUILT_IN_COS):
2360 builtin_optab = cos_optab; break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364
2365 /* Before working hard, check whether the instruction is available. */
2366 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2367 {
2368 rtx result = gen_reg_rtx (mode);
2369
2370 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2371 need to expand the argument again. This way, we will not perform
2372 side-effects more than once. */
2373 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2374
2375 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2376
2377 start_sequence ();
2378
2379 /* Compute into RESULT.
2380 Set RESULT to wherever the result comes back. */
2381 if (builtin_optab == sincos_optab)
2382 {
2383 int ok;
2384
2385 switch (DECL_FUNCTION_CODE (fndecl))
2386 {
2387 CASE_FLT_FN (BUILT_IN_SIN):
2388 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2389 break;
2390 CASE_FLT_FN (BUILT_IN_COS):
2391 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2392 break;
2393 default:
2394 gcc_unreachable ();
2395 }
2396 gcc_assert (ok);
2397 }
2398 else
2399 result = expand_unop (mode, builtin_optab, op0, result, 0);
2400
2401 if (result != 0)
2402 {
2403 /* Output the entire sequence. */
2404 insns = get_insns ();
2405 end_sequence ();
2406 emit_insn (insns);
2407 return result;
2408 }
2409
2410 /* If we were unable to expand via the builtin, stop the sequence
2411 (without outputting the insns) and call the library function
2412 with the stabilized argument list. */
2413 end_sequence ();
2414 }
2415
2416 return expand_call (exp, target, target == const0_rtx);
2417 }
2418
2419 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2420 return an RTL instruction code that implements the functionality.
2421 If that isn't possible or available, return CODE_FOR_nothing. */
2422
2423 static enum insn_code
2424 interclass_mathfn_icode (tree arg, tree fndecl)
2425 {
2426 bool errno_set = false;
2427 optab builtin_optab = unknown_optab;
2428 machine_mode mode;
2429
2430 switch (DECL_FUNCTION_CODE (fndecl))
2431 {
2432 CASE_FLT_FN (BUILT_IN_ILOGB):
2433 errno_set = true; builtin_optab = ilogb_optab; break;
2434 CASE_FLT_FN (BUILT_IN_ISINF):
2435 builtin_optab = isinf_optab; break;
2436 case BUILT_IN_ISNORMAL:
2437 case BUILT_IN_ISFINITE:
2438 CASE_FLT_FN (BUILT_IN_FINITE):
2439 case BUILT_IN_FINITED32:
2440 case BUILT_IN_FINITED64:
2441 case BUILT_IN_FINITED128:
2442 case BUILT_IN_ISINFD32:
2443 case BUILT_IN_ISINFD64:
2444 case BUILT_IN_ISINFD128:
2445 /* These builtins have no optabs (yet). */
2446 break;
2447 default:
2448 gcc_unreachable ();
2449 }
2450
2451 /* There's no easy way to detect the case we need to set EDOM. */
2452 if (flag_errno_math && errno_set)
2453 return CODE_FOR_nothing;
2454
2455 /* Optab mode depends on the mode of the input argument. */
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2457
2458 if (builtin_optab)
2459 return optab_handler (builtin_optab, mode);
2460 return CODE_FOR_nothing;
2461 }
2462
2463 /* Expand a call to one of the builtin math functions that operate on
2464 a floating-point argument and output an integer result (ilogb, isinf,
2465 isnan, etc.).
2466 Return 0 if a normal call should be emitted rather than expanding the
2467 function in-line. EXP is the expression that is a call to the builtin
2468 function; if convenient, the result should be placed in TARGET. */
2469
2470 static rtx
2471 expand_builtin_interclass_mathfn (tree exp, rtx target)
2472 {
2473 enum insn_code icode = CODE_FOR_nothing;
2474 rtx op0;
2475 tree fndecl = get_callee_fndecl (exp);
2476 machine_mode mode;
2477 tree arg;
2478
2479 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2480 return NULL_RTX;
2481
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 icode = interclass_mathfn_icode (arg, fndecl);
2484 mode = TYPE_MODE (TREE_TYPE (arg));
2485
2486 if (icode != CODE_FOR_nothing)
2487 {
2488 struct expand_operand ops[1];
2489 rtx_insn *last = get_last_insn ();
2490 tree orig_arg = arg;
2491
2492 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2493 need to expand the argument again. This way, we will not perform
2494 side-effects more than once. */
2495 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2496
2497 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2498
2499 if (mode != GET_MODE (op0))
2500 op0 = convert_to_mode (mode, op0, 0);
2501
2502 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2503 if (maybe_legitimize_operands (icode, 0, 1, ops)
2504 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2505 return ops[0].value;
2506
2507 delete_insns_since (last);
2508 CALL_EXPR_ARG (exp, 0) = orig_arg;
2509 }
2510
2511 return NULL_RTX;
2512 }
2513
2514 /* Expand a call to the builtin sincos math function.
2515 Return NULL_RTX if a normal call should be emitted rather than expanding the
2516 function in-line. EXP is the expression that is a call to the builtin
2517 function. */
2518
2519 static rtx
2520 expand_builtin_sincos (tree exp)
2521 {
2522 rtx op0, op1, op2, target1, target2;
2523 machine_mode mode;
2524 tree arg, sinp, cosp;
2525 int result;
2526 location_t loc = EXPR_LOCATION (exp);
2527 tree alias_type, alias_off;
2528
2529 if (!validate_arglist (exp, REAL_TYPE,
2530 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2531 return NULL_RTX;
2532
2533 arg = CALL_EXPR_ARG (exp, 0);
2534 sinp = CALL_EXPR_ARG (exp, 1);
2535 cosp = CALL_EXPR_ARG (exp, 2);
2536
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (arg));
2539
2540 /* Check if sincos insn is available, otherwise emit the call. */
2541 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2542 return NULL_RTX;
2543
2544 target1 = gen_reg_rtx (mode);
2545 target2 = gen_reg_rtx (mode);
2546
2547 op0 = expand_normal (arg);
2548 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2549 alias_off = build_int_cst (alias_type, 0);
2550 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 sinp, alias_off));
2552 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 cosp, alias_off));
2554
2555 /* Compute into target1 and target2.
2556 Set TARGET to wherever the result comes back. */
2557 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2558 gcc_assert (result);
2559
2560 /* Move target1 and target2 to the memory locations indicated
2561 by op1 and op2. */
2562 emit_move_insn (op1, target1);
2563 emit_move_insn (op2, target2);
2564
2565 return const0_rtx;
2566 }
2567
2568 /* Expand a call to the internal cexpi builtin to the sincos math function.
2569 EXP is the expression that is a call to the builtin function; if convenient,
2570 the result should be placed in TARGET. */
2571
2572 static rtx
2573 expand_builtin_cexpi (tree exp, rtx target)
2574 {
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg, type;
2577 machine_mode mode;
2578 rtx op0, op1, op2;
2579 location_t loc = EXPR_LOCATION (exp);
2580
2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 return NULL_RTX;
2583
2584 arg = CALL_EXPR_ARG (exp, 0);
2585 type = TREE_TYPE (arg);
2586 mode = TYPE_MODE (TREE_TYPE (arg));
2587
2588 /* Try expanding via a sincos optab; fall back to emitting a libcall
2589 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2590 is only generated from sincos or cexp, or when we have one of them. */
2591 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2592 {
2593 op1 = gen_reg_rtx (mode);
2594 op2 = gen_reg_rtx (mode);
2595
2596 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2597
2598 /* Compute into op1 and op2. */
2599 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2600 }
2601 else if (targetm.libc_has_function (function_sincos))
2602 {
2603 tree call, fn = NULL_TREE;
2604 tree top1, top2;
2605 rtx op1a, op2a;
2606
2607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2612 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2613 else
2614 gcc_unreachable ();
2615
2616 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2617 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op1a = copy_addr_to_reg (XEXP (op1, 0));
2619 op2a = copy_addr_to_reg (XEXP (op2, 0));
2620 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2621 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2622
2623 /* Make sure not to fold the sincos call again. */
2624 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2625 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2626 call, 3, arg, top1, top2));
2627 }
2628 else
2629 {
2630 tree call, fn = NULL_TREE, narg;
2631 tree ctype = build_complex_type (type);
2632
2633 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2638 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2639 else
2640 gcc_unreachable ();
2641
2642 /* If we don't have a decl for cexp, create one. This is the
2643 friendliest fallback if the user calls __builtin_cexpi
2644 without full target C99 function support. */
2645 if (fn == NULL_TREE)
2646 {
2647 tree fntype;
2648 const char *name = NULL;
2649
2650 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2651 name = "cexpf";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2653 name = "cexp";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2655 name = "cexpl";
2656
2657 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2658 fn = build_fn_decl (name, fntype);
2659 }
2660
2661 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2662 build_real (type, dconst0), arg);
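      /* NARG is the purely imaginary value 0 + ARG*i, so cexp (NARG)
         evaluates to cos (ARG) + sin (ARG)*i, which is exactly cexpi.  */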
2663
2664 /* Make sure not to fold the cexp call again. */
2665 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2666 return expand_expr (build_call_nary (ctype, call, 1, narg),
2667 target, VOIDmode, EXPAND_NORMAL);
2668 }
2669
2670 /* Now build the proper return type. */
2671 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2672 make_tree (TREE_TYPE (arg), op2),
2673 make_tree (TREE_TYPE (arg), op1)),
2674 target, VOIDmode, EXPAND_NORMAL);
2675 }
2676
2677 /* Conveniently construct a function call expression. FNDECL names the
2678 function to be called, N is the number of arguments, and the "..."
2679 parameters are the argument expressions. Unlike build_call_expr,
2680 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2681
2682 static tree
2683 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2684 {
2685 va_list ap;
2686 tree fntype = TREE_TYPE (fndecl);
2687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2688
2689 va_start (ap, n);
2690 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2691 va_end (ap);
2692 SET_EXPR_LOCATION (fn, loc);
2693 return fn;
2694 }
2695
2696 /* Expand a call to one of the builtin rounding functions gcc defines
2697 as an extension (lfloor and lceil). As these are gcc extensions we
2698 do not need to worry about setting errno to EDOM.
2699 If expanding via the optab fails, lower the expression to (int) floor (x).
2700 EXP is the expression that is a call to the builtin function;
2701 if convenient, the result should be placed in TARGET. */
2702
2703 static rtx
2704 expand_builtin_int_roundingfn (tree exp, rtx target)
2705 {
2706 convert_optab builtin_optab;
2707 rtx op0, tmp;
2708 rtx_insn *insns;
2709 tree fndecl = get_callee_fndecl (exp);
2710 enum built_in_function fallback_fn;
2711 tree fallback_fndecl;
2712 machine_mode mode;
2713 tree arg;
2714
2715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2716 gcc_unreachable ();
2717
2718 arg = CALL_EXPR_ARG (exp, 0);
2719
2720 switch (DECL_FUNCTION_CODE (fndecl))
2721 {
2722 CASE_FLT_FN (BUILT_IN_ICEIL):
2723 CASE_FLT_FN (BUILT_IN_LCEIL):
2724 CASE_FLT_FN (BUILT_IN_LLCEIL):
2725 builtin_optab = lceil_optab;
2726 fallback_fn = BUILT_IN_CEIL;
2727 break;
2728
2729 CASE_FLT_FN (BUILT_IN_IFLOOR):
2730 CASE_FLT_FN (BUILT_IN_LFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2732 builtin_optab = lfloor_optab;
2733 fallback_fn = BUILT_IN_FLOOR;
2734 break;
2735
2736 default:
2737 gcc_unreachable ();
2738 }
2739
2740 /* Make a suitable register to place result in. */
2741 mode = TYPE_MODE (TREE_TYPE (exp));
2742
2743 target = gen_reg_rtx (mode);
2744
2745 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2746 need to expand the argument again. This way, we will not perform
2747 side-effects more than once. */
2748 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2749
2750 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2751
2752 start_sequence ();
2753
2754 /* Compute into TARGET. */
2755 if (expand_sfix_optab (target, op0, builtin_optab))
2756 {
2757 /* Output the entire sequence. */
2758 insns = get_insns ();
2759 end_sequence ();
2760 emit_insn (insns);
2761 return target;
2762 }
2763
2764 /* If we were unable to expand via the builtin, stop the sequence
2765 (without outputting the insns). */
2766 end_sequence ();
2767
2768 /* Fall back to floating point rounding optab. */
2769 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2770
2771 /* For non-C99 targets we may end up without a fallback fndecl here
2772 if the user called __builtin_lfloor directly. In this case emit
2773 a call to the floor/ceil variants nevertheless. This should result
2774 in the best user experience for targets lacking full C99 support. */
2775 if (fallback_fndecl == NULL_TREE)
2776 {
2777 tree fntype;
2778 const char *name = NULL;
2779
2780 switch (DECL_FUNCTION_CODE (fndecl))
2781 {
2782 case BUILT_IN_ICEIL:
2783 case BUILT_IN_LCEIL:
2784 case BUILT_IN_LLCEIL:
2785 name = "ceil";
2786 break;
2787 case BUILT_IN_ICEILF:
2788 case BUILT_IN_LCEILF:
2789 case BUILT_IN_LLCEILF:
2790 name = "ceilf";
2791 break;
2792 case BUILT_IN_ICEILL:
2793 case BUILT_IN_LCEILL:
2794 case BUILT_IN_LLCEILL:
2795 name = "ceill";
2796 break;
2797 case BUILT_IN_IFLOOR:
2798 case BUILT_IN_LFLOOR:
2799 case BUILT_IN_LLFLOOR:
2800 name = "floor";
2801 break;
2802 case BUILT_IN_IFLOORF:
2803 case BUILT_IN_LFLOORF:
2804 case BUILT_IN_LLFLOORF:
2805 name = "floorf";
2806 break;
2807 case BUILT_IN_IFLOORL:
2808 case BUILT_IN_LFLOORL:
2809 case BUILT_IN_LLFLOORL:
2810 name = "floorl";
2811 break;
2812 default:
2813 gcc_unreachable ();
2814 }
2815
2816 fntype = build_function_type_list (TREE_TYPE (arg),
2817 TREE_TYPE (arg), NULL_TREE);
2818 fallback_fndecl = build_fn_decl (name, fntype);
2819 }
2820
2821 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2822
2823 tmp = expand_normal (exp);
2824 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2825
2826 /* Truncate the result of floating point optab to integer
2827 via expand_fix (). */
2828 target = gen_reg_rtx (mode);
2829 expand_fix (target, tmp, 0);
2830
2831 return target;
2832 }
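/* As an illustration, on a target without an lfloor pattern a call to
   __builtin_lfloor (x) ends up as a call to floor (or the "floor" fallback
   decl built above) followed by an expand_fix conversion to the integer
   result mode.  */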
2833
2834 /* Expand a call to one of the builtin math functions doing integer
2835 conversion (lrint).
2836 Return 0 if a normal call should be emitted rather than expanding the
2837 function in-line. EXP is the expression that is a call to the builtin
2838 function; if convenient, the result should be placed in TARGET. */
2839
2840 static rtx
2841 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2842 {
2843 convert_optab builtin_optab;
2844 rtx op0;
2845 rtx_insn *insns;
2846 tree fndecl = get_callee_fndecl (exp);
2847 tree arg;
2848 machine_mode mode;
2849 enum built_in_function fallback_fn = BUILT_IN_NONE;
2850
2851 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2852 gcc_unreachable ();
2853
2854 arg = CALL_EXPR_ARG (exp, 0);
2855
2856 switch (DECL_FUNCTION_CODE (fndecl))
2857 {
2858 CASE_FLT_FN (BUILT_IN_IRINT):
2859 fallback_fn = BUILT_IN_LRINT;
2860 /* FALLTHRU */
2861 CASE_FLT_FN (BUILT_IN_LRINT):
2862 CASE_FLT_FN (BUILT_IN_LLRINT):
2863 builtin_optab = lrint_optab;
2864 break;
2865
2866 CASE_FLT_FN (BUILT_IN_IROUND):
2867 fallback_fn = BUILT_IN_LROUND;
2868 /* FALLTHRU */
2869 CASE_FLT_FN (BUILT_IN_LROUND):
2870 CASE_FLT_FN (BUILT_IN_LLROUND):
2871 builtin_optab = lround_optab;
2872 break;
2873
2874 default:
2875 gcc_unreachable ();
2876 }
2877
2878 /* There's no easy way to detect the case we need to set EDOM. */
2879 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2880 return NULL_RTX;
2881
2882 /* Make a suitable register to place result in. */
2883 mode = TYPE_MODE (TREE_TYPE (exp));
2884
2885 /* Expand inline only when errno handling is not required, since there's
 no easy way to detect the case we need to set EDOM. */
2886 if (!flag_errno_math)
2887 {
2888 rtx result = gen_reg_rtx (mode);
2889
2890 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2891 need to expand the argument again. This way, we will not perform
2892 side-effects more than once. */
2893 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2894
2895 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2896
2897 start_sequence ();
2898
2899 if (expand_sfix_optab (result, op0, builtin_optab))
2900 {
2901 /* Output the entire sequence. */
2902 insns = get_insns ();
2903 end_sequence ();
2904 emit_insn (insns);
2905 return result;
2906 }
2907
2908 /* If we were unable to expand via the builtin, stop the sequence
2909 (without outputting the insns) and call the library function
2910 with the stabilized argument list. */
2911 end_sequence ();
2912 }
2913
2914 if (fallback_fn != BUILT_IN_NONE)
2915 {
2916 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2917 targets, (int) round (x) should never be transformed into
2918 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2919 a call to lround in the hope that the target provides at least some
2920 C99 functions. This should result in the best user experience for
2921 targets lacking full C99 support. */
2922 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2923 fallback_fn, 0);
2924
2925 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2926 fallback_fndecl, 1, arg);
2927
2928 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2929 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2930 return convert_to_mode (mode, target, 0);
2931 }
2932
2933 return expand_call (exp, target, target == const0_rtx);
2934 }
2935
2936 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2937 a normal call should be emitted rather than expanding the function
2938 in-line. EXP is the expression that is a call to the builtin
2939 function; if convenient, the result should be placed in TARGET. */
2940
2941 static rtx
2942 expand_builtin_powi (tree exp, rtx target)
2943 {
2944 tree arg0, arg1;
2945 rtx op0, op1;
2946 machine_mode mode;
2947 machine_mode mode2;
2948
2949 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2950 return NULL_RTX;
2951
2952 arg0 = CALL_EXPR_ARG (exp, 0);
2953 arg1 = CALL_EXPR_ARG (exp, 1);
2954 mode = TYPE_MODE (TREE_TYPE (exp));
2955
2956 /* Emit a libcall to libgcc. */
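  /* For DFmode this is typically the __powidf2 helper; the exact name is
     whatever optab_libfunc (powi_optab, mode) returns for the target.  */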
2957
2958 /* Mode of the 2nd argument must match that of an int. */
2959 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2960
2961 if (target == NULL_RTX)
2962 target = gen_reg_rtx (mode);
2963
2964 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2965 if (GET_MODE (op0) != mode)
2966 op0 = convert_to_mode (mode, op0, 0);
2967 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2968 if (GET_MODE (op1) != mode2)
2969 op1 = convert_to_mode (mode2, op1, 0);
2970
2971 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2972 target, LCT_CONST, mode, 2,
2973 op0, mode, op1, mode2);
2974
2975 return target;
2976 }
2977
2978 /* Expand expression EXP which is a call to the strlen builtin. Return
2979 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2980 try to get the result in TARGET, if convenient. */
2981
2982 static rtx
2983 expand_builtin_strlen (tree exp, rtx target,
2984 machine_mode target_mode)
2985 {
2986 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2987 return NULL_RTX;
2988 else
2989 {
2990 struct expand_operand ops[4];
2991 rtx pat;
2992 tree len;
2993 tree src = CALL_EXPR_ARG (exp, 0);
2994 rtx src_reg;
2995 rtx_insn *before_strlen;
2996 machine_mode insn_mode = target_mode;
2997 enum insn_code icode = CODE_FOR_nothing;
2998 unsigned int align;
2999
3000 /* If the length can be computed at compile-time, return it. */
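      /* E.g. strlen ("foobar") folds to the constant 6 here.  */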
3001 len = c_strlen (src, 0);
3002 if (len)
3003 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3004
3005 /* If the length can be computed at compile-time and is a constant
3006 integer, but there are side-effects in src, evaluate
3007 src for side-effects, then return len.
3008 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3009 can be optimized into: i++; x = 3; */
3010 len = c_strlen (src, 1);
3011 if (len && TREE_CODE (len) == INTEGER_CST)
3012 {
3013 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 }
3016
3017 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3018
3019 /* If SRC is not a pointer type, don't do this operation inline. */
3020 if (align == 0)
3021 return NULL_RTX;
3022
3023 /* Bail out if we can't compute strlen in the right mode. */
3024 while (insn_mode != VOIDmode)
3025 {
3026 icode = optab_handler (strlen_optab, insn_mode);
3027 if (icode != CODE_FOR_nothing)
3028 break;
3029
3030 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3031 }
3032 if (insn_mode == VOIDmode)
3033 return NULL_RTX;
3034
3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg = gen_reg_rtx (Pmode);
3039
3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
3042 before_strlen = get_last_insn ();
3043
3044 create_output_operand (&ops[0], target, insn_mode);
3045 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3046 create_integer_operand (&ops[2], 0);
3047 create_integer_operand (&ops[3], align);
3048 if (!maybe_expand_insn (icode, 4, ops))
3049 return NULL_RTX;
3050
3051 /* Now that we are assured of success, expand the source. */
3052 start_sequence ();
3053 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3054 if (pat != src_reg)
3055 {
3056 #ifdef POINTERS_EXTEND_UNSIGNED
3057 if (GET_MODE (pat) != Pmode)
3058 pat = convert_to_mode (Pmode, pat,
3059 POINTERS_EXTEND_UNSIGNED);
3060 #endif
3061 emit_move_insn (src_reg, pat);
3062 }
3063 pat = get_insns ();
3064 end_sequence ();
3065
3066 if (before_strlen)
3067 emit_insn_after (pat, before_strlen);
3068 else
3069 emit_insn_before (pat, get_insns ());
3070
3071 /* Return the value in the proper mode for this function. */
3072 if (GET_MODE (ops[0].value) == target_mode)
3073 target = ops[0].value;
3074 else if (target != 0)
3075 convert_move (target, ops[0].value, 0);
3076 else
3077 target = convert_to_mode (target_mode, ops[0].value, 0);
3078
3079 return target;
3080 }
3081 }
3082
3083 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3084 bytes from constant string DATA + OFFSET and return it as target
3085 constant. */
3086
3087 static rtx
3088 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3089 machine_mode mode)
3090 {
3091 const char *str = (const char *) data;
3092
3093 gcc_assert (offset >= 0
3094 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3095 <= strlen (str) + 1));
3096
3097 return c_readstr (str + offset, mode);
3098 }
3099
3100 /* LEN specifies the length of the block for the memcpy/memset operation.
3101 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3102 In some cases we can make a very likely guess at the maximum size; that
3103 guess is stored in PROBABLE_MAX_SIZE. */
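/* For example, if VRP recorded the range [4, 32] for a non-constant LEN,
   the caller sees MIN_SIZE = 4 and MAX_SIZE = PROBABLE_MAX_SIZE = 32;
   a constant LEN_RTX makes all three equal to that constant.  */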
3104
3105 static void
3106 determine_block_size (tree len, rtx len_rtx,
3107 unsigned HOST_WIDE_INT *min_size,
3108 unsigned HOST_WIDE_INT *max_size,
3109 unsigned HOST_WIDE_INT *probable_max_size)
3110 {
3111 if (CONST_INT_P (len_rtx))
3112 {
3113 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3114 return;
3115 }
3116 else
3117 {
3118 wide_int min, max;
3119 enum value_range_type range_type = VR_UNDEFINED;
3120
3121 /* Determine bounds from the type. */
3122 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3123 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3124 else
3125 *min_size = 0;
3126 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3127 *probable_max_size = *max_size
3128 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3129 else
3130 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3131
3132 if (TREE_CODE (len) == SSA_NAME)
3133 range_type = get_range_info (len, &min, &max);
3134 if (range_type == VR_RANGE)
3135 {
3136 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3137 *min_size = min.to_uhwi ();
3138 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3139 *probable_max_size = *max_size = max.to_uhwi ();
3140 }
3141 else if (range_type == VR_ANTI_RANGE)
3142 {
3143 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3144 if (min == 0)
3145 {
3146 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3147 *min_size = max.to_uhwi () + 1;
3148 }
3149 /* Code like
3150
3151 int n;
3152 if (n < 100)
3153 memcpy (a, b, n)
3154
3155 produces an anti range allowing negative values of N. We can still
3156 use the information and guess that N is not negative.
3157 */
3158 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3159 *probable_max_size = min.to_uhwi () - 1;
3160 }
3161 }
3162 gcc_checking_assert (*max_size <=
3163 (unsigned HOST_WIDE_INT)
3164 GET_MODE_MASK (GET_MODE (len_rtx)));
3165 }
3166
3167 /* Helper function to do the actual work for expand_builtin_memcpy. */
3168
3169 static rtx
3170 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3171 {
3172 const char *src_str;
3173 unsigned int src_align = get_pointer_alignment (src);
3174 unsigned int dest_align = get_pointer_alignment (dest);
3175 rtx dest_mem, src_mem, dest_addr, len_rtx;
3176 HOST_WIDE_INT expected_size = -1;
3177 unsigned int expected_align = 0;
3178 unsigned HOST_WIDE_INT min_size;
3179 unsigned HOST_WIDE_INT max_size;
3180 unsigned HOST_WIDE_INT probable_max_size;
3181
3182 /* If DEST is not a pointer type, call the normal function. */
3183 if (dest_align == 0)
3184 return NULL_RTX;
3185
3186 /* If SRC is not a pointer type, don't do this
3187 operation in-line. */
3188 if (src_align == 0)
3189 return NULL_RTX;
3190
3191 if (currently_expanding_gimple_stmt)
3192 stringop_block_profile (currently_expanding_gimple_stmt,
3193 &expected_align, &expected_size);
3194
3195 if (expected_align < dest_align)
3196 expected_align = dest_align;
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 len_rtx = expand_normal (len);
3200 determine_block_size (len, len_rtx, &min_size, &max_size,
3201 &probable_max_size);
3202 src_str = c_getstr (src);
3203
3204 /* If SRC is a string constant and block move would be done
3205 by pieces, we can avoid loading the string from memory
3206 and instead store only the computed constants. */
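  /* E.g. memcpy (dst, "abc", 4) with a sufficiently aligned DST can then
     become a single constant store rather than a copy from the string's
     memory image.  */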
3207 if (src_str
3208 && CONST_INT_P (len_rtx)
3209 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3210 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3211 CONST_CAST (char *, src_str),
3212 dest_align, false))
3213 {
3214 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3215 builtin_memcpy_read_str,
3216 CONST_CAST (char *, src_str),
3217 dest_align, false, 0);
3218 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3219 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3220 return dest_mem;
3221 }
3222
3223 src_mem = get_memory_rtx (src, len);
3224 set_mem_align (src_mem, src_align);
3225
3226 /* Copy word part most expediently. */
3227 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3228 CALL_EXPR_TAILCALL (exp)
3229 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3230 expected_align, expected_size,
3231 min_size, max_size, probable_max_size);
3232
3233 if (dest_addr == 0)
3234 {
3235 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3236 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3237 }
3238
3239 return dest_addr;
3240 }
3241
3242 /* Expand a call EXP to the memcpy builtin.
3243 Return NULL_RTX if we failed; the caller should emit a normal call,
3244 otherwise try to get the result in TARGET, if convenient (and in
3245 mode MODE if that's convenient). */
3246
3247 static rtx
3248 expand_builtin_memcpy (tree exp, rtx target)
3249 {
3250 if (!validate_arglist (exp,
3251 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3252 return NULL_RTX;
3253 else
3254 {
3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 1);
3257 tree len = CALL_EXPR_ARG (exp, 2);
3258 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3259 }
3260 }
3261
3262 /* Expand an instrumented call EXP to the memcpy builtin.
3263 Return NULL_RTX if we failed; the caller should emit a normal call,
3264 otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3266
3267 static rtx
3268 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3269 {
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 2);
3279 tree len = CALL_EXPR_ARG (exp, 4);
3280 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3281
3282 /* Return src bounds with the result. */
3283 if (res)
3284 {
3285 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3286 expand_normal (CALL_EXPR_ARG (exp, 1)));
3287 res = chkp_join_splitted_slot (res, bnd);
3288 }
3289 return res;
3290 }
3291 }
3292
3293 /* Expand a call EXP to the mempcpy builtin.
3294 Return NULL_RTX if we failed; the caller should emit a normal call,
3295 otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). If ENDP is 0 return the
3297 destination pointer, if ENDP is 1 return the end pointer ala
3298 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3299 stpcpy. */
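/* With LEN = 3, for instance, ENDP 0 returns DEST, ENDP 1 returns DEST + 3,
   and ENDP 2 returns DEST + 2 (the address of the last byte written, as
   stpcpy would).  */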
3300
3301 static rtx
3302 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3303 {
3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307 else
3308 {
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 1);
3311 tree len = CALL_EXPR_ARG (exp, 2);
3312 return expand_builtin_mempcpy_args (dest, src, len,
3313 target, mode, /*endp=*/ 1,
3314 exp);
3315 }
3316 }
3317
3318 /* Expand an instrumented call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
3320 otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). */
3322
3323 static rtx
3324 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3325 {
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3331 else
3332 {
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 2);
3335 tree len = CALL_EXPR_ARG (exp, 4);
3336 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3337 mode, 1, exp);
3338
3339 /* Return src bounds with the result. */
3340 if (res)
3341 {
3342 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3343 expand_normal (CALL_EXPR_ARG (exp, 1)));
3344 res = chkp_join_splitted_slot (res, bnd);
3345 }
3346 return res;
3347 }
3348 }
3349
3350 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3351 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3352 so that this can also be called without constructing an actual CALL_EXPR.
3353 The other arguments and return value are the same as for
3354 expand_builtin_mempcpy. */
3355
3356 static rtx
3357 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3358 rtx target, machine_mode mode, int endp,
3359 tree orig_exp)
3360 {
3361 tree fndecl = get_callee_fndecl (orig_exp);
3362
3363 /* If return value is ignored, transform mempcpy into memcpy. */
3364 if (target == const0_rtx
3365 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3366 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3367 {
3368 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3369 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3370 dest, src, len);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3372 }
3373 else if (target == const0_rtx
3374 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3375 {
3376 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3377 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3378 dest, src, len);
3379 return expand_expr (result, target, mode, EXPAND_NORMAL);
3380 }
3381 else
3382 {
3383 const char *src_str;
3384 unsigned int src_align = get_pointer_alignment (src);
3385 unsigned int dest_align = get_pointer_alignment (dest);
3386 rtx dest_mem, src_mem, len_rtx;
3387
3388 /* If either SRC or DEST is not a pointer type, don't do this
3389 operation in-line. */
3390 if (dest_align == 0 || src_align == 0)
3391 return NULL_RTX;
3392
3393 /* If LEN is not constant, call the normal function. */
3394 if (! tree_fits_uhwi_p (len))
3395 return NULL_RTX;
3396
3397 len_rtx = expand_normal (len);
3398 src_str = c_getstr (src);
3399
3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
3402 and only store the computed constants. */
3403 if (src_str
3404 && CONST_INT_P (len_rtx)
3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
3409 {
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 CONST_CAST (char *, src_str),
3415 dest_align, false, endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3419 }
3420
3421 if (CONST_INT_P (len_rtx)
3422 && can_move_by_pieces (INTVAL (len_rtx),
3423 MIN (dest_align, src_align)))
3424 {
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 src_mem = get_memory_rtx (src, len);
3428 set_mem_align (src_mem, src_align);
3429 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3430 MIN (dest_align, src_align), endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3434 }
3435
3436 return NULL_RTX;
3437 }
3438 }
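
/* Illustrative sketch only (copy_with_endp is not a GCC function; the block
   is guarded out of the build): the ENDP convention used by
   expand_builtin_mempcpy_args and expand_movstr, modelled in plain C.  */
#if 0
#include <string.h>

static void *
copy_with_endp (void *dest, const void *src, size_t n, int endp)
{
  memcpy (dest, src, n);
  if (endp == 0)
    return dest;			/* memcpy-style result.  */
  if (endp == 1)
    return (char *) dest + n;		/* mempcpy-style result.  */
  return (char *) dest + n - 1;		/* stpcpy-style result (the NUL).  */
}
#endif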
3439
3440 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3441 we failed; the caller should emit a normal call; otherwise try to
3442 get the result in TARGET, if convenient. If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3446
3447 static rtx
3448 expand_movstr (tree dest, tree src, rtx target, int endp)
3449 {
3450 struct expand_operand ops[3];
3451 rtx dest_mem;
3452 rtx src_mem;
3453
3454 if (!targetm.have_movstr ())
3455 return NULL_RTX;
3456
3457 dest_mem = get_memory_rtx (dest, NULL);
3458 src_mem = get_memory_rtx (src, NULL);
3459 if (!endp)
3460 {
3461 target = force_reg (Pmode, XEXP (dest_mem, 0));
3462 dest_mem = replace_equiv_address (dest_mem, target);
3463 }
3464
3465 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3466 create_fixed_operand (&ops[1], dest_mem);
3467 create_fixed_operand (&ops[2], src_mem);
3468 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3469 return NULL_RTX;
3470
3471 if (endp && target != const0_rtx)
3472 {
3473 target = ops[0].value;
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
3476 adjust it. */
3477 if (endp == 1)
3478 {
3479 rtx tem = plus_constant (GET_MODE (target),
3480 gen_lowpart (GET_MODE (target), target), 1);
3481 emit_move_insn (target, force_operand (tem, NULL_RTX));
3482 }
3483 }
3484 return target;
3485 }
3486
3487 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3488 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3489 try to get the result in TARGET, if convenient (and in mode MODE if that's
3490 convenient). */
3491
3492 static rtx
3493 expand_builtin_strcpy (tree exp, rtx target)
3494 {
3495 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 {
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 1);
3499 return expand_builtin_strcpy_args (dest, src, target);
3500 }
3501 return NULL_RTX;
3502 }
3503
3504 /* Helper function to do the actual work for expand_builtin_strcpy. The
3505 arguments to the builtin_strcpy call DEST and SRC are broken out
3506 so that this can also be called without constructing an actual CALL_EXPR.
3507 The other arguments and return value are the same as for
3508 expand_builtin_strcpy. */
3509
3510 static rtx
3511 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3512 {
3513 return expand_movstr (dest, src, target, /*endp=*/0);
3514 }
3515
3516 /* Expand a call EXP to the stpcpy builtin.
3517 Return NULL_RTX if we failed; the caller should emit a normal call;
3518 otherwise try to get the result in TARGET, if convenient (and in
3519 mode MODE if that's convenient). */
3520
3521 static rtx
3522 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3523 {
3524 tree dst, src;
3525 location_t loc = EXPR_LOCATION (exp);
3526
3527 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3529
3530 dst = CALL_EXPR_ARG (exp, 0);
3531 src = CALL_EXPR_ARG (exp, 1);
3532
3533 /* If return value is ignored, transform stpcpy into strcpy. */
3534 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3535 {
3536 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3537 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
3539 }
3540 else
3541 {
3542 tree len, lenp1;
3543 rtx ret;
3544
3545 /* Ensure we get an actual string whose length can be evaluated at
3546 compile-time, not an expression containing a string. This is
3547 because the latter will potentially produce pessimized code
3548 when used to produce the return value. */
3549 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3550 return expand_movstr (dst, src, target, /*endp=*/2);
3551
3552 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3553 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3554 target, mode, /*endp=*/2,
3555 exp);
3556
3557 if (ret)
3558 return ret;
3559
3560 if (TREE_CODE (len) == INTEGER_CST)
3561 {
3562 rtx len_rtx = expand_normal (len);
3563
3564 if (CONST_INT_P (len_rtx))
3565 {
3566 ret = expand_builtin_strcpy_args (dst, src, target);
3567
3568 if (ret)
3569 {
3570 if (! target)
3571 {
3572 if (mode != VOIDmode)
3573 target = gen_reg_rtx (mode);
3574 else
3575 target = gen_reg_rtx (GET_MODE (ret));
3576 }
3577 if (GET_MODE (target) != GET_MODE (ret))
3578 ret = gen_lowpart (GET_MODE (target), ret);
3579
3580 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3581 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3582 gcc_assert (ret);
3583
3584 return target;
3585 }
3586 }
3587 }
3588
3589 return expand_movstr (dst, src, target, /*endp=*/2);
3590 }
3591 }
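
/* Illustrative sketch only (stpcpy_model is not a GCC function; guarded out
   of the build): the identity relied on above.  stpcpy returns
   DEST + strlen (SRC), so with a constant-length source the result is just
   DEST plus a compile-time constant, and when the result is ignored the
   call degenerates to strcpy.  */
#if 0
#include <string.h>

static char *
stpcpy_model (char *dest, const char *src)
{
  size_t len = strlen (src);
  memcpy (dest, src, len + 1);	/* Copy the string including its NUL...  */
  return dest + len;		/* ...and return the NUL's address.  */
}
#endif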
3592
3593 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3594 bytes from constant string DATA + OFFSET and return it as target
3595 constant. */
3596
3597 rtx
3598 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3599 machine_mode mode)
3600 {
3601 const char *str = (const char *) data;
3602
3603 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return const0_rtx;
3605
3606 return c_readstr (str + offset, mode);
3607 }
3608
3609 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3610 NULL_RTX if we failed; the caller should emit a normal call. */
3611
3612 static rtx
3613 expand_builtin_strncpy (tree exp, rtx target)
3614 {
3615 location_t loc = EXPR_LOCATION (exp);
3616
3617 if (validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 {
3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3623 tree slen = c_strlen (src, 1);
3624
3625 /* We must be passed a constant len and src parameter. */
3626 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3627 return NULL_RTX;
3628
3629 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3630
3631 /* We're required to pad with trailing zeros if the requested
3632 len is greater than strlen(s2)+1. In that case try to
3633 use store_by_pieces; if it fails, punt. */
3634 if (tree_int_cst_lt (slen, len))
3635 {
3636 unsigned int dest_align = get_pointer_alignment (dest);
3637 const char *p = c_getstr (src);
3638 rtx dest_mem;
3639
3640 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3641 || !can_store_by_pieces (tree_to_uhwi (len),
3642 builtin_strncpy_read_str,
3643 CONST_CAST (char *, p),
3644 dest_align, false))
3645 return NULL_RTX;
3646
3647 dest_mem = get_memory_rtx (dest, len);
3648 store_by_pieces (dest_mem, tree_to_uhwi (len),
3649 builtin_strncpy_read_str,
3650 CONST_CAST (char *, p), dest_align, false, 0);
3651 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3652 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3653 return dest_mem;
3654 }
3655 }
3656 return NULL_RTX;
3657 }
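
/* Illustrative sketch only (strncpy_model is not a GCC function; guarded
   out of the build): the padding rule handled above.  When LEN exceeds
   strlen (SRC) + 1 the remainder of DEST must be zero filled, which is
   what makes store_by_pieces with a constant source string applicable.  */
#if 0
#include <string.h>

static void
strncpy_model (char *dest, const char *src, size_t len)
{
  size_t slen = strlen (src);
  if (slen >= len)
    memcpy (dest, src, len);		/* Truncated copy; no NUL is added.  */
  else
    {
      memcpy (dest, src, slen + 1);	/* Copy the string and its NUL...  */
      memset (dest + slen + 1, 0, len - slen - 1);	/* ...then pad.  */
    }
}
#endif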
3658
3659 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3660 bytes from constant string DATA + OFFSET and return it as target
3661 constant. */
3662
3663 rtx
3664 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3665 machine_mode mode)
3666 {
3667 const char *c = (const char *) data;
3668 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3669
3670 memset (p, *c, GET_MODE_SIZE (mode));
3671
3672 return c_readstr (p, mode);
3673 }
3674
3675 /* Callback routine for store_by_pieces. Return the RTL of a register
3676 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3677 char value given in the RTL register data. For example, if mode is
3678 4 bytes wide, return the RTL for 0x01010101*data. */
3679
3680 static rtx
3681 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3682 machine_mode mode)
3683 {
3684 rtx target, coeff;
3685 size_t size;
3686 char *p;
3687
3688 size = GET_MODE_SIZE (mode);
3689 if (size == 1)
3690 return (rtx) data;
3691
3692 p = XALLOCAVEC (char, size);
3693 memset (p, 1, size);
3694 coeff = c_readstr (p, mode);
3695
3696 target = convert_to_mode (mode, (rtx) data, 1);
3697 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3698 return force_reg (mode, target);
3699 }
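
/* Illustrative sketch only (replicate_byte_4 is not a GCC function; guarded
   out of the build): the coefficient trick used by builtin_memset_gen_str,
   shown for a 4-byte mode.  Multiplying the zero-extended byte by a word
   of 0x01 bytes replicates it into every byte position.  */
#if 0
#include <stdint.h>

static uint32_t
replicate_byte_4 (unsigned char c)
{
  /* E.g. c == 0xAB gives 0xAB * 0x01010101 == 0xABABABAB.  */
  return (uint32_t) c * UINT32_C (0x01010101);
}
#endif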
3700
3701 /* Expand expression EXP, which is a call to the memset builtin. Return
3702 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
3704 convenient). */
3705
3706 static rtx
3707 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3708 {
3709 if (!validate_arglist (exp,
3710 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
3712 else
3713 {
3714 tree dest = CALL_EXPR_ARG (exp, 0);
3715 tree val = CALL_EXPR_ARG (exp, 1);
3716 tree len = CALL_EXPR_ARG (exp, 2);
3717 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3718 }
3719 }
3720
3721 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3722 Return NULL_RTX if we failed; the caller should emit a normal call; otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3725
3726 static rtx
3727 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3728 {
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3731 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3732 return NULL_RTX;
3733 else
3734 {
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree val = CALL_EXPR_ARG (exp, 2);
3737 tree len = CALL_EXPR_ARG (exp, 3);
3738 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3739
3740 /* Return src bounds with the result. */
3741 if (res)
3742 {
3743 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3744 expand_normal (CALL_EXPR_ARG (exp, 1)));
3745 res = chkp_join_splitted_slot (res, bnd);
3746 }
3747 return res;
3748 }
3749 }
3750
3751 /* Helper function to do the actual work for expand_builtin_memset. The
3752 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3753 so that this can also be called without constructing an actual CALL_EXPR.
3754 The other arguments and return value are the same as for
3755 expand_builtin_memset. */
3756
3757 static rtx
3758 expand_builtin_memset_args (tree dest, tree val, tree len,
3759 rtx target, machine_mode mode, tree orig_exp)
3760 {
3761 tree fndecl, fn;
3762 enum built_in_function fcode;
3763 machine_mode val_mode;
3764 char c;
3765 unsigned int dest_align;
3766 rtx dest_mem, dest_addr, len_rtx;
3767 HOST_WIDE_INT expected_size = -1;
3768 unsigned int expected_align = 0;
3769 unsigned HOST_WIDE_INT min_size;
3770 unsigned HOST_WIDE_INT max_size;
3771 unsigned HOST_WIDE_INT probable_max_size;
3772
3773 dest_align = get_pointer_alignment (dest);
3774
3775 /* If DEST is not a pointer type, don't do this operation in-line. */
3776 if (dest_align == 0)
3777 return NULL_RTX;
3778
3779 if (currently_expanding_gimple_stmt)
3780 stringop_block_profile (currently_expanding_gimple_stmt,
3781 &expected_align, &expected_size);
3782
3783 if (expected_align < dest_align)
3784 expected_align = dest_align;
3785
3786 /* If the LEN parameter is zero, return DEST. */
3787 if (integer_zerop (len))
3788 {
3789 /* Evaluate and ignore VAL in case it has side-effects. */
3790 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3791 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3792 }
3793
3794 /* Stabilize the arguments in case we fail. */
3795 dest = builtin_save_expr (dest);
3796 val = builtin_save_expr (val);
3797 len = builtin_save_expr (len);
3798
3799 len_rtx = expand_normal (len);
3800 determine_block_size (len, len_rtx, &min_size, &max_size,
3801 &probable_max_size);
3802 dest_mem = get_memory_rtx (dest, len);
3803 val_mode = TYPE_MODE (unsigned_char_type_node);
3804
3805 if (TREE_CODE (val) != INTEGER_CST)
3806 {
3807 rtx val_rtx;
3808
3809 val_rtx = expand_normal (val);
3810 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3811
3812 /* Assume that we can memset by pieces if we can store
3813 the coefficients by pieces (in the required modes).
3814 We can't pass builtin_memset_gen_str as that emits RTL. */
3815 c = 1;
3816 if (tree_fits_uhwi_p (len)
3817 && can_store_by_pieces (tree_to_uhwi (len),
3818 builtin_memset_read_str, &c, dest_align,
3819 true))
3820 {
3821 val_rtx = force_reg (val_mode, val_rtx);
3822 store_by_pieces (dest_mem, tree_to_uhwi (len),
3823 builtin_memset_gen_str, val_rtx, dest_align,
3824 true, 0);
3825 }
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3827 dest_align, expected_align,
3828 expected_size, min_size, max_size,
3829 probable_max_size))
3830 goto do_libcall;
3831
3832 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3833 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3834 return dest_mem;
3835 }
3836
3837 if (target_char_cast (val, &c))
3838 goto do_libcall;
3839
3840 if (c)
3841 {
3842 if (tree_fits_uhwi_p (len)
3843 && can_store_by_pieces (tree_to_uhwi (len),
3844 builtin_memset_read_str, &c, dest_align,
3845 true))
3846 store_by_pieces (dest_mem, tree_to_uhwi (len),
3847 builtin_memset_read_str, &c, dest_align, true, 0);
3848 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3849 gen_int_mode (c, val_mode),
3850 dest_align, expected_align,
3851 expected_size, min_size, max_size,
3852 probable_max_size))
3853 goto do_libcall;
3854
3855 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3856 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3857 return dest_mem;
3858 }
3859
3860 set_mem_align (dest_mem, dest_align);
3861 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3862 CALL_EXPR_TAILCALL (orig_exp)
3863 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3864 expected_align, expected_size,
3865 min_size, max_size,
3866 probable_max_size);
3867
3868 if (dest_addr == 0)
3869 {
3870 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3872 }
3873
3874 return dest_addr;
3875
3876 do_libcall:
3877 fndecl = get_callee_fndecl (orig_exp);
3878 fcode = DECL_FUNCTION_CODE (fndecl);
3879 if (fcode == BUILT_IN_MEMSET
3880 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3881 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3882 dest, val, len);
3883 else if (fcode == BUILT_IN_BZERO)
3884 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3885 dest, len);
3886 else
3887 gcc_unreachable ();
3888 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3889 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3890 return expand_call (fn, target, target == const0_rtx);
3891 }
3892
3893 /* Expand expression EXP, which is a call to the bzero builtin. Return
3894 NULL_RTX if we failed; the caller should emit a normal call. */
3895
3896 static rtx
3897 expand_builtin_bzero (tree exp)
3898 {
3899 tree dest, size;
3900 location_t loc = EXPR_LOCATION (exp);
3901
3902 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3903 return NULL_RTX;
3904
3905 dest = CALL_EXPR_ARG (exp, 0);
3906 size = CALL_EXPR_ARG (exp, 1);
3907
3908 /* New argument list transforming bzero(ptr x, int y) to
3909 memset(ptr x, int 0, size_t y). This is done this way
3910 so that if it isn't expanded inline, we fall back to
3911 calling bzero instead of memset. */
3912
3913 return expand_builtin_memset_args (dest, integer_zero_node,
3914 fold_convert_loc (loc,
3915 size_type_node, size),
3916 const0_rtx, VOIDmode, exp);
3917 }
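
/* Illustrative sketch only (bzero_model is not a GCC function; guarded out
   of the build): the rewrite performed above, i.e. bzero (x, y) is expanded
   exactly as memset (x, 0, y) would be.  */
#if 0
#include <string.h>

static void
bzero_model (void *ptr, size_t size)
{
  memset (ptr, 0, size);
}
#endif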
3918
3919 /* Try to expand cmpstr operation ICODE with the given operands.
3920 Return the result rtx on success, otherwise return null. */
3921
3922 static rtx
3923 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3924 HOST_WIDE_INT align)
3925 {
3926 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3927
3928 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3929 target = NULL_RTX;
3930
3931 struct expand_operand ops[4];
3932 create_output_operand (&ops[0], target, insn_mode);
3933 create_fixed_operand (&ops[1], arg1_rtx);
3934 create_fixed_operand (&ops[2], arg2_rtx);
3935 create_integer_operand (&ops[3], align);
3936 if (maybe_expand_insn (icode, 4, ops))
3937 return ops[0].value;
3938 return NULL_RTX;
3939 }
3940
3941 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3942 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3943 otherwise return null. */
3944
3945 static rtx
3946 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3947 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3948 HOST_WIDE_INT align)
3949 {
3950 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3951
3952 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3953 target = NULL_RTX;
3954
3955 struct expand_operand ops[5];
3956 create_output_operand (&ops[0], target, insn_mode);
3957 create_fixed_operand (&ops[1], arg1_rtx);
3958 create_fixed_operand (&ops[2], arg2_rtx);
3959 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3960 TYPE_UNSIGNED (arg3_type));
3961 create_integer_operand (&ops[4], align);
3962 if (maybe_expand_insn (icode, 5, ops))
3963 return ops[0].value;
3964 return NULL_RTX;
3965 }
3966
3967 /* Expand expression EXP, which is a call to the memcmp built-in function.
3968 Return NULL_RTX if we failed and the caller should emit a normal call,
3969 otherwise try to get the result in TARGET, if convenient. */
3970
3971 static rtx
3972 expand_builtin_memcmp (tree exp, rtx target)
3973 {
3974 if (!validate_arglist (exp,
3975 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3976 return NULL_RTX;
3977
3978 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3979 implementing memcmp because it will stop if it encounters two
3980 zero bytes. */
3981 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3982 if (icode == CODE_FOR_nothing)
3983 return NULL_RTX;
3984
3985 tree arg1 = CALL_EXPR_ARG (exp, 0);
3986 tree arg2 = CALL_EXPR_ARG (exp, 1);
3987 tree len = CALL_EXPR_ARG (exp, 2);
3988
3989 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3990 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3991
3992 /* If we don't have POINTER_TYPE, call the function. */
3993 if (arg1_align == 0 || arg2_align == 0)
3994 return NULL_RTX;
3995
3996 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3997 location_t loc = EXPR_LOCATION (exp);
3998 rtx arg1_rtx = get_memory_rtx (arg1, len);
3999 rtx arg2_rtx = get_memory_rtx (arg2, len);
4000 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4001
4002 /* Set MEM_SIZE as appropriate. */
4003 if (CONST_INT_P (arg3_rtx))
4004 {
4005 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4006 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4007 }
4008
4009 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4010 TREE_TYPE (len), arg3_rtx,
4011 MIN (arg1_align, arg2_align));
4012 if (result)
4013 {
4014 /* Return the value in the proper mode for this function. */
4015 if (GET_MODE (result) == mode)
4016 return result;
4017
4018 if (target != 0)
4019 {
4020 convert_move (target, result, 0);
4021 return target;
4022 }
4023
4024 return convert_to_mode (mode, result, 0);
4025 }
4026
4027 result = target;
4028 if (! (result != 0
4029 && REG_P (result) && GET_MODE (result) == mode
4030 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4031 result = gen_reg_rtx (mode);
4032
4033 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4034 TYPE_MODE (integer_type_node), 3,
4035 XEXP (arg1_rtx, 0), Pmode,
4036 XEXP (arg2_rtx, 0), Pmode,
4037 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4038 TYPE_UNSIGNED (sizetype)),
4039 TYPE_MODE (sizetype));
4040 return result;
4041 }
4042
4043 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4044 if we failed; the caller should emit a normal call; otherwise try to get
4045 the result in TARGET, if convenient. */
4046
4047 static rtx
4048 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4049 {
4050 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052
4053 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4054 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4055 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4056 {
4057 rtx arg1_rtx, arg2_rtx;
4058 tree fndecl, fn;
4059 tree arg1 = CALL_EXPR_ARG (exp, 0);
4060 tree arg2 = CALL_EXPR_ARG (exp, 1);
4061 rtx result = NULL_RTX;
4062
4063 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4064 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4065
4066 /* If we don't have POINTER_TYPE, call the function. */
4067 if (arg1_align == 0 || arg2_align == 0)
4068 return NULL_RTX;
4069
4070 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
4073
4074 arg1_rtx = get_memory_rtx (arg1, NULL);
4075 arg2_rtx = get_memory_rtx (arg2, NULL);
4076
4077 /* Try to call cmpstrsi. */
4078 if (cmpstr_icode != CODE_FOR_nothing)
4079 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4080 MIN (arg1_align, arg2_align));
4081
4082 /* Try to determine at least one length and call cmpstrnsi. */
4083 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4084 {
4085 tree len;
4086 rtx arg3_rtx;
4087
4088 tree len1 = c_strlen (arg1, 1);
4089 tree len2 = c_strlen (arg2, 1);
4090
4091 if (len1)
4092 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4093 if (len2)
4094 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4095
4096 /* If we don't have a constant length for the first, use the length
4097 of the second, if we know it. We don't require a constant for
4098 this case; some cost analysis could be done if both are available
4099 but neither is constant. For now, assume they're equally cheap,
4100 unless one has side effects. If both strings have constant lengths,
4101 use the smaller. */
4102
4103 if (!len1)
4104 len = len2;
4105 else if (!len2)
4106 len = len1;
4107 else if (TREE_SIDE_EFFECTS (len1))
4108 len = len2;
4109 else if (TREE_SIDE_EFFECTS (len2))
4110 len = len1;
4111 else if (TREE_CODE (len1) != INTEGER_CST)
4112 len = len2;
4113 else if (TREE_CODE (len2) != INTEGER_CST)
4114 len = len1;
4115 else if (tree_int_cst_lt (len1, len2))
4116 len = len1;
4117 else
4118 len = len2;
4119
4120 /* If both arguments have side effects, we cannot optimize. */
4121 if (len && !TREE_SIDE_EFFECTS (len))
4122 {
4123 arg3_rtx = expand_normal (len);
4124 result = expand_cmpstrn_or_cmpmem
4125 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4126 arg3_rtx, MIN (arg1_align, arg2_align));
4127 }
4128 }
4129
4130 if (result)
4131 {
4132 /* Return the value in the proper mode for this function. */
4133 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4134 if (GET_MODE (result) == mode)
4135 return result;
4136 if (target == 0)
4137 return convert_to_mode (mode, result, 0);
4138 convert_move (target, result, 0);
4139 return target;
4140 }
4141
4142 /* Expand the library call ourselves using a stabilized argument
4143 list to avoid re-evaluating the function's arguments twice. */
4144 fndecl = get_callee_fndecl (exp);
4145 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4146 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4147 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4148 return expand_call (fn, target, target == const0_rtx);
4149 }
4150 return NULL_RTX;
4151 }
4152
4153 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4154 NULL_RTX if we failed; the caller should emit a normal call; otherwise try to get
4155 the result in TARGET, if convenient. */
4156
4157 static rtx
4158 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4159 ATTRIBUTE_UNUSED machine_mode mode)
4160 {
4161 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4162
4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4165 return NULL_RTX;
4166
4167 /* If c_strlen can determine an expression for one of the string
4168 lengths, and it doesn't have side effects, then emit cmpstrnsi
4169 using length MIN(strlen(string)+1, arg3). */
4170 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4171 if (cmpstrn_icode != CODE_FOR_nothing)
4172 {
4173 tree len, len1, len2;
4174 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4175 rtx result;
4176 tree fndecl, fn;
4177 tree arg1 = CALL_EXPR_ARG (exp, 0);
4178 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 tree arg3 = CALL_EXPR_ARG (exp, 2);
4180
4181 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4182 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4183
4184 len1 = c_strlen (arg1, 1);
4185 len2 = c_strlen (arg2, 1);
4186
4187 if (len1)
4188 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4189 if (len2)
4190 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4191
4192 /* If we don't have a constant length for the first, use the length
4193 of the second, if we know it. We don't require a constant for
4194 this case; some cost analysis could be done if both are available
4195 but neither is constant. For now, assume they're equally cheap,
4196 unless one has side effects. If both strings have constant lengths,
4197 use the smaller. */
4198
4199 if (!len1)
4200 len = len2;
4201 else if (!len2)
4202 len = len1;
4203 else if (TREE_SIDE_EFFECTS (len1))
4204 len = len2;
4205 else if (TREE_SIDE_EFFECTS (len2))
4206 len = len1;
4207 else if (TREE_CODE (len1) != INTEGER_CST)
4208 len = len2;
4209 else if (TREE_CODE (len2) != INTEGER_CST)
4210 len = len1;
4211 else if (tree_int_cst_lt (len1, len2))
4212 len = len1;
4213 else
4214 len = len2;
4215
4216 /* If both arguments have side effects, we cannot optimize. */
4217 if (!len || TREE_SIDE_EFFECTS (len))
4218 return NULL_RTX;
4219
4220 /* The actual new length parameter is MIN(len,arg3). */
4221 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4222 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4223
4224 /* If we don't have POINTER_TYPE, call the function. */
4225 if (arg1_align == 0 || arg2_align == 0)
4226 return NULL_RTX;
4227
4228 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4229 arg1 = builtin_save_expr (arg1);
4230 arg2 = builtin_save_expr (arg2);
4231 len = builtin_save_expr (len);
4232
4233 arg1_rtx = get_memory_rtx (arg1, len);
4234 arg2_rtx = get_memory_rtx (arg2, len);
4235 arg3_rtx = expand_normal (len);
4236 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4237 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4238 MIN (arg1_align, arg2_align));
4239 if (result)
4240 {
4241 /* Return the value in the proper mode for this function. */
4242 mode = TYPE_MODE (TREE_TYPE (exp));
4243 if (GET_MODE (result) == mode)
4244 return result;
4245 if (target == 0)
4246 return convert_to_mode (mode, result, 0);
4247 convert_move (target, result, 0);
4248 return target;
4249 }
4250
4251 /* Expand the library call ourselves using a stabilized argument
4252 list to avoid re-evaluating the function's arguments twice. */
4253 fndecl = get_callee_fndecl (exp);
4254 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4255 arg1, arg2, len);
4256 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4257 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4258 return expand_call (fn, target, target == const0_rtx);
4259 }
4260 return NULL_RTX;
4261 }
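
/* Illustrative sketch only (strncmp_bound_model is not a GCC function;
   guarded out of the build): the length actually passed to cmpstrnsi above
   when one string length is known, i.e. MIN (strlen (s) + 1, n), which
   covers the terminating NUL without reading past either string.  */
#if 0
#include <string.h>

static size_t
strncmp_bound_model (const char *known_str, size_t n)
{
  size_t len = strlen (known_str) + 1;	/* Include the NUL.  */
  return len < n ? len : n;
}
#endif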
4262
4263 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4264 if that's convenient. */
4265
4266 rtx
4267 expand_builtin_saveregs (void)
4268 {
4269 rtx val;
4270 rtx_insn *seq;
4271
4272 /* Don't do __builtin_saveregs more than once in a function.
4273 Save the result of the first call and reuse it. */
4274 if (saveregs_value != 0)
4275 return saveregs_value;
4276
4277 /* When this function is called, it means that registers must be
4278 saved on entry to this function. So we migrate the call to the
4279 first insn of this function. */
4280
4281 start_sequence ();
4282
4283 /* Do whatever the machine needs done in this case. */
4284 val = targetm.calls.expand_builtin_saveregs ();
4285
4286 seq = get_insns ();
4287 end_sequence ();
4288
4289 saveregs_value = val;
4290
4291 /* Put the insns after the NOTE that starts the function. If this
4292 is inside a start_sequence, make the outer-level insn chain current, so
4293 the code is placed at the start of the function. */
4294 push_topmost_sequence ();
4295 emit_insn_after (seq, entry_of_function ());
4296 pop_topmost_sequence ();
4297
4298 return val;
4299 }
4300
4301 /* Expand a call to __builtin_next_arg. */
4302
4303 static rtx
4304 expand_builtin_next_arg (void)
4305 {
4306 /* Checking arguments is already done in fold_builtin_next_arg
4307 that must be called before this function. */
4308 return expand_binop (ptr_mode, add_optab,
4309 crtl->args.internal_arg_pointer,
4310 crtl->args.arg_offset_rtx,
4311 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4312 }
4313
4314 /* Make it easier for the backends by protecting the valist argument
4315 from multiple evaluations. */
4316
4317 static tree
4318 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4319 {
4320 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4321
4322 /* The current way of determining the type of valist is completely
4323 bogus. We should have the information on the va builtin instead. */
4324 if (!vatype)
4325 vatype = targetm.fn_abi_va_list (cfun->decl);
4326
4327 if (TREE_CODE (vatype) == ARRAY_TYPE)
4328 {
4329 if (TREE_SIDE_EFFECTS (valist))
4330 valist = save_expr (valist);
4331
4332 /* For this case, the backends will be expecting a pointer to
4333 vatype, but it's possible we've actually been given an array
4334 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4335 So fix it. */
4336 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4337 {
4338 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4339 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4340 }
4341 }
4342 else
4343 {
4344 tree pt = build_pointer_type (vatype);
4345
4346 if (! needs_lvalue)
4347 {
4348 if (! TREE_SIDE_EFFECTS (valist))
4349 return valist;
4350
4351 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4352 TREE_SIDE_EFFECTS (valist) = 1;
4353 }
4354
4355 if (TREE_SIDE_EFFECTS (valist))
4356 valist = save_expr (valist);
4357 valist = fold_build2_loc (loc, MEM_REF,
4358 vatype, valist, build_int_cst (pt, 0));
4359 }
4360
4361 return valist;
4362 }
4363
4364 /* The "standard" definition of va_list is void*. */
4365
4366 tree
4367 std_build_builtin_va_list (void)
4368 {
4369 return ptr_type_node;
4370 }
4371
4372 /* The "standard" abi va_list is va_list_type_node. */
4373
4374 tree
4375 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4376 {
4377 return va_list_type_node;
4378 }
4379
4380 /* The "standard" type of va_list is va_list_type_node. */
4381
4382 tree
4383 std_canonical_va_list_type (tree type)
4384 {
4385 tree wtype, htype;
4386
4387 if (INDIRECT_REF_P (type))
4388 type = TREE_TYPE (type);
4389 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4390 type = TREE_TYPE (type);
4391 wtype = va_list_type_node;
4392 htype = type;
4393 /* Treat structure va_list types. */
4394 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4395 htype = TREE_TYPE (htype);
4396 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4397 {
4398 /* If va_list is an array type, the argument may have decayed
4399 to a pointer type, e.g. by being passed to another function.
4400 In that case, unwrap both types so that we can compare the
4401 underlying records. */
4402 if (TREE_CODE (htype) == ARRAY_TYPE
4403 || POINTER_TYPE_P (htype))
4404 {
4405 wtype = TREE_TYPE (wtype);
4406 htype = TREE_TYPE (htype);
4407 }
4408 }
4409 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4410 return va_list_type_node;
4411
4412 return NULL_TREE;
4413 }
4414
4415 /* The "standard" implementation of va_start: just assign `nextarg' to
4416 the variable. */
4417
4418 void
4419 std_expand_builtin_va_start (tree valist, rtx nextarg)
4420 {
4421 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4422 convert_move (va_r, nextarg, 0);
4423
4424 /* We do not have any valid bounds for the pointer, so
4425 just store zero bounds for it. */
4426 if (chkp_function_instrumented_p (current_function_decl))
4427 chkp_expand_bounds_reset_for_mem (valist,
4428 make_tree (TREE_TYPE (valist),
4429 nextarg));
4430 }
4431
4432 /* Expand EXP, a call to __builtin_va_start. */
4433
4434 static rtx
4435 expand_builtin_va_start (tree exp)
4436 {
4437 rtx nextarg;
4438 tree valist;
4439 location_t loc = EXPR_LOCATION (exp);
4440
4441 if (call_expr_nargs (exp) < 2)
4442 {
4443 error_at (loc, "too few arguments to function %<va_start%>");
4444 return const0_rtx;
4445 }
4446
4447 if (fold_builtin_next_arg (exp, true))
4448 return const0_rtx;
4449
4450 nextarg = expand_builtin_next_arg ();
4451 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4452
4453 if (targetm.expand_builtin_va_start)
4454 targetm.expand_builtin_va_start (valist, nextarg);
4455 else
4456 std_expand_builtin_va_start (valist, nextarg);
4457
4458 return const0_rtx;
4459 }
4460
4461 /* Expand EXP, a call to __builtin_va_end. */
4462
4463 static rtx
4464 expand_builtin_va_end (tree exp)
4465 {
4466 tree valist = CALL_EXPR_ARG (exp, 0);
4467
4468 /* Evaluate for side effects, if needed. I hate macros that don't
4469 do that. */
4470 if (TREE_SIDE_EFFECTS (valist))
4471 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4472
4473 return const0_rtx;
4474 }
4475
4476 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4477 builtin rather than just as an assignment in stdarg.h because of the
4478 nastiness of array-type va_list types. */
4479
4480 static rtx
4481 expand_builtin_va_copy (tree exp)
4482 {
4483 tree dst, src, t;
4484 location_t loc = EXPR_LOCATION (exp);
4485
4486 dst = CALL_EXPR_ARG (exp, 0);
4487 src = CALL_EXPR_ARG (exp, 1);
4488
4489 dst = stabilize_va_list_loc (loc, dst, 1);
4490 src = stabilize_va_list_loc (loc, src, 0);
4491
4492 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4493
4494 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4495 {
4496 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4497 TREE_SIDE_EFFECTS (t) = 1;
4498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499 }
4500 else
4501 {
4502 rtx dstb, srcb, size;
4503
4504 /* Evaluate to pointers. */
4505 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4506 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4507 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4508 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4509
4510 dstb = convert_memory_address (Pmode, dstb);
4511 srcb = convert_memory_address (Pmode, srcb);
4512
4513 /* "Dereference" to BLKmode memories. */
4514 dstb = gen_rtx_MEM (BLKmode, dstb);
4515 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4516 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4517 srcb = gen_rtx_MEM (BLKmode, srcb);
4518 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4519 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4520
4521 /* Copy. */
4522 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4523 }
4524
4525 return const0_rtx;
4526 }
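
/* Illustrative sketch only (print_twice is not a GCC function; guarded out
   of the build): source-level use of va_copy.  On targets whose va_list is
   an array type a plain assignment would only copy a decayed pointer, which
   is why the expansion above block-copies the whole underlying object.  */
#if 0
#include <stdarg.h>
#include <stdio.h>

static void
print_twice (const char *fmt, ...)
{
  va_list ap, ap2;
  va_start (ap, fmt);
  va_copy (ap2, ap);		/* Expanded by expand_builtin_va_copy.  */
  vprintf (fmt, ap);
  vprintf (fmt, ap2);		/* AP2 is independent of AP.  */
  va_end (ap2);
  va_end (ap);
}
#endif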
4527
4528 /* Expand a call to one of the builtin functions __builtin_frame_address or
4529 __builtin_return_address. */
4530
4531 static rtx
4532 expand_builtin_frame_address (tree fndecl, tree exp)
4533 {
4534 /* The argument must be a nonnegative integer constant.
4535 It counts the number of frames to scan up the stack.
4536 The value is either the frame pointer value or the return
4537 address saved in that frame. */
4538 if (call_expr_nargs (exp) == 0)
4539 /* Warning about missing arg was already issued. */
4540 return const0_rtx;
4541 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4542 {
4543 error ("invalid argument to %qD", fndecl);
4544 return const0_rtx;
4545 }
4546 else
4547 {
4548 /* Number of frames to scan up the stack. */
4549 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4550
4551 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4552
4553 /* Some ports cannot access arbitrary stack frames. */
4554 if (tem == NULL)
4555 {
4556 warning (0, "unsupported argument to %qD", fndecl);
4557 return const0_rtx;
4558 }
4559
4560 if (count)
4561 {
4562 /* Warn since no effort is made to ensure that any frame
4563 beyond the current one exists or can be safely reached. */
4564 warning (OPT_Wframe_address, "calling %qD with "
4565 "a nonzero argument is unsafe", fndecl);
4566 }
4567
4568 /* For __builtin_frame_address, return what we've got. */
4569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4570 return tem;
4571
4572 if (!REG_P (tem)
4573 && ! CONSTANT_P (tem))
4574 tem = copy_addr_to_reg (tem);
4575 return tem;
4576 }
4577 }
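
/* Illustrative sketch only (current_return_address is not a GCC function;
   guarded out of the build): the source-level interface expanded above.
   The argument must be a nonnegative integer constant; anything other than
   0 draws the -Wframe-address warning because outer frames may be
   unreachable.  */
#if 0
static void *
current_return_address (void)
{
  return __builtin_return_address (0);	/* 0 means "this frame".  */
}
#endif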
4578
4579 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4580 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4581 is the same as for allocate_dynamic_stack_space. */
4582
4583 static rtx
4584 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4585 {
4586 rtx op0;
4587 rtx result;
4588 bool valid_arglist;
4589 unsigned int align;
4590 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4591 == BUILT_IN_ALLOCA_WITH_ALIGN);
4592
4593 valid_arglist
4594 = (alloca_with_align
4595 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4596 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4597
4598 if (!valid_arglist)
4599 return NULL_RTX;
4600
4601 /* Compute the argument. */
4602 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4603
4604 /* Compute the alignment. */
4605 align = (alloca_with_align
4606 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4607 : BIGGEST_ALIGNMENT);
4608
4609 /* Allocate the desired space. */
4610 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4611 result = convert_memory_address (ptr_mode, result);
4612
4613 return result;
4614 }
4615
4616 /* Expand a call to bswap builtin in EXP.
4617 Return NULL_RTX if a normal call should be emitted rather than expanding the
4618 function in-line. If convenient, the result should be placed in TARGET.
4619 SUBTARGET may be used as the target for computing one of EXP's operands. */
4620
4621 static rtx
4622 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4623 rtx subtarget)
4624 {
4625 tree arg;
4626 rtx op0;
4627
4628 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
4630
4631 arg = CALL_EXPR_ARG (exp, 0);
4632 op0 = expand_expr (arg,
4633 subtarget && GET_MODE (subtarget) == target_mode
4634 ? subtarget : NULL_RTX,
4635 target_mode, EXPAND_NORMAL);
4636 if (GET_MODE (op0) != target_mode)
4637 op0 = convert_to_mode (target_mode, op0, 1);
4638
4639 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4640
4641 gcc_assert (target);
4642
4643 return convert_to_mode (target_mode, target, 1);
4644 }
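
/* Illustrative sketch only (bswap32_model is not a GCC function; guarded
   out of the build): the value __builtin_bswap32 computes, whether the
   expansion above uses a bswap instruction or a fallback sequence.  */
#if 0
#include <stdint.h>

static uint32_t
bswap32_model (uint32_t x)
{
  return ((x & 0x000000ffu) << 24)
	 | ((x & 0x0000ff00u) << 8)
	 | ((x & 0x00ff0000u) >> 8)
	 | ((x & 0xff000000u) >> 24);
}
#endif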
4645
4646 /* Expand a call to a unary builtin in EXP.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding the
4648 function in-line. If convenient, the result should be placed in TARGET.
4649 SUBTARGET may be used as the target for computing one of EXP's operands. */
4650
4651 static rtx
4652 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4653 rtx subtarget, optab op_optab)
4654 {
4655 rtx op0;
4656
4657 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4659
4660 /* Compute the argument. */
4661 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4662 (subtarget
4663 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4664 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4665 VOIDmode, EXPAND_NORMAL);
4666 /* Compute op, into TARGET if possible.
4667 Set TARGET to wherever the result comes back. */
4668 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4669 op_optab, op0, target, op_optab != clrsb_optab);
4670 gcc_assert (target);
4671
4672 return convert_to_mode (target_mode, target, 0);
4673 }
4674
4675 /* Expand a call to __builtin_expect. We just return our argument
4676 as the builtin_expect semantic should've been already executed by
4677 tree branch prediction pass. */
4678
4679 static rtx
4680 expand_builtin_expect (tree exp, rtx target)
4681 {
4682 tree arg;
4683
4684 if (call_expr_nargs (exp) < 2)
4685 return const0_rtx;
4686 arg = CALL_EXPR_ARG (exp, 0);
4687
4688 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4689 /* When guessing was done, the hints should be already stripped away. */
4690 gcc_assert (!flag_guess_branch_prob
4691 || optimize == 0 || seen_error ());
4692 return target;
4693 }
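
/* Illustrative sketch only (checked_divide is not a GCC function; guarded
   out of the build): typical source-level use of __builtin_expect.  By the
   time expansion runs the hint has already been consumed by branch
   prediction, which is why the code above just returns the argument.  */
#if 0
static int
checked_divide (int a, int b)
{
  if (__builtin_expect (b == 0, 0))	/* Hint: division by zero is rare.  */
    return 0;
  return a / b;
}
#endif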
4694
4695 /* Expand a call to __builtin_assume_aligned. We just return our first
4696 argument as the builtin_assume_aligned semantic should've been already
4697 executed by CCP. */
4698
4699 static rtx
4700 expand_builtin_assume_aligned (tree exp, rtx target)
4701 {
4702 if (call_expr_nargs (exp) < 2)
4703 return const0_rtx;
4704 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4705 EXPAND_NORMAL);
4706 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4707 && (call_expr_nargs (exp) < 3
4708 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4709 return target;
4710 }
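
/* Illustrative sketch only (sum4 is not a GCC function; guarded out of the
   build): typical source-level use of __builtin_assume_aligned.  The
   alignment promise is consumed by CCP before expansion, so only the first
   argument survives here.  */
#if 0
static float
sum4 (const float *p)
{
  const float *q = (const float *) __builtin_assume_aligned (p, 16);
  return q[0] + q[1] + q[2] + q[3];	/* Q is known 16-byte aligned.  */
}
#endif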
4711
4712 void
4713 expand_builtin_trap (void)
4714 {
4715 if (targetm.have_trap ())
4716 {
4717 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4718 /* For trap insns when not accumulating outgoing args force
4719 REG_ARGS_SIZE note to prevent crossjumping of calls with
4720 different args sizes. */
4721 if (!ACCUMULATE_OUTGOING_ARGS)
4722 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4723 }
4724 else
4725 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4726 emit_barrier ();
4727 }
4728
4729 /* Expand a call to __builtin_unreachable. We do nothing except emit
4730 a barrier saying that control flow will not pass here.
4731
4732 It is the responsibility of the program being compiled to ensure
4733 that control flow never reaches __builtin_unreachable. */
4734 static void
4735 expand_builtin_unreachable (void)
4736 {
4737 emit_barrier ();
4738 }
4739
4740 /* Expand EXP, a call to fabs, fabsf or fabsl.
4741 Return NULL_RTX if a normal call should be emitted rather than expanding
4742 the function inline. If convenient, the result should be placed
4743 in TARGET. SUBTARGET may be used as the target for computing
4744 the operand. */
4745
4746 static rtx
4747 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4748 {
4749 machine_mode mode;
4750 tree arg;
4751 rtx op0;
4752
4753 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4754 return NULL_RTX;
4755
4756 arg = CALL_EXPR_ARG (exp, 0);
4757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4758 mode = TYPE_MODE (TREE_TYPE (arg));
4759 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4760 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4761 }
4762
4763 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4764 Return NULL_RTX if a normal call should be emitted rather than expanding the
4765 function inline. If convenient, the result should be placed in TARGET.
4766 SUBTARGET may be used as the target for computing the operand. */
4767
4768 static rtx
4769 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4770 {
4771 rtx op0, op1;
4772 tree arg;
4773
4774 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4775 return NULL_RTX;
4776
4777 arg = CALL_EXPR_ARG (exp, 0);
4778 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4779
4780 arg = CALL_EXPR_ARG (exp, 1);
4781 op1 = expand_normal (arg);
4782
4783 return expand_copysign (op0, op1, target);
4784 }
4785
4786 /* Expand a call to __builtin___clear_cache. */
4787
4788 static rtx
4789 expand_builtin___clear_cache (tree exp)
4790 {
4791 if (!targetm.code_for_clear_cache)
4792 {
4793 #ifdef CLEAR_INSN_CACHE
4794 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4795 does something. Just do the default expansion to a call to
4796 __clear_cache(). */
4797 return NULL_RTX;
4798 #else
4799 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4800 does nothing. There is no need to call it. Do nothing. */
4801 return const0_rtx;
4802 #endif /* CLEAR_INSN_CACHE */
4803 }
4804
4805 /* We have a "clear_cache" insn, and it will handle everything. */
4806 tree begin, end;
4807 rtx begin_rtx, end_rtx;
4808
4809 /* We must not expand to a library call. If we did, any
4810 fallback library function in libgcc that might contain a call to
4811 __builtin___clear_cache() would recurse infinitely. */
4812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4813 {
4814 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4815 return const0_rtx;
4816 }
4817
4818 if (targetm.have_clear_cache ())
4819 {
4820 struct expand_operand ops[2];
4821
4822 begin = CALL_EXPR_ARG (exp, 0);
4823 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4824
4825 end = CALL_EXPR_ARG (exp, 1);
4826 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4827
4828 create_address_operand (&ops[0], begin_rtx);
4829 create_address_operand (&ops[1], end_rtx);
4830 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4831 return const0_rtx;
4832 }
4833 return const0_rtx;
4834 }
4835
4836 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4837
4838 static rtx
4839 round_trampoline_addr (rtx tramp)
4840 {
4841 rtx temp, addend, mask;
4842
4843 /* If we don't need too much alignment, we'll have been guaranteed
4844 proper alignment by get_trampoline_type. */
4845 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4846 return tramp;
4847
4848 /* Round address up to desired boundary. */
4849 temp = gen_reg_rtx (Pmode);
4850 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4851 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4852
4853 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4854 temp, 0, OPTAB_LIB_WIDEN);
4855 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4856 temp, 0, OPTAB_LIB_WIDEN);
4857
4858 return tramp;
4859 }
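
/* Illustrative sketch only (round_up_model is not a GCC function; guarded
   out of the build): the rounding arithmetic emitted above.  Adding
   ALIGN - 1 and masking with -ALIGN rounds an address up to the next
   ALIGN-byte boundary, ALIGN being a power of two.  */
#if 0
#include <stdint.h>

static uintptr_t
round_up_model (uintptr_t addr, uintptr_t align)
{
  /* E.g. round_up_model (0x1003, 16) == 0x1010.  */
  return (addr + align - 1) & -align;
}
#endif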
4860
4861 static rtx
4862 expand_builtin_init_trampoline (tree exp, bool onstack)
4863 {
4864 tree t_tramp, t_func, t_chain;
4865 rtx m_tramp, r_tramp, r_chain, tmp;
4866
4867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4868 POINTER_TYPE, VOID_TYPE))
4869 return NULL_RTX;
4870
4871 t_tramp = CALL_EXPR_ARG (exp, 0);
4872 t_func = CALL_EXPR_ARG (exp, 1);
4873 t_chain = CALL_EXPR_ARG (exp, 2);
4874
4875 r_tramp = expand_normal (t_tramp);
4876 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4877 MEM_NOTRAP_P (m_tramp) = 1;
4878
4879 /* If ONSTACK, the TRAMP argument should be the address of a field
4880 within the local function's FRAME decl. Either way, let's see if
4881 we can fill in the MEM_ATTRs for this memory. */
4882 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4883 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4884
4885 /* Creator of a heap trampoline is responsible for making sure the
4886 address is aligned to at least STACK_BOUNDARY. Normally malloc
4887 will ensure this anyhow. */
4888 tmp = round_trampoline_addr (r_tramp);
4889 if (tmp != r_tramp)
4890 {
4891 m_tramp = change_address (m_tramp, BLKmode, tmp);
4892 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4893 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4894 }
4895
4896 /* The FUNC argument should be the address of the nested function.
4897 Extract the actual function decl to pass to the hook. */
4898 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4899 t_func = TREE_OPERAND (t_func, 0);
4900 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4901
4902 r_chain = expand_normal (t_chain);
4903
4904 /* Generate insns to initialize the trampoline. */
4905 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4906
4907 if (onstack)
4908 {
4909 trampolines_created = 1;
4910
4911 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4912 "trampoline generated for nested function %qD", t_func);
4913 }
4914
4915 return const0_rtx;
4916 }
4917
4918 static rtx
4919 expand_builtin_adjust_trampoline (tree exp)
4920 {
4921 rtx tramp;
4922
4923 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4924 return NULL_RTX;
4925
4926 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4927 tramp = round_trampoline_addr (tramp);
4928 if (targetm.calls.trampoline_adjust_address)
4929 tramp = targetm.calls.trampoline_adjust_address (tramp);
4930
4931 return tramp;
4932 }
4933
4934 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4935 function. The function first checks whether the back end provides
4936 an insn to implement signbit for the respective mode. If not, it
4937 checks whether the floating point format of the value is such that
4938 the sign bit can be extracted. If that is not the case, error out.
4939 EXP is the expression that is a call to the builtin function; if
4940 convenient, the result should be placed in TARGET. */
4941 static rtx
4942 expand_builtin_signbit (tree exp, rtx target)
4943 {
4944 const struct real_format *fmt;
4945 machine_mode fmode, imode, rmode;
4946 tree arg;
4947 int word, bitpos;
4948 enum insn_code icode;
4949 rtx temp;
4950 location_t loc = EXPR_LOCATION (exp);
4951
4952 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4953 return NULL_RTX;
4954
4955 arg = CALL_EXPR_ARG (exp, 0);
4956 fmode = TYPE_MODE (TREE_TYPE (arg));
4957 rmode = TYPE_MODE (TREE_TYPE (exp));
4958 fmt = REAL_MODE_FORMAT (fmode);
4959
4960 arg = builtin_save_expr (arg);
4961
4962 /* Expand the argument yielding a RTX expression. */
4963 temp = expand_normal (arg);
4964
4965 /* Check if the back end provides an insn that handles signbit for the
4966 argument's mode. */
4967 icode = optab_handler (signbit_optab, fmode);
4968 if (icode != CODE_FOR_nothing)
4969 {
4970 rtx_insn *last = get_last_insn ();
4971 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4972 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4973 return target;
4974 delete_insns_since (last);
4975 }
4976
4977 /* For floating point formats without a sign bit, implement signbit
4978 as "ARG < 0.0". */
4979 bitpos = fmt->signbit_ro;
4980 if (bitpos < 0)
4981 {
4982 /* But we can't do this if the format supports signed zero. */
4983 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4984
4985 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4986 build_real (TREE_TYPE (arg), dconst0));
4987 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4988 }
4989
4990 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4991 {
4992 imode = int_mode_for_mode (fmode);
4993 gcc_assert (imode != BLKmode);
4994 temp = gen_lowpart (imode, temp);
4995 }
4996 else
4997 {
4998 imode = word_mode;
4999 /* Handle targets with different FP word orders. */
5000 if (FLOAT_WORDS_BIG_ENDIAN)
5001 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5002 else
5003 word = bitpos / BITS_PER_WORD;
5004 temp = operand_subword_force (temp, word, fmode);
5005 bitpos = bitpos % BITS_PER_WORD;
5006 }
5007
5008 /* Force the intermediate word_mode (or narrower) result into a
5009 register. This avoids attempting to create paradoxical SUBREGs
5010 of floating point modes below. */
5011 temp = force_reg (imode, temp);
5012
5013 /* If the bitpos is within the "result mode" lowpart, the operation
5014 can be implemented with a single bitwise AND. Otherwise, we need
5015 a right shift and an AND. */
5016
5017 if (bitpos < GET_MODE_BITSIZE (rmode))
5018 {
5019 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5020
5021 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5022 temp = gen_lowpart (rmode, temp);
5023 temp = expand_binop (rmode, and_optab, temp,
5024 immed_wide_int_const (mask, rmode),
5025 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5026 }
5027 else
5028 {
5029 /* Perform a logical right shift to place the signbit in the least
5030 significant bit, then truncate the result to the desired mode
5031 and mask just this bit. */
5032 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5035 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5036 }
5037
5038 return temp;
5039 }
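
/* Illustrative sketch only (signbitf_model is not a GCC function; guarded
   out of the build): the shift-and-AND fallback above, written out for
   IEEE single precision where the sign occupies bit 31.  Unlike "x < 0.0"
   this also reports the sign of -0.0.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
signbitf_model (float x)
{
  uint32_t bits;
  memcpy (&bits, &x, sizeof bits);	/* View the representation.  */
  return (bits >> 31) & 1;
}
#endif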
5040
5041 /* Expand fork or exec calls. TARGET is the desired target of the
5042 call. EXP is the call. FN is the
5043 identifier of the actual function. IGNORE is nonzero if the
5044 value is to be ignored. */
5045
5046 static rtx
5047 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5048 {
5049 tree id, decl;
5050 tree call;
5051
5052 /* If we are not profiling, just call the function. */
5053 if (!profile_arc_flag)
5054 return NULL_RTX;
5055
5056 /* Otherwise call the wrapper. This should be equivalent for the rest of
5057 the compiler, so the code does not diverge, and the wrapper may run the
5058 code necessary for keeping the profiling sane. */
5059
5060 switch (DECL_FUNCTION_CODE (fn))
5061 {
5062 case BUILT_IN_FORK:
5063 id = get_identifier ("__gcov_fork");
5064 break;
5065
5066 case BUILT_IN_EXECL:
5067 id = get_identifier ("__gcov_execl");
5068 break;
5069
5070 case BUILT_IN_EXECV:
5071 id = get_identifier ("__gcov_execv");
5072 break;
5073
5074 case BUILT_IN_EXECLP:
5075 id = get_identifier ("__gcov_execlp");
5076 break;
5077
5078 case BUILT_IN_EXECLE:
5079 id = get_identifier ("__gcov_execle");
5080 break;
5081
5082 case BUILT_IN_EXECVP:
5083 id = get_identifier ("__gcov_execvp");
5084 break;
5085
5086 case BUILT_IN_EXECVE:
5087 id = get_identifier ("__gcov_execve");
5088 break;
5089
5090 default:
5091 gcc_unreachable ();
5092 }
5093
5094 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5095 FUNCTION_DECL, id, TREE_TYPE (fn));
5096 DECL_EXTERNAL (decl) = 1;
5097 TREE_PUBLIC (decl) = 1;
5098 DECL_ARTIFICIAL (decl) = 1;
5099 TREE_NOTHROW (decl) = 1;
5100 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5101 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5102 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5103 return expand_call (call, target, ignore);
5104 }
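
/* Net effect under -fprofile-arcs (editorial illustration): a user call such
   as

     pid_t pid = fork ();

   is expanded as if it had been written

     pid_t pid = __gcov_fork ();

   where the libgcov wrapper flushes the profile counters before invoking the
   real fork so parent and child keep consistent coverage data.  */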
5105
5106
5107 \f
5108 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5109 the pointer in these functions is void*, the tree optimizers may remove
5110 casts. The mode computed in expand_builtin isn't reliable either, due
5111 to __sync_bool_compare_and_swap.
5112
5113 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5114 group of builtins. This gives us log2 of the mode size. */
5115
5116 static inline machine_mode
5117 get_builtin_sync_mode (int fcode_diff)
5118 {
5119 /* The size is not negotiable, so ask not to get BLKmode in return
5120 if the target indicates that a smaller size would be better. */
5121 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5122 }
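
/* Worked example (illustrative): for __sync_fetch_and_add_4,
   FCODE_DIFF = BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1
   = 2, so the size requested is BITS_PER_UNIT << 2 = 32 bits, i.e. SImode
   on a target with 8-bit units.  */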
5123
5124 /* Expand the memory expression LOC and return the appropriate memory operand
5125 for the builtin_sync operations. */
5126
5127 static rtx
5128 get_builtin_sync_mem (tree loc, machine_mode mode)
5129 {
5130 rtx addr, mem;
5131
5132 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5133 addr = convert_memory_address (Pmode, addr);
5134
5135 /* Note that we explicitly do not want any alias information for this
5136 memory, so that we kill all other live memories. Otherwise we don't
5137 satisfy the full barrier semantics of the intrinsic. */
5138 mem = validize_mem (gen_rtx_MEM (mode, addr));
5139
5140 /* The alignment needs to be at least that of the mode. */
5141 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5142 get_pointer_alignment (loc)));
5143 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5144 MEM_VOLATILE_P (mem) = 1;
5145
5146 return mem;
5147 }
5148
5149 /* Make sure an argument is in the right mode.
5150 EXP is the tree argument.
5151 MODE is the mode it should be in. */
5152
5153 static rtx
5154 expand_expr_force_mode (tree exp, machine_mode mode)
5155 {
5156 rtx val;
5157 machine_mode old_mode;
5158
5159 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5161 of CONST_INTs, where we know the old_mode only from the call argument. */
5162
5163 old_mode = GET_MODE (val);
5164 if (old_mode == VOIDmode)
5165 old_mode = TYPE_MODE (TREE_TYPE (exp));
5166 val = convert_modes (mode, old_mode, val, 1);
5167 return val;
5168 }
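
/* Example of the CONST_INT case mentioned above (illustrative): the literal
   5 passed as the second argument of __sync_fetch_and_add_2 expands to
   (const_int 5), whose GET_MODE is VOIDmode, so the HImode to convert from
   has to be taken from the TREE_TYPE of the call argument instead.  */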
5169
5170
5171 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5172 EXP is the CALL_EXPR. CODE is the rtx code
5173 that corresponds to the arithmetic or logical operation from the name;
5174 an exception here is that NOT actually means NAND. TARGET is an optional
5175 place for us to store the results; AFTER is true if this is the
5176 fetch_and_xxx form. */
5177
5178 static rtx
5179 expand_builtin_sync_operation (machine_mode mode, tree exp,
5180 enum rtx_code code, bool after,
5181 rtx target)
5182 {
5183 rtx val, mem;
5184 location_t loc = EXPR_LOCATION (exp);
5185
5186 if (code == NOT && warn_sync_nand)
5187 {
5188 tree fndecl = get_callee_fndecl (exp);
5189 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5190
5191 static bool warned_f_a_n, warned_n_a_f;
5192
5193 switch (fcode)
5194 {
5195 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5196 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5197 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5198 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5199 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5200 if (warned_f_a_n)
5201 break;
5202
5203 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5204 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5205 warned_f_a_n = true;
5206 break;
5207
5208 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5209 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5210 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5211 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5212 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5213 if (warned_n_a_f)
5214 break;
5215
5216 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5217 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5218 warned_n_a_f = true;
5219 break;
5220
5221 default:
5222 gcc_unreachable ();
5223 }
5224 }
5225
5226 /* Expand the operands. */
5227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5229
5230 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5231 after);
5232 }
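
/* Reminder of the NOT-means-NAND semantics referred to above, as defined
   since GCC 4.4 (user-level sketch; "type" stands for the operand type):

     type __sync_fetch_and_nand (type *ptr, type val)
     {
       type old = *ptr;
       *ptr = ~(old & val);   // GCC 4.3 and earlier computed ~old & val
       return old;            // the fetch_and_xxx forms return the old value
     }

   The __sync_nand_and_fetch form returns ~(old & val) instead.  */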
5233
5234 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5235 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5236 true if this is the boolean form. TARGET is a place for us to store the
5237 results; this is NOT optional if IS_BOOL is true. */
5238
5239 static rtx
5240 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5241 bool is_bool, rtx target)
5242 {
5243 rtx old_val, new_val, mem;
5244 rtx *pbool, *poval;
5245
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5250
5251 pbool = poval = NULL;
5252 if (target != const0_rtx)
5253 {
5254 if (is_bool)
5255 pbool = &target;
5256 else
5257 poval = &target;
5258 }
5259 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5260 false, MEMMODEL_SYNC_SEQ_CST,
5261 MEMMODEL_SYNC_SEQ_CST))
5262 return NULL_RTX;
5263
5264 return target;
5265 }
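
/* User-level view of the two forms handled above (illustrative):

     bool __sync_bool_compare_and_swap (type *ptr, type oldval, type newval);
       - returns true iff the swap was performed (the PBOOL result);
     type __sync_val_compare_and_swap (type *ptr, type oldval, type newval);
       - returns the value of *ptr before the operation (the POVAL result).

   Both use MEMMODEL_SYNC_SEQ_CST for the success and failure orderings.  */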
5266
5267 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5268 general form is actually an atomic exchange, and some targets only
5269 support a reduced form with the second argument being a constant 1.
5270 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5271 the results. */
5272
5273 static rtx
5274 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5275 rtx target)
5276 {
5277 rtx val, mem;
5278
5279 /* Expand the operands. */
5280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5281 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5282
5283 return expand_sync_lock_test_and_set (target, mem, val);
5284 }
5285
5286 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5287
5288 static void
5289 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5290 {
5291 rtx mem;
5292
5293 /* Expand the operands. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
5296 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5297 }
5298
5299 /* Given an integer representing an ``enum memmodel'', verify its
5300 correctness and return the memory model enum. */
5301
5302 static enum memmodel
5303 get_memmodel (tree exp)
5304 {
5305 rtx op;
5306 unsigned HOST_WIDE_INT val;
5307
5308 /* If the parameter is not a constant, it's a run time value so we'll just
5309 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5310 if (TREE_CODE (exp) != INTEGER_CST)
5311 return MEMMODEL_SEQ_CST;
5312
5313 op = expand_normal (exp);
5314
5315 val = INTVAL (op);
5316 if (targetm.memmodel_check)
5317 val = targetm.memmodel_check (val);
5318 else if (val & ~MEMMODEL_MASK)
5319 {
5320 warning (OPT_Winvalid_memory_model,
5321 "Unknown architecture specifier in memory model to builtin.");
5322 return MEMMODEL_SEQ_CST;
5323 }
5324
5325 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5326 if (memmodel_base (val) >= MEMMODEL_LAST)
5327 {
5328 warning (OPT_Winvalid_memory_model,
5329 "invalid memory model argument to builtin");
5330 return MEMMODEL_SEQ_CST;
5331 }
5332
5333 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5334 be conservative and promote consume to acquire. */
5335 if (val == MEMMODEL_CONSUME)
5336 val = MEMMODEL_ACQUIRE;
5337
5338 return (enum memmodel) val;
5339 }
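
/* For reference (illustrative): the __ATOMIC_* constants the user passes,
   __ATOMIC_RELAXED (0) through __ATOMIC_SEQ_CST (5), map directly onto the
   memmodel enum.  A call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   therefore reaches this point as the constant 1 and, per the workaround
   above, is promoted to MEMMODEL_ACQUIRE; a non-constant model argument is
   simply treated as MEMMODEL_SEQ_CST.  */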
5340
5341 /* Expand the __atomic_exchange intrinsic:
5342 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5343 EXP is the CALL_EXPR.
5344 TARGET is an optional place for us to store the results. */
5345
5346 static rtx
5347 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5348 {
5349 rtx val, mem;
5350 enum memmodel model;
5351
5352 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5353
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5356
5357 /* Expand the operands. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5359 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5360
5361 return expand_atomic_exchange (target, mem, val, model);
5362 }
5363
5364 /* Expand the __atomic_compare_exchange intrinsic:
5365 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5366 TYPE desired, BOOL weak,
5367 enum memmodel success,
5368 enum memmodel failure)
5369 EXP is the CALL_EXPR.
5370 TARGET is an optional place for us to store the results. */
5371
5372 static rtx
5373 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5374 rtx target)
5375 {
5376 rtx expect, desired, mem, oldval;
5377 rtx_code_label *label;
5378 enum memmodel success, failure;
5379 tree weak;
5380 bool is_weak;
5381
5382 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5383 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5384
5385 if (failure > success)
5386 {
5387 warning (OPT_Winvalid_memory_model,
5388 "failure memory model cannot be stronger than success memory "
5389 "model for %<__atomic_compare_exchange%>");
5390 success = MEMMODEL_SEQ_CST;
5391 }
5392
5393 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5394 {
5395 warning (OPT_Winvalid_memory_model,
5396 "invalid failure memory model for "
5397 "%<__atomic_compare_exchange%>");
5398 failure = MEMMODEL_SEQ_CST;
5399 success = MEMMODEL_SEQ_CST;
5400 }
5401
5402
5403 if (!flag_inline_atomics)
5404 return NULL_RTX;
5405
5406 /* Expand the operands. */
5407 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5408
5409 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5410 expect = convert_memory_address (Pmode, expect);
5411 expect = gen_rtx_MEM (mode, expect);
5412 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5413
5414 weak = CALL_EXPR_ARG (exp, 3);
5415 is_weak = false;
5416 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5417 is_weak = true;
5418
5419 if (target == const0_rtx)
5420 target = NULL;
5421
5422 /* Lest the rtl backend create a race condition with an improper store
5423 to memory, always create a new pseudo for OLDVAL. */
5424 oldval = NULL;
5425
5426 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5427 is_weak, success, failure))
5428 return NULL_RTX;
5429
5430 /* Conditionally store back to EXPECT, lest we create a race condition
5431 with an improper store to memory. */
5432 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5433 the normal case where EXPECT is totally private, i.e. a register. At
5434 which point the store can be unconditional. */
5435 label = gen_label_rtx ();
5436 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5437 GET_MODE (target), 1, label);
5438 emit_move_insn (expect, oldval);
5439 emit_label (label);
5440
5441 return target;
5442 }
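
/* The conditional store-back above implements the usual contract that on
   failure *EXPECT is updated with the observed value.  A typical user loop
   relying on that behaviour (illustrative sketch):

     int expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;  // a failed exchange refreshed 'expected' from *p, so just retry
   */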
5443
5444 /* Expand the __atomic_load intrinsic:
5445 TYPE __atomic_load (TYPE *object, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5448
5449 static rtx
5450 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5451 {
5452 rtx mem;
5453 enum memmodel model;
5454
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5456 if (is_mm_release (model) || is_mm_acq_rel (model))
5457 {
5458 warning (OPT_Winvalid_memory_model,
5459 "invalid memory model for %<__atomic_load%>");
5460 model = MEMMODEL_SEQ_CST;
5461 }
5462
5463 if (!flag_inline_atomics)
5464 return NULL_RTX;
5465
5466 /* Expand the operand. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468
5469 return expand_atomic_load (target, mem, model);
5470 }
5471
5472
5473 /* Expand the __atomic_store intrinsic:
5474 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5475 EXP is the CALL_EXPR.
5476 TARGET is an optional place for us to store the results. */
5477
5478 static rtx
5479 expand_builtin_atomic_store (machine_mode mode, tree exp)
5480 {
5481 rtx mem, val;
5482 enum memmodel model;
5483
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5485 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5486 || is_mm_release (model)))
5487 {
5488 warning (OPT_Winvalid_memory_model,
5489 "invalid memory model for %<__atomic_store%>");
5490 model = MEMMODEL_SEQ_CST;
5491 }
5492
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5495
5496 /* Expand the operands. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5499
5500 return expand_atomic_store (mem, val, model, false);
5501 }
5502
5503 /* Expand the __atomic_fetch_XXX intrinsic:
5504 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results.
5507 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
5508 FETCH_AFTER is true if returning the result of the operation.
5509 FETCH_AFTER is false if returning the value before the operation.
5510 IGNORE is true if the result is not used.
5511 EXT_CALL is the correct builtin for an external call if this cannot be
5512 resolved to an instruction sequence. */
5513
5514 static rtx
5515 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5516 enum rtx_code code, bool fetch_after,
5517 bool ignore, enum built_in_function ext_call)
5518 {
5519 rtx val, mem, ret;
5520 enum memmodel model;
5521 tree fndecl;
5522 tree addr;
5523
5524 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5525
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529
5530 /* Only try generating instructions if inlining is turned on. */
5531 if (flag_inline_atomics)
5532 {
5533 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5534 if (ret)
5535 return ret;
5536 }
5537
5538 /* Return if a different routine isn't needed for the library call. */
5539 if (ext_call == BUILT_IN_NONE)
5540 return NULL_RTX;
5541
5542 /* Change the call to the specified function. */
5543 fndecl = get_callee_fndecl (exp);
5544 addr = CALL_EXPR_FN (exp);
5545 STRIP_NOPS (addr);
5546
5547 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5548 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5549
5550 /* Expand the call here so we can emit trailing code. */
5551 ret = expand_call (exp, target, ignore);
5552
5553 /* Replace the original function just in case it matters. */
5554 TREE_OPERAND (addr, 0) = fndecl;
5555
5556 /* Then issue the arithmetic correction to return the right result. */
5557 if (!ignore)
5558 {
5559 if (code == NOT)
5560 {
5561 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5562 OPTAB_LIB_WIDEN);
5563 ret = expand_simple_unop (mode, NOT, ret, target, true);
5564 }
5565 else
5566 ret = expand_simple_binop (mode, code, ret, val, target, true,
5567 OPTAB_LIB_WIDEN);
5568 }
5569 return ret;
5570 }
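
/* Illustration of the trailing correction above (editorial sketch): the
   library routine named by EXT_CALL returns the pre-operation value, so when
   the user asked for the op_fetch form the result is recomputed locally,
   e.g. for __atomic_add_fetch falling back to __atomic_fetch_add:

     ret = __atomic_fetch_add (ptr, val, model);   // old value from library
     ret = ret + val;                              // emitted correction

   For the NAND case the correction is ret = ~(ret & val), as coded above.  */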
5571
5572 /* Expand an atomic clear operation.
5573 void _atomic_clear (BOOL *obj, enum memmodel)
5574 EXP is the call expression. */
5575
5576 static rtx
5577 expand_builtin_atomic_clear (tree exp)
5578 {
5579 machine_mode mode;
5580 rtx mem, ret;
5581 enum memmodel model;
5582
5583 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5586
5587 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5588 {
5589 warning (OPT_Winvalid_memory_model,
5590 "invalid memory model for %<__atomic_store%>");
5591 model = MEMMODEL_SEQ_CST;
5592 }
5593
5594 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5595 Failing that, a plain store is issued below. The only way this can
5596 fail is if the bool type is larger than a word size. Unlikely, but
5597 handle it anyway for completeness. Assume a single threaded model since
5598 there is no atomic support in this case, and no barriers are required. */
5599 ret = expand_atomic_store (mem, const0_rtx, model, true);
5600 if (!ret)
5601 emit_move_insn (mem, const0_rtx);
5602 return const0_rtx;
5603 }
5604
5605 /* Expand an atomic test_and_set operation.
5606 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5607 EXP is the call expression. */
5608
5609 static rtx
5610 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5611 {
5612 rtx mem;
5613 enum memmodel model;
5614 machine_mode mode;
5615
5616 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5617 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5618 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5619
5620 return expand_atomic_test_and_set (target, mem, model);
5621 }
5622
5623
5624 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5625 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5626
5627 static tree
5628 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5629 {
5630 int size;
5631 machine_mode mode;
5632 unsigned int mode_align, type_align;
5633
5634 if (TREE_CODE (arg0) != INTEGER_CST)
5635 return NULL_TREE;
5636
5637 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5638 mode = mode_for_size (size, MODE_INT, 0);
5639 mode_align = GET_MODE_ALIGNMENT (mode);
5640
5641 if (TREE_CODE (arg1) == INTEGER_CST)
5642 {
5643 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5644
5645 /* Either this argument is null, or it's a fake pointer encoding
5646 the alignment of the object. */
5647 val = val & -val;
5648 val *= BITS_PER_UNIT;
5649
5650 if (val == 0 || mode_align < val)
5651 type_align = mode_align;
5652 else
5653 type_align = val;
5654 }
5655 else
5656 {
5657 tree ttype = TREE_TYPE (arg1);
5658
5659 /* This function is usually invoked and folded immediately by the front
5660 end before anything else has a chance to look at it. The pointer
5661 parameter at this point is usually cast to a void *, so check for that
5662 and look past the cast. */
5663 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5664 && VOID_TYPE_P (TREE_TYPE (ttype)))
5665 arg1 = TREE_OPERAND (arg1, 0);
5666
5667 ttype = TREE_TYPE (arg1);
5668 gcc_assert (POINTER_TYPE_P (ttype));
5669
5670 /* Get the underlying type of the object. */
5671 ttype = TREE_TYPE (ttype);
5672 type_align = TYPE_ALIGN (ttype);
5673 }
5674
5675 /* If the object has smaller alignment, the lock free routines cannot
5676 be used. */
5677 if (type_align < mode_align)
5678 return boolean_false_node;
5679
5680 /* Check if a compare_and_swap pattern exists for the mode which represents
5681 the required size. The pattern is not allowed to fail, so the existence
5682 of the pattern indicates support is present. */
5683 if (can_compare_and_swap_p (mode, true))
5684 return boolean_true_node;
5685 else
5686 return boolean_false_node;
5687 }
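
/* Example of the fake-pointer alignment encoding handled above
   (illustrative): a caller that knows the object is 8-byte aligned may pass
   (void *) 8 as ARG1, in which case

     val & -val          == 8   lowest set bit, the guaranteed alignment
     val * BITS_PER_UNIT == 64  alignment in bits

   which is then compared against GET_MODE_ALIGNMENT of the integer mode
   chosen for the requested size.  */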
5688
5689 /* Return true if the parameters to call EXP represent an object which will
5690 always generate lock free instructions. The first argument represents the
5691 size of the object, and the second parameter is a pointer to the object
5692 itself. If NULL is passed for the object, then the result is based on
5693 typical alignment for an object of the specified size. Otherwise return
5694 false. */
5695
5696 static rtx
5697 expand_builtin_atomic_always_lock_free (tree exp)
5698 {
5699 tree size;
5700 tree arg0 = CALL_EXPR_ARG (exp, 0);
5701 tree arg1 = CALL_EXPR_ARG (exp, 1);
5702
5703 if (TREE_CODE (arg0) != INTEGER_CST)
5704 {
5705 error ("non-constant argument 1 to __atomic_always_lock_free");
5706 return const0_rtx;
5707 }
5708
5709 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5710 if (size == boolean_true_node)
5711 return const1_rtx;
5712 return const0_rtx;
5713 }
5714
5715 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5716 is lock free on this architecture. */
5717
5718 static tree
5719 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5720 {
5721 if (!flag_inline_atomics)
5722 return NULL_TREE;
5723
5724 /* If it isn't always lock free, don't generate a result. */
5725 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5726 return boolean_true_node;
5727
5728 return NULL_TREE;
5729 }
5730
5731 /* Return true if the parameters to call EXP represent an object which will
5732 always generate lock free instructions. The first argument represents the
5733 size of the object, and the second parameter is a pointer to the object
5734 itself. If NULL is passed for the object, then the result is based on
5735 typical alignment for an object of the specified size. Otherwise return
5736 NULL. */
5737
5738 static rtx
5739 expand_builtin_atomic_is_lock_free (tree exp)
5740 {
5741 tree size;
5742 tree arg0 = CALL_EXPR_ARG (exp, 0);
5743 tree arg1 = CALL_EXPR_ARG (exp, 1);
5744
5745 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5746 {
5747 error ("non-integer argument 1 to __atomic_is_lock_free");
5748 return NULL_RTX;
5749 }
5750
5751 if (!flag_inline_atomics)
5752 return NULL_RTX;
5753
5754 /* If the value is known at compile time, return the RTX for it. */
5755 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5756 if (size == boolean_true_node)
5757 return const1_rtx;
5758
5759 return NULL_RTX;
5760 }
5761
5762 /* Expand the __atomic_thread_fence intrinsic:
5763 void __atomic_thread_fence (enum memmodel)
5764 EXP is the CALL_EXPR. */
5765
5766 static void
5767 expand_builtin_atomic_thread_fence (tree exp)
5768 {
5769 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5770 expand_mem_thread_fence (model);
5771 }
5772
5773 /* Expand the __atomic_signal_fence intrinsic:
5774 void __atomic_signal_fence (enum memmodel)
5775 EXP is the CALL_EXPR. */
5776
5777 static void
5778 expand_builtin_atomic_signal_fence (tree exp)
5779 {
5780 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5781 expand_mem_signal_fence (model);
5782 }
5783
5784 /* Expand the __sync_synchronize intrinsic. */
5785
5786 static void
5787 expand_builtin_sync_synchronize (void)
5788 {
5789 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5790 }
5791
5792 static rtx
5793 expand_builtin_thread_pointer (tree exp, rtx target)
5794 {
5795 enum insn_code icode;
5796 if (!validate_arglist (exp, VOID_TYPE))
5797 return const0_rtx;
5798 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5799 if (icode != CODE_FOR_nothing)
5800 {
5801 struct expand_operand op;
5802 /* If the target is not suitable then create a new target. */
5803 if (target == NULL_RTX
5804 || !REG_P (target)
5805 || GET_MODE (target) != Pmode)
5806 target = gen_reg_rtx (Pmode);
5807 create_output_operand (&op, target, Pmode);
5808 expand_insn (icode, 1, &op);
5809 return target;
5810 }
5811 error ("__builtin_thread_pointer is not supported on this target");
5812 return const0_rtx;
5813 }
5814
5815 static void
5816 expand_builtin_set_thread_pointer (tree exp)
5817 {
5818 enum insn_code icode;
5819 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5820 return;
5821 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5822 if (icode != CODE_FOR_nothing)
5823 {
5824 struct expand_operand op;
5825 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5826 Pmode, EXPAND_NORMAL);
5827 create_input_operand (&op, val, Pmode);
5828 expand_insn (icode, 1, &op);
5829 return;
5830 }
5831 error ("__builtin_set_thread_pointer is not supported on this target");
5832 }
5833
5834 \f
5835 /* Emit code to restore the current value of stack. */
5836
5837 static void
5838 expand_stack_restore (tree var)
5839 {
5840 rtx_insn *prev;
5841 rtx sa = expand_normal (var);
5842
5843 sa = convert_memory_address (Pmode, sa);
5844
5845 prev = get_last_insn ();
5846 emit_stack_restore (SAVE_BLOCK, sa);
5847
5848 record_new_stack_level ();
5849
5850 fixup_args_size_notes (prev, get_last_insn (), 0);
5851 }
5852
5853 /* Emit code to save the current value of stack. */
5854
5855 static rtx
5856 expand_stack_save (void)
5857 {
5858 rtx ret = NULL_RTX;
5859
5860 emit_stack_save (SAVE_BLOCK, &ret);
5861 return ret;
5862 }
5863
5864
5865 /* Expand an expression EXP that calls a built-in function,
5866 with result going to TARGET if that's convenient
5867 (and in mode MODE if that's convenient).
5868 SUBTARGET may be used as the target for computing one of EXP's operands.
5869 IGNORE is nonzero if the value is to be ignored. */
5870
5871 rtx
5872 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5873 int ignore)
5874 {
5875 tree fndecl = get_callee_fndecl (exp);
5876 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5877 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5878 int flags;
5879
5880 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5881 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5882
5883 /* When ASan is enabled, we don't want to expand some memory/string
5884 builtins and rely on libsanitizer's hooks. This allows us to avoid
5885 redundant checks and be sure that possible overflow will be detected
5886 by ASan. */
5887
5888 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5889 return expand_call (exp, target, ignore);
5890
5891 /* When not optimizing, generate calls to library functions for a certain
5892 set of builtins. */
5893 if (!optimize
5894 && !called_as_built_in (fndecl)
5895 && fcode != BUILT_IN_FORK
5896 && fcode != BUILT_IN_EXECL
5897 && fcode != BUILT_IN_EXECV
5898 && fcode != BUILT_IN_EXECLP
5899 && fcode != BUILT_IN_EXECLE
5900 && fcode != BUILT_IN_EXECVP
5901 && fcode != BUILT_IN_EXECVE
5902 && fcode != BUILT_IN_ALLOCA
5903 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5904 && fcode != BUILT_IN_FREE
5905 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5906 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5907 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5908 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5909 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5910 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5911 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5912 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5913 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5914 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5915 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5916 && fcode != BUILT_IN_CHKP_BNDRET)
5917 return expand_call (exp, target, ignore);
5918
5919 /* The built-in function expanders test for target == const0_rtx
5920 to determine whether the function's result will be ignored. */
5921 if (ignore)
5922 target = const0_rtx;
5923
5924 /* If the result of a pure or const built-in function is ignored, and
5925 none of its arguments are volatile, we can avoid expanding the
5926 built-in call and just evaluate the arguments for side-effects. */
5927 if (target == const0_rtx
5928 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5929 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5930 {
5931 bool volatilep = false;
5932 tree arg;
5933 call_expr_arg_iterator iter;
5934
5935 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5936 if (TREE_THIS_VOLATILE (arg))
5937 {
5938 volatilep = true;
5939 break;
5940 }
5941
5942 if (! volatilep)
5943 {
5944 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5945 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5946 return const0_rtx;
5947 }
5948 }
5949
5950 /* expand_builtin_with_bounds is supposed to be used for
5951 instrumented builtin calls. */
5952 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5953
5954 switch (fcode)
5955 {
5956 CASE_FLT_FN (BUILT_IN_FABS):
5957 case BUILT_IN_FABSD32:
5958 case BUILT_IN_FABSD64:
5959 case BUILT_IN_FABSD128:
5960 target = expand_builtin_fabs (exp, target, subtarget);
5961 if (target)
5962 return target;
5963 break;
5964
5965 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5966 target = expand_builtin_copysign (exp, target, subtarget);
5967 if (target)
5968 return target;
5969 break;
5970
5971 /* Just do a normal library call if we were unable to fold
5972 the values. */
5973 CASE_FLT_FN (BUILT_IN_CABS):
5974 break;
5975
5976 CASE_FLT_FN (BUILT_IN_EXP):
5977 CASE_FLT_FN (BUILT_IN_EXP10):
5978 CASE_FLT_FN (BUILT_IN_POW10):
5979 CASE_FLT_FN (BUILT_IN_EXP2):
5980 CASE_FLT_FN (BUILT_IN_EXPM1):
5981 CASE_FLT_FN (BUILT_IN_LOGB):
5982 CASE_FLT_FN (BUILT_IN_LOG):
5983 CASE_FLT_FN (BUILT_IN_LOG10):
5984 CASE_FLT_FN (BUILT_IN_LOG2):
5985 CASE_FLT_FN (BUILT_IN_LOG1P):
5986 CASE_FLT_FN (BUILT_IN_TAN):
5987 CASE_FLT_FN (BUILT_IN_ASIN):
5988 CASE_FLT_FN (BUILT_IN_ACOS):
5989 CASE_FLT_FN (BUILT_IN_ATAN):
5990 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5991 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5992 because of possible accuracy problems. */
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 CASE_FLT_FN (BUILT_IN_SQRT):
5996 CASE_FLT_FN (BUILT_IN_FLOOR):
5997 CASE_FLT_FN (BUILT_IN_CEIL):
5998 CASE_FLT_FN (BUILT_IN_TRUNC):
5999 CASE_FLT_FN (BUILT_IN_ROUND):
6000 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6001 CASE_FLT_FN (BUILT_IN_RINT):
6002 target = expand_builtin_mathfn (exp, target, subtarget);
6003 if (target)
6004 return target;
6005 break;
6006
6007 CASE_FLT_FN (BUILT_IN_FMA):
6008 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6009 if (target)
6010 return target;
6011 break;
6012
6013 CASE_FLT_FN (BUILT_IN_ILOGB):
6014 if (! flag_unsafe_math_optimizations)
6015 break;
6016 CASE_FLT_FN (BUILT_IN_ISINF):
6017 CASE_FLT_FN (BUILT_IN_FINITE):
6018 case BUILT_IN_ISFINITE:
6019 case BUILT_IN_ISNORMAL:
6020 target = expand_builtin_interclass_mathfn (exp, target);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_ICEIL):
6026 CASE_FLT_FN (BUILT_IN_LCEIL):
6027 CASE_FLT_FN (BUILT_IN_LLCEIL):
6028 CASE_FLT_FN (BUILT_IN_LFLOOR):
6029 CASE_FLT_FN (BUILT_IN_IFLOOR):
6030 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6031 target = expand_builtin_int_roundingfn (exp, target);
6032 if (target)
6033 return target;
6034 break;
6035
6036 CASE_FLT_FN (BUILT_IN_IRINT):
6037 CASE_FLT_FN (BUILT_IN_LRINT):
6038 CASE_FLT_FN (BUILT_IN_LLRINT):
6039 CASE_FLT_FN (BUILT_IN_IROUND):
6040 CASE_FLT_FN (BUILT_IN_LROUND):
6041 CASE_FLT_FN (BUILT_IN_LLROUND):
6042 target = expand_builtin_int_roundingfn_2 (exp, target);
6043 if (target)
6044 return target;
6045 break;
6046
6047 CASE_FLT_FN (BUILT_IN_POWI):
6048 target = expand_builtin_powi (exp, target);
6049 if (target)
6050 return target;
6051 break;
6052
6053 CASE_FLT_FN (BUILT_IN_ATAN2):
6054 CASE_FLT_FN (BUILT_IN_LDEXP):
6055 CASE_FLT_FN (BUILT_IN_SCALB):
6056 CASE_FLT_FN (BUILT_IN_SCALBN):
6057 CASE_FLT_FN (BUILT_IN_SCALBLN):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6060
6061 CASE_FLT_FN (BUILT_IN_FMOD):
6062 CASE_FLT_FN (BUILT_IN_REMAINDER):
6063 CASE_FLT_FN (BUILT_IN_DREM):
6064 CASE_FLT_FN (BUILT_IN_POW):
6065 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6066 if (target)
6067 return target;
6068 break;
6069
6070 CASE_FLT_FN (BUILT_IN_CEXPI):
6071 target = expand_builtin_cexpi (exp, target);
6072 gcc_assert (target);
6073 return target;
6074
6075 CASE_FLT_FN (BUILT_IN_SIN):
6076 CASE_FLT_FN (BUILT_IN_COS):
6077 if (! flag_unsafe_math_optimizations)
6078 break;
6079 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6083
6084 CASE_FLT_FN (BUILT_IN_SINCOS):
6085 if (! flag_unsafe_math_optimizations)
6086 break;
6087 target = expand_builtin_sincos (exp);
6088 if (target)
6089 return target;
6090 break;
6091
6092 case BUILT_IN_APPLY_ARGS:
6093 return expand_builtin_apply_args ();
6094
6095 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6096 FUNCTION with a copy of the parameters described by
6097 ARGUMENTS, and ARGSIZE. It returns a block of memory
6098 allocated on the stack into which is stored all the registers
6099 that might possibly be used for returning the result of a
6100 function. ARGUMENTS is the value returned by
6101 __builtin_apply_args. ARGSIZE is the number of bytes of
6102 arguments that must be copied. ??? How should this value be
6103 computed? We'll also need a safe worst case value for varargs
6104 functions. */
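/* A minimal sketch of how these builtins chain together in user code
   (illustrative; SIZE must be a safe upper bound on the argument block):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, SIZE);
     __builtin_return (ret);
*/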
6105 case BUILT_IN_APPLY:
6106 if (!validate_arglist (exp, POINTER_TYPE,
6107 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6108 && !validate_arglist (exp, REFERENCE_TYPE,
6109 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6110 return const0_rtx;
6111 else
6112 {
6113 rtx ops[3];
6114
6115 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6116 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6117 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6118
6119 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6120 }
6121
6122 /* __builtin_return (RESULT) causes the function to return the
6123 value described by RESULT. RESULT is address of the block of
6124 memory returned by __builtin_apply. */
6125 case BUILT_IN_RETURN:
6126 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6127 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6128 return const0_rtx;
6129
6130 case BUILT_IN_SAVEREGS:
6131 return expand_builtin_saveregs ();
6132
6133 case BUILT_IN_VA_ARG_PACK:
6134 /* All valid uses of __builtin_va_arg_pack () are removed during
6135 inlining. */
6136 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6137 return const0_rtx;
6138
6139 case BUILT_IN_VA_ARG_PACK_LEN:
6140 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6141 inlining. */
6142 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6143 return const0_rtx;
6144
6145 /* Return the address of the first anonymous stack arg. */
6146 case BUILT_IN_NEXT_ARG:
6147 if (fold_builtin_next_arg (exp, false))
6148 return const0_rtx;
6149 return expand_builtin_next_arg ();
6150
6151 case BUILT_IN_CLEAR_CACHE:
6152 target = expand_builtin___clear_cache (exp);
6153 if (target)
6154 return target;
6155 break;
6156
6157 case BUILT_IN_CLASSIFY_TYPE:
6158 return expand_builtin_classify_type (exp);
6159
6160 case BUILT_IN_CONSTANT_P:
6161 return const0_rtx;
6162
6163 case BUILT_IN_FRAME_ADDRESS:
6164 case BUILT_IN_RETURN_ADDRESS:
6165 return expand_builtin_frame_address (fndecl, exp);
6166
6167 /* Returns the address of the area where the structure is returned.
6168 0 otherwise. */
6169 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6170 if (call_expr_nargs (exp) != 0
6171 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6172 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6173 return const0_rtx;
6174 else
6175 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6176
6177 case BUILT_IN_ALLOCA:
6178 case BUILT_IN_ALLOCA_WITH_ALIGN:
6179 /* If the allocation stems from the declaration of a variable-sized
6180 object, it cannot accumulate. */
6181 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6182 if (target)
6183 return target;
6184 break;
6185
6186 case BUILT_IN_STACK_SAVE:
6187 return expand_stack_save ();
6188
6189 case BUILT_IN_STACK_RESTORE:
6190 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6191 return const0_rtx;
6192
6193 case BUILT_IN_BSWAP16:
6194 case BUILT_IN_BSWAP32:
6195 case BUILT_IN_BSWAP64:
6196 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6197 if (target)
6198 return target;
6199 break;
6200
6201 CASE_INT_FN (BUILT_IN_FFS):
6202 target = expand_builtin_unop (target_mode, exp, target,
6203 subtarget, ffs_optab);
6204 if (target)
6205 return target;
6206 break;
6207
6208 CASE_INT_FN (BUILT_IN_CLZ):
6209 target = expand_builtin_unop (target_mode, exp, target,
6210 subtarget, clz_optab);
6211 if (target)
6212 return target;
6213 break;
6214
6215 CASE_INT_FN (BUILT_IN_CTZ):
6216 target = expand_builtin_unop (target_mode, exp, target,
6217 subtarget, ctz_optab);
6218 if (target)
6219 return target;
6220 break;
6221
6222 CASE_INT_FN (BUILT_IN_CLRSB):
6223 target = expand_builtin_unop (target_mode, exp, target,
6224 subtarget, clrsb_optab);
6225 if (target)
6226 return target;
6227 break;
6228
6229 CASE_INT_FN (BUILT_IN_POPCOUNT):
6230 target = expand_builtin_unop (target_mode, exp, target,
6231 subtarget, popcount_optab);
6232 if (target)
6233 return target;
6234 break;
6235
6236 CASE_INT_FN (BUILT_IN_PARITY):
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, parity_optab);
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_STRLEN:
6244 target = expand_builtin_strlen (exp, target, target_mode);
6245 if (target)
6246 return target;
6247 break;
6248
6249 case BUILT_IN_STRCPY:
6250 target = expand_builtin_strcpy (exp, target);
6251 if (target)
6252 return target;
6253 break;
6254
6255 case BUILT_IN_STRNCPY:
6256 target = expand_builtin_strncpy (exp, target);
6257 if (target)
6258 return target;
6259 break;
6260
6261 case BUILT_IN_STPCPY:
6262 target = expand_builtin_stpcpy (exp, target, mode);
6263 if (target)
6264 return target;
6265 break;
6266
6267 case BUILT_IN_MEMCPY:
6268 target = expand_builtin_memcpy (exp, target);
6269 if (target)
6270 return target;
6271 break;
6272
6273 case BUILT_IN_MEMPCPY:
6274 target = expand_builtin_mempcpy (exp, target, mode);
6275 if (target)
6276 return target;
6277 break;
6278
6279 case BUILT_IN_MEMSET:
6280 target = expand_builtin_memset (exp, target, mode);
6281 if (target)
6282 return target;
6283 break;
6284
6285 case BUILT_IN_BZERO:
6286 target = expand_builtin_bzero (exp);
6287 if (target)
6288 return target;
6289 break;
6290
6291 case BUILT_IN_STRCMP:
6292 target = expand_builtin_strcmp (exp, target);
6293 if (target)
6294 return target;
6295 break;
6296
6297 case BUILT_IN_STRNCMP:
6298 target = expand_builtin_strncmp (exp, target, mode);
6299 if (target)
6300 return target;
6301 break;
6302
6303 case BUILT_IN_BCMP:
6304 case BUILT_IN_MEMCMP:
6305 target = expand_builtin_memcmp (exp, target);
6306 if (target)
6307 return target;
6308 break;
6309
6310 case BUILT_IN_SETJMP:
6311 /* This should have been lowered to the builtins below. */
6312 gcc_unreachable ();
6313
6314 case BUILT_IN_SETJMP_SETUP:
6315 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6316 and the receiver label. */
6317 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6318 {
6319 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6320 VOIDmode, EXPAND_NORMAL);
6321 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6322 rtx_insn *label_r = label_rtx (label);
6323
6324 /* This is copied from the handling of non-local gotos. */
6325 expand_builtin_setjmp_setup (buf_addr, label_r);
6326 nonlocal_goto_handler_labels
6327 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6328 nonlocal_goto_handler_labels);
6329 /* ??? Do not let expand_label treat us as such since we would
6330 not want to be both on the list of non-local labels and on
6331 the list of forced labels. */
6332 FORCED_LABEL (label) = 0;
6333 return const0_rtx;
6334 }
6335 break;
6336
6337 case BUILT_IN_SETJMP_RECEIVER:
6338 /* __builtin_setjmp_receiver is passed the receiver label. */
6339 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6340 {
6341 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6342 rtx_insn *label_r = label_rtx (label);
6343
6344 expand_builtin_setjmp_receiver (label_r);
6345 return const0_rtx;
6346 }
6347 break;
6348
6349 /* __builtin_longjmp is passed a pointer to an array of five words.
6350 It's similar to the C library longjmp function but works with
6351 __builtin_setjmp above. */
6352 case BUILT_IN_LONGJMP:
6353 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6354 {
6355 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6356 VOIDmode, EXPAND_NORMAL);
6357 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6358
6359 if (value != const1_rtx)
6360 {
6361 error ("%<__builtin_longjmp%> second argument must be 1");
6362 return const0_rtx;
6363 }
6364
6365 expand_builtin_longjmp (buf_addr, value);
6366 return const0_rtx;
6367 }
6368 break;
6369
6370 case BUILT_IN_NONLOCAL_GOTO:
6371 target = expand_builtin_nonlocal_goto (exp);
6372 if (target)
6373 return target;
6374 break;
6375
6376 /* This updates the setjmp buffer that is its argument with the value
6377 of the current stack pointer. */
6378 case BUILT_IN_UPDATE_SETJMP_BUF:
6379 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6380 {
6381 rtx buf_addr
6382 = expand_normal (CALL_EXPR_ARG (exp, 0));
6383
6384 expand_builtin_update_setjmp_buf (buf_addr);
6385 return const0_rtx;
6386 }
6387 break;
6388
6389 case BUILT_IN_TRAP:
6390 expand_builtin_trap ();
6391 return const0_rtx;
6392
6393 case BUILT_IN_UNREACHABLE:
6394 expand_builtin_unreachable ();
6395 return const0_rtx;
6396
6397 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6398 case BUILT_IN_SIGNBITD32:
6399 case BUILT_IN_SIGNBITD64:
6400 case BUILT_IN_SIGNBITD128:
6401 target = expand_builtin_signbit (exp, target);
6402 if (target)
6403 return target;
6404 break;
6405
6406 /* Various hooks for the DWARF 2 __throw routine. */
6407 case BUILT_IN_UNWIND_INIT:
6408 expand_builtin_unwind_init ();
6409 return const0_rtx;
6410 case BUILT_IN_DWARF_CFA:
6411 return virtual_cfa_rtx;
6412 #ifdef DWARF2_UNWIND_INFO
6413 case BUILT_IN_DWARF_SP_COLUMN:
6414 return expand_builtin_dwarf_sp_column ();
6415 case BUILT_IN_INIT_DWARF_REG_SIZES:
6416 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6417 return const0_rtx;
6418 #endif
6419 case BUILT_IN_FROB_RETURN_ADDR:
6420 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6421 case BUILT_IN_EXTRACT_RETURN_ADDR:
6422 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6423 case BUILT_IN_EH_RETURN:
6424 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6425 CALL_EXPR_ARG (exp, 1));
6426 return const0_rtx;
6427 case BUILT_IN_EH_RETURN_DATA_REGNO:
6428 return expand_builtin_eh_return_data_regno (exp);
6429 case BUILT_IN_EXTEND_POINTER:
6430 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6431 case BUILT_IN_EH_POINTER:
6432 return expand_builtin_eh_pointer (exp);
6433 case BUILT_IN_EH_FILTER:
6434 return expand_builtin_eh_filter (exp);
6435 case BUILT_IN_EH_COPY_VALUES:
6436 return expand_builtin_eh_copy_values (exp);
6437
6438 case BUILT_IN_VA_START:
6439 return expand_builtin_va_start (exp);
6440 case BUILT_IN_VA_END:
6441 return expand_builtin_va_end (exp);
6442 case BUILT_IN_VA_COPY:
6443 return expand_builtin_va_copy (exp);
6444 case BUILT_IN_EXPECT:
6445 return expand_builtin_expect (exp, target);
6446 case BUILT_IN_ASSUME_ALIGNED:
6447 return expand_builtin_assume_aligned (exp, target);
6448 case BUILT_IN_PREFETCH:
6449 expand_builtin_prefetch (exp);
6450 return const0_rtx;
6451
6452 case BUILT_IN_INIT_TRAMPOLINE:
6453 return expand_builtin_init_trampoline (exp, true);
6454 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6455 return expand_builtin_init_trampoline (exp, false);
6456 case BUILT_IN_ADJUST_TRAMPOLINE:
6457 return expand_builtin_adjust_trampoline (exp);
6458
6459 case BUILT_IN_FORK:
6460 case BUILT_IN_EXECL:
6461 case BUILT_IN_EXECV:
6462 case BUILT_IN_EXECLP:
6463 case BUILT_IN_EXECLE:
6464 case BUILT_IN_EXECVP:
6465 case BUILT_IN_EXECVE:
6466 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6467 if (target)
6468 return target;
6469 break;
6470
6471 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6474 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6475 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6477 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6478 if (target)
6479 return target;
6480 break;
6481
6482 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6485 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6486 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6488 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6489 if (target)
6490 return target;
6491 break;
6492
6493 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6494 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6496 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6497 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6499 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6500 if (target)
6501 return target;
6502 break;
6503
6504 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6505 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6507 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6508 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6510 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6518 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6519 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6521 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6522 if (target)
6523 return target;
6524 break;
6525
6526 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6529 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6530 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6532 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6533 if (target)
6534 return target;
6535 break;
6536
6537 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6540 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6541 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6543 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6544 if (target)
6545 return target;
6546 break;
6547
6548 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6551 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6552 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6554 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6555 if (target)
6556 return target;
6557 break;
6558
6559 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6560 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6562 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6563 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6566 if (target)
6567 return target;
6568 break;
6569
6570 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6571 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6573 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6574 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6584 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6585 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6588 if (target)
6589 return target;
6590 break;
6591
6592 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6595 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6596 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6599 if (target)
6600 return target;
6601 break;
6602
6603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6608 if (mode == VOIDmode)
6609 mode = TYPE_MODE (boolean_type_node);
6610 if (!target || !register_operand (target, mode))
6611 target = gen_reg_rtx (mode);
6612
6613 mode = get_builtin_sync_mode
6614 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6615 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6616 if (target)
6617 return target;
6618 break;
6619
6620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6625 mode = get_builtin_sync_mode
6626 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6627 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6628 if (target)
6629 return target;
6630 break;
6631
6632 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6635 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6638 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6639 if (target)
6640 return target;
6641 break;
6642
6643 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6644 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6646 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6647 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6649 expand_builtin_sync_lock_release (mode, exp);
6650 return const0_rtx;
6651
6652 case BUILT_IN_SYNC_SYNCHRONIZE:
6653 expand_builtin_sync_synchronize ();
6654 return const0_rtx;
6655
6656 case BUILT_IN_ATOMIC_EXCHANGE_1:
6657 case BUILT_IN_ATOMIC_EXCHANGE_2:
6658 case BUILT_IN_ATOMIC_EXCHANGE_4:
6659 case BUILT_IN_ATOMIC_EXCHANGE_8:
6660 case BUILT_IN_ATOMIC_EXCHANGE_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6662 target = expand_builtin_atomic_exchange (mode, exp, target);
6663 if (target)
6664 return target;
6665 break;
6666
6667 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6670 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6672 {
6673 unsigned int nargs, z;
6674 vec<tree, va_gc> *vec;
6675
6676 mode =
6677 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6678 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6679 if (target)
6680 return target;
6681
6682 /* If this is turned into an external library call, the weak parameter
6683 must be dropped to match the expected parameter list. */
6684 nargs = call_expr_nargs (exp);
6685 vec_alloc (vec, nargs - 1);
6686 for (z = 0; z < 3; z++)
6687 vec->quick_push (CALL_EXPR_ARG (exp, z));
6688 /* Skip the boolean weak parameter. */
6689 for (z = 4; z < 6; z++)
6690 vec->quick_push (CALL_EXPR_ARG (exp, z));
6691 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6692 break;
6693 }
6694
6695 case BUILT_IN_ATOMIC_LOAD_1:
6696 case BUILT_IN_ATOMIC_LOAD_2:
6697 case BUILT_IN_ATOMIC_LOAD_4:
6698 case BUILT_IN_ATOMIC_LOAD_8:
6699 case BUILT_IN_ATOMIC_LOAD_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6701 target = expand_builtin_atomic_load (mode, exp, target);
6702 if (target)
6703 return target;
6704 break;
6705
6706 case BUILT_IN_ATOMIC_STORE_1:
6707 case BUILT_IN_ATOMIC_STORE_2:
6708 case BUILT_IN_ATOMIC_STORE_4:
6709 case BUILT_IN_ATOMIC_STORE_8:
6710 case BUILT_IN_ATOMIC_STORE_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6712 target = expand_builtin_atomic_store (mode, exp);
6713 if (target)
6714 return const0_rtx;
6715 break;
6716
6717 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6718 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6719 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6720 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6721 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6722 {
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6726 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6732 }
6733 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6734 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6736 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6737 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6738 {
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6742 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6748 }
6749 case BUILT_IN_ATOMIC_AND_FETCH_1:
6750 case BUILT_IN_ATOMIC_AND_FETCH_2:
6751 case BUILT_IN_ATOMIC_AND_FETCH_4:
6752 case BUILT_IN_ATOMIC_AND_FETCH_8:
6753 case BUILT_IN_ATOMIC_AND_FETCH_16:
6754 {
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6758 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6764 }
6765 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6766 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6768 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6769 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6770 {
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6774 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6780 }
6781 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6782 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6784 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6785 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6786 {
6787 enum built_in_function lib;
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6789 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6790 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6792 ignore, lib);
6793 if (target)
6794 return target;
6795 break;
6796 }
6797 case BUILT_IN_ATOMIC_OR_FETCH_1:
6798 case BUILT_IN_ATOMIC_OR_FETCH_2:
6799 case BUILT_IN_ATOMIC_OR_FETCH_4:
6800 case BUILT_IN_ATOMIC_OR_FETCH_8:
6801 case BUILT_IN_ATOMIC_OR_FETCH_16:
6802 {
6803 enum built_in_function lib;
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6805 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6806 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6808 ignore, lib);
6809 if (target)
6810 return target;
6811 break;
6812 }
6813 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6814 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6816 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6817 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6820 ignore, BUILT_IN_NONE);
6821 if (target)
6822 return target;
6823 break;
6824
6825 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6826 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6828 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6829 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6831 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6832 ignore, BUILT_IN_NONE);
6833 if (target)
6834 return target;
6835 break;
6836
6837 case BUILT_IN_ATOMIC_FETCH_AND_1:
6838 case BUILT_IN_ATOMIC_FETCH_AND_2:
6839 case BUILT_IN_ATOMIC_FETCH_AND_4:
6840 case BUILT_IN_ATOMIC_FETCH_AND_8:
6841 case BUILT_IN_ATOMIC_FETCH_AND_16:
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6843 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6844 ignore, BUILT_IN_NONE);
6845 if (target)
6846 return target;
6847 break;
6848
6849 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6850 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6852 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6853 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6855 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6856 ignore, BUILT_IN_NONE);
6857 if (target)
6858 return target;
6859 break;
6860
6861 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6862 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6864 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6865 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6867 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6868 ignore, BUILT_IN_NONE);
6869 if (target)
6870 return target;
6871 break;
6872
6873 case BUILT_IN_ATOMIC_FETCH_OR_1:
6874 case BUILT_IN_ATOMIC_FETCH_OR_2:
6875 case BUILT_IN_ATOMIC_FETCH_OR_4:
6876 case BUILT_IN_ATOMIC_FETCH_OR_8:
6877 case BUILT_IN_ATOMIC_FETCH_OR_16:
6878 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6879 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6880 ignore, BUILT_IN_NONE);
6881 if (target)
6882 return target;
6883 break;
6884
6885 case BUILT_IN_ATOMIC_TEST_AND_SET:
6886 return expand_builtin_atomic_test_and_set (exp, target);
6887
6888 case BUILT_IN_ATOMIC_CLEAR:
6889 return expand_builtin_atomic_clear (exp);
6890
6891 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6892 return expand_builtin_atomic_always_lock_free (exp);
6893
6894 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6895 target = expand_builtin_atomic_is_lock_free (exp);
6896 if (target)
6897 return target;
6898 break;
6899
6900 case BUILT_IN_ATOMIC_THREAD_FENCE:
6901 expand_builtin_atomic_thread_fence (exp);
6902 return const0_rtx;
6903
6904 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6905 expand_builtin_atomic_signal_fence (exp);
6906 return const0_rtx;
6907
6908 case BUILT_IN_OBJECT_SIZE:
6909 return expand_builtin_object_size (exp);
6910
6911 case BUILT_IN_MEMCPY_CHK:
6912 case BUILT_IN_MEMPCPY_CHK:
6913 case BUILT_IN_MEMMOVE_CHK:
6914 case BUILT_IN_MEMSET_CHK:
6915 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6916 if (target)
6917 return target;
6918 break;
6919
6920 case BUILT_IN_STRCPY_CHK:
6921 case BUILT_IN_STPCPY_CHK:
6922 case BUILT_IN_STRNCPY_CHK:
6923 case BUILT_IN_STPNCPY_CHK:
6924 case BUILT_IN_STRCAT_CHK:
6925 case BUILT_IN_STRNCAT_CHK:
6926 case BUILT_IN_SNPRINTF_CHK:
6927 case BUILT_IN_VSNPRINTF_CHK:
6928 maybe_emit_chk_warning (exp, fcode);
6929 break;
6930
6931 case BUILT_IN_SPRINTF_CHK:
6932 case BUILT_IN_VSPRINTF_CHK:
6933 maybe_emit_sprintf_chk_warning (exp, fcode);
6934 break;
6935
6936 case BUILT_IN_FREE:
6937 if (warn_free_nonheap_object)
6938 maybe_emit_free_warning (exp);
6939 break;
6940
6941 case BUILT_IN_THREAD_POINTER:
6942 return expand_builtin_thread_pointer (exp, target);
6943
6944 case BUILT_IN_SET_THREAD_POINTER:
6945 expand_builtin_set_thread_pointer (exp);
6946 return const0_rtx;
6947
6948 case BUILT_IN_CILK_DETACH:
6949 expand_builtin_cilk_detach (exp);
6950 return const0_rtx;
6951
6952 case BUILT_IN_CILK_POP_FRAME:
6953 expand_builtin_cilk_pop_frame (exp);
6954 return const0_rtx;
6955
6956 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6957 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6958 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6959 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6960 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6961 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6962 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6963 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6964 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6965 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6966 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6967 /* We allow user CHKP builtins if Pointer Bounds
6968 Checker is off. */
6969 if (!chkp_function_instrumented_p (current_function_decl))
6970 {
6971 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6972 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6973 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6974 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6975 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6976 return expand_normal (CALL_EXPR_ARG (exp, 0));
6977 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6978 return expand_normal (size_zero_node);
6979 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6980 return expand_normal (size_int (-1));
6981 else
6982 return const0_rtx;
6983 }
6984 /* FALLTHROUGH */
6985
6986 case BUILT_IN_CHKP_BNDMK:
6987 case BUILT_IN_CHKP_BNDSTX:
6988 case BUILT_IN_CHKP_BNDCL:
6989 case BUILT_IN_CHKP_BNDCU:
6990 case BUILT_IN_CHKP_BNDLDX:
6991 case BUILT_IN_CHKP_BNDRET:
6992 case BUILT_IN_CHKP_INTERSECT:
6993 case BUILT_IN_CHKP_NARROW:
6994 case BUILT_IN_CHKP_EXTRACT_LOWER:
6995 case BUILT_IN_CHKP_EXTRACT_UPPER:
6996 /* Software implementation of Pointer Bounds Checker is NYI.
6997 Target support is required. */
6998 error ("Your target platform does not support -fcheck-pointer-bounds");
6999 break;
7000
7001 case BUILT_IN_ACC_ON_DEVICE:
7002 /* Do the library call if we failed to expand the builtin when
7003 folding. */
7004 break;
7005
7006 default: /* just do library call, if unknown builtin */
7007 break;
7008 }
7009
7010 /* The switch statement above can drop through to cause the function
7011 to be called normally. */
7012 return expand_call (exp, target, ignore);
7013 }
7014
7015 /* Similar to expand_builtin but is used for instrumented calls. */
7016
7017 rtx
7018 expand_builtin_with_bounds (tree exp, rtx target,
7019 rtx subtarget ATTRIBUTE_UNUSED,
7020 machine_mode mode, int ignore)
7021 {
7022 tree fndecl = get_callee_fndecl (exp);
7023 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7024
7025 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7026
7027 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7028 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7029
7030 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7031 && fcode < END_CHKP_BUILTINS);
7032
7033 switch (fcode)
7034 {
7035 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7036 target = expand_builtin_memcpy_with_bounds (exp, target);
7037 if (target)
7038 return target;
7039 break;
7040
7041 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7042 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7043 if (target)
7044 return target;
7045 break;
7046
7047 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7048 target = expand_builtin_memset_with_bounds (exp, target, mode);
7049 if (target)
7050 return target;
7051 break;
7052
7053 default:
7054 break;
7055 }
7056
7057 /* The switch statement above can drop through to cause the function
7058 to be called normally. */
7059 return expand_call (exp, target, ignore);
7060 }
7061
7062 /* Determine whether a tree node represents a call to a built-in
7063 function. If the tree T is a call to a built-in function with
7064 the right number of arguments of the appropriate types, return
7065 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7066 Otherwise the return value is END_BUILTINS. */
7067
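/* Illustrative examples (hypothetical calls, not part of the checker itself):
   given the builtin declaration "double sqrt (double)",

     builtin_mathfn_code (<call sqrt (2.0)>)      == BUILT_IN_SQRT
     builtin_mathfn_code (<call sqrt ("foo")>)    == END_BUILTINS
     builtin_mathfn_code (<call sqrt (1.0, 2.0)>) == END_BUILTINS

   because the second and third calls do not match the parameter list of
   the builtin declaration.  */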
7068 enum built_in_function
7069 builtin_mathfn_code (const_tree t)
7070 {
7071 const_tree fndecl, arg, parmlist;
7072 const_tree argtype, parmtype;
7073 const_call_expr_arg_iterator iter;
7074
7075 if (TREE_CODE (t) != CALL_EXPR
7076 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7077 return END_BUILTINS;
7078
7079 fndecl = get_callee_fndecl (t);
7080 if (fndecl == NULL_TREE
7081 || TREE_CODE (fndecl) != FUNCTION_DECL
7082 || ! DECL_BUILT_IN (fndecl)
7083 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7084 return END_BUILTINS;
7085
7086 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7087 init_const_call_expr_arg_iterator (t, &iter);
7088 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7089 {
7090 /* If a function doesn't take a variable number of arguments,
7091 the last element in the list will have type `void'. */
7092 parmtype = TREE_VALUE (parmlist);
7093 if (VOID_TYPE_P (parmtype))
7094 {
7095 if (more_const_call_expr_args_p (&iter))
7096 return END_BUILTINS;
7097 return DECL_FUNCTION_CODE (fndecl);
7098 }
7099
7100 if (! more_const_call_expr_args_p (&iter))
7101 return END_BUILTINS;
7102
7103 arg = next_const_call_expr_arg (&iter);
7104 argtype = TREE_TYPE (arg);
7105
7106 if (SCALAR_FLOAT_TYPE_P (parmtype))
7107 {
7108 if (! SCALAR_FLOAT_TYPE_P (argtype))
7109 return END_BUILTINS;
7110 }
7111 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7112 {
7113 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7114 return END_BUILTINS;
7115 }
7116 else if (POINTER_TYPE_P (parmtype))
7117 {
7118 if (! POINTER_TYPE_P (argtype))
7119 return END_BUILTINS;
7120 }
7121 else if (INTEGRAL_TYPE_P (parmtype))
7122 {
7123 if (! INTEGRAL_TYPE_P (argtype))
7124 return END_BUILTINS;
7125 }
7126 else
7127 return END_BUILTINS;
7128 }
7129
7130 /* Variable-length argument list. */
7131 return DECL_FUNCTION_CODE (fndecl);
7132 }
7133
7134 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7135 evaluate to a constant. */
7136
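/* User-level sketch of the folding below (illustrative only):

     __builtin_constant_p (42)       -> 1   (a constant-class node)
     __builtin_constant_p ("abc")    -> 1   (address of a STRING_CST)
     __builtin_constant_p (ptr_var)  -> 0   (pointer type; only literals count)
     __builtin_constant_p (int_var)  -> NULL_TREE, i.e. the decision is
                                        deferred to later folding, unless we
                                        are in an initializer context.  */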
7137 static tree
7138 fold_builtin_constant_p (tree arg)
7139 {
7140 /* We return 1 for a numeric type that's known to be a constant
7141 value at compile-time or for an aggregate type that's a
7142 literal constant. */
7143 STRIP_NOPS (arg);
7144
7145 /* If we know this is a constant, return the constant one. */
7146 if (CONSTANT_CLASS_P (arg)
7147 || (TREE_CODE (arg) == CONSTRUCTOR
7148 && TREE_CONSTANT (arg)))
7149 return integer_one_node;
7150 if (TREE_CODE (arg) == ADDR_EXPR)
7151 {
7152 tree op = TREE_OPERAND (arg, 0);
7153 if (TREE_CODE (op) == STRING_CST
7154 || (TREE_CODE (op) == ARRAY_REF
7155 && integer_zerop (TREE_OPERAND (op, 1))
7156 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7157 return integer_one_node;
7158 }
7159
7160 /* If this expression has side effects, show we don't know it to be a
7161 constant. Likewise if it's a pointer or aggregate type since in
7162 those cases we only want literals, since those are only optimized
7163 when generating RTL, not later.
7164 And finally, if we are compiling an initializer, not code, we
7165 need to return a definite result now; there's not going to be any
7166 more optimization done. */
7167 if (TREE_SIDE_EFFECTS (arg)
7168 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7169 || POINTER_TYPE_P (TREE_TYPE (arg))
7170 || cfun == 0
7171 || folding_initializer
7172 || force_folding_builtin_constant_p)
7173 return integer_zero_node;
7174
7175 return NULL_TREE;
7176 }
7177
7178 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7179 return it as a truthvalue. */
7180
7181 static tree
7182 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7183 tree predictor)
7184 {
7185 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7186
7187 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7188 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7189 ret_type = TREE_TYPE (TREE_TYPE (fn));
7190 pred_type = TREE_VALUE (arg_types);
7191 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7192
7193 pred = fold_convert_loc (loc, pred_type, pred);
7194 expected = fold_convert_loc (loc, expected_type, expected);
7195 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7196 predictor);
7197
7198 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7199 build_int_cst (ret_type, 0));
7200 }
7201
7202 /* Fold a call to builtin_expect with arguments ARG0 and ARG1 and optional
7203 predictor argument ARG2. Return NULL_TREE if no simplification is possible. */
7204
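/* Illustrative source-level view of the distribution performed below
   (hypothetical user code, short-circuit condition case):

     __builtin_expect (a && b, 1)

   is rewritten into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that the expectation is attached to each short-circuit operand.  */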
7205 tree
7206 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7207 {
7208 tree inner, fndecl, inner_arg0;
7209 enum tree_code code;
7210
7211 /* Distribute the expected value over short-circuiting operators.
7212 See through the cast from truthvalue_type_node to long. */
7213 inner_arg0 = arg0;
7214 while (CONVERT_EXPR_P (inner_arg0)
7215 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7216 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7217 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7218
7219 /* If this is a builtin_expect within a builtin_expect keep the
7220 inner one. See through a comparison against a constant. It
7221 might have been added to create a truthvalue. */
7222 inner = inner_arg0;
7223
7224 if (COMPARISON_CLASS_P (inner)
7225 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7226 inner = TREE_OPERAND (inner, 0);
7227
7228 if (TREE_CODE (inner) == CALL_EXPR
7229 && (fndecl = get_callee_fndecl (inner))
7230 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7231 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7232 return arg0;
7233
7234 inner = inner_arg0;
7235 code = TREE_CODE (inner);
7236 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7237 {
7238 tree op0 = TREE_OPERAND (inner, 0);
7239 tree op1 = TREE_OPERAND (inner, 1);
7240
7241 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7242 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7243 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7244
7245 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7246 }
7247
7248 /* If the argument isn't invariant then there's nothing else we can do. */
7249 if (!TREE_CONSTANT (inner_arg0))
7250 return NULL_TREE;
7251
7252 /* If we expect that a comparison against the argument will fold to
7253 a constant return the constant. In practice, this means a true
7254 constant or the address of a non-weak symbol. */
7255 inner = inner_arg0;
7256 STRIP_NOPS (inner);
7257 if (TREE_CODE (inner) == ADDR_EXPR)
7258 {
7259 do
7260 {
7261 inner = TREE_OPERAND (inner, 0);
7262 }
7263 while (TREE_CODE (inner) == COMPONENT_REF
7264 || TREE_CODE (inner) == ARRAY_REF);
7265 if ((TREE_CODE (inner) == VAR_DECL
7266 || TREE_CODE (inner) == FUNCTION_DECL)
7267 && DECL_WEAK (inner))
7268 return NULL_TREE;
7269 }
7270
7271 /* Otherwise, ARG0 already has the proper type for the return value. */
7272 return arg0;
7273 }
7274
7275 /* Fold a call to __builtin_classify_type with argument ARG. */
7276
7277 static tree
7278 fold_builtin_classify_type (tree arg)
7279 {
7280 if (arg == 0)
7281 return build_int_cst (integer_type_node, no_type_class);
7282
7283 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7284 }
7285
7286 /* Fold a call to __builtin_strlen with argument ARG. */
7287
7288 static tree
7289 fold_builtin_strlen (location_t loc, tree type, tree arg)
7290 {
7291 if (!validate_arg (arg, POINTER_TYPE))
7292 return NULL_TREE;
7293 else
7294 {
7295 tree len = c_strlen (arg, 0);
7296
7297 if (len)
7298 return fold_convert_loc (loc, type, len);
7299
7300 return NULL_TREE;
7301 }
7302 }
7303
7304 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7305
7306 static tree
7307 fold_builtin_inf (location_t loc, tree type, int warn)
7308 {
7309 REAL_VALUE_TYPE real;
7310
7311 /* __builtin_inff is intended to be usable to define INFINITY on all
7312 targets. If an infinity is not available, INFINITY expands "to a
7313 positive constant of type float that overflows at translation
7314 time", footnote "In this case, using INFINITY will violate the
7315 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7316 Thus we pedwarn to ensure this constraint violation is
7317 diagnosed. */
7318 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7319 pedwarn (loc, 0, "target format does not support infinity");
7320
7321 real_inf (&real);
7322 return build_real (type, real);
7323 }
7324
7325 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7326 NULL_TREE if no simplification can be made. */
7327
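/* Rough sketch of the canonicalization below (illustrative only):

     sincos (x, &s, &c);

   becomes, when a cexpi builtin is available,

     tmp = cexpi (x);     (or a folded constant if x is a REAL_CST)
     s = __imag__ tmp;
     c = __real__ tmp;

   expressed as a COMPOUND_EXPR of the two assignments.  */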
7328 static tree
7329 fold_builtin_sincos (location_t loc,
7330 tree arg0, tree arg1, tree arg2)
7331 {
7332 tree type;
7333 tree fndecl, call = NULL_TREE;
7334
7335 if (!validate_arg (arg0, REAL_TYPE)
7336 || !validate_arg (arg1, POINTER_TYPE)
7337 || !validate_arg (arg2, POINTER_TYPE))
7338 return NULL_TREE;
7339
7340 type = TREE_TYPE (arg0);
7341
7342 /* Canonicalize sincos to cexpi; bail out if no cexpi builtin exists for TYPE. */
7343 built_in_function fn = mathfn_built_in_2 (type, BUILT_IN_CEXPI);
7344 if (fn == END_BUILTINS)
7345 return NULL_TREE;
7346
7347 /* Calculate the result when the argument is a constant. */
7348 if (TREE_CODE (arg0) == REAL_CST)
7349 {
7350 tree complex_type = build_complex_type (type);
7351 call = fold_const_call (fn, complex_type, arg0);
7352 }
7353 if (!call)
7354 {
7355 if (!targetm.libc_has_function (function_c99_math_complex)
7356 || !builtin_decl_implicit_p (fn))
7357 return NULL_TREE;
7358 fndecl = builtin_decl_explicit (fn);
7359 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7360 call = builtin_save_expr (call);
7361 }
7362
7363 return build2 (COMPOUND_EXPR, void_type_node,
7364 build2 (MODIFY_EXPR, void_type_node,
7365 build_fold_indirect_ref_loc (loc, arg1),
7366 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7367 build2 (MODIFY_EXPR, void_type_node,
7368 build_fold_indirect_ref_loc (loc, arg2),
7369 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7370 }
7371
7372 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7373 arguments to the call, and TYPE is its return type.
7374 Return NULL_TREE if no simplification can be made. */
7375
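/* Illustrative constant-folding examples (hypothetical calls):

     memchr ("hello", 'l', 6)   ->   "hello" + 2
     memchr ("hello", 'z', 6)   ->   a null pointer constant

   This only applies when the string, the character and the length are all
   compile-time constants and LEN does not read past the terminating NUL.  */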
7376 static tree
7377 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7378 {
7379 if (!validate_arg (arg1, POINTER_TYPE)
7380 || !validate_arg (arg2, INTEGER_TYPE)
7381 || !validate_arg (len, INTEGER_TYPE))
7382 return NULL_TREE;
7383 else
7384 {
7385 const char *p1;
7386
7387 if (TREE_CODE (arg2) != INTEGER_CST
7388 || !tree_fits_uhwi_p (len))
7389 return NULL_TREE;
7390
7391 p1 = c_getstr (arg1);
7392 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7393 {
7394 char c;
7395 const char *r;
7396 tree tem;
7397
7398 if (target_char_cast (arg2, &c))
7399 return NULL_TREE;
7400
7401 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7402
7403 if (r == NULL)
7404 return build_int_cst (TREE_TYPE (arg1), 0);
7405
7406 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7407 return fold_convert_loc (loc, type, tem);
7408 }
7409 return NULL_TREE;
7410 }
7411 }
7412
7413 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7414 Return NULL_TREE if no simplification can be made. */
7415
7416 static tree
7417 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7418 {
7419 if (!validate_arg (arg1, POINTER_TYPE)
7420 || !validate_arg (arg2, POINTER_TYPE)
7421 || !validate_arg (len, INTEGER_TYPE))
7422 return NULL_TREE;
7423
7424 /* If the LEN parameter is zero, return zero. */
7425 if (integer_zerop (len))
7426 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7427 arg1, arg2);
7428
7429 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7430 if (operand_equal_p (arg1, arg2, 0))
7431 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7432
7433 /* If len parameter is one, return an expression corresponding to
7434 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7435 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7436 {
7437 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7438 tree cst_uchar_ptr_node
7439 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7440
7441 tree ind1
7442 = fold_convert_loc (loc, integer_type_node,
7443 build1 (INDIRECT_REF, cst_uchar_node,
7444 fold_convert_loc (loc,
7445 cst_uchar_ptr_node,
7446 arg1)));
7447 tree ind2
7448 = fold_convert_loc (loc, integer_type_node,
7449 build1 (INDIRECT_REF, cst_uchar_node,
7450 fold_convert_loc (loc,
7451 cst_uchar_ptr_node,
7452 arg2)));
7453 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7454 }
7455
7456 return NULL_TREE;
7457 }
7458
7459 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7460 Return NULL_TREE if no simplification can be made. */
7461
7462 static tree
7463 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7464 {
7465 if (!validate_arg (arg1, POINTER_TYPE)
7466 || !validate_arg (arg2, POINTER_TYPE))
7467 return NULL_TREE;
7468
7469 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7470 if (operand_equal_p (arg1, arg2, 0))
7471 return integer_zero_node;
7472
7473 /* If the second arg is "", return *(const unsigned char*)arg1. */
7474 const char *p2 = c_getstr (arg2);
7475 if (p2 && *p2 == '\0')
7476 {
7477 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7478 tree cst_uchar_ptr_node
7479 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7480
7481 return fold_convert_loc (loc, integer_type_node,
7482 build1 (INDIRECT_REF, cst_uchar_node,
7483 fold_convert_loc (loc,
7484 cst_uchar_ptr_node,
7485 arg1)));
7486 }
7487
7488 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7489 const char *p1 = c_getstr (arg1);
7490 if (p1 && *p1 == '\0')
7491 {
7492 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7493 tree cst_uchar_ptr_node
7494 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7495
7496 tree temp
7497 = fold_convert_loc (loc, integer_type_node,
7498 build1 (INDIRECT_REF, cst_uchar_node,
7499 fold_convert_loc (loc,
7500 cst_uchar_ptr_node,
7501 arg2)));
7502 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7503 }
7504
7505 return NULL_TREE;
7506 }
7507
7508 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7509 Return NULL_TREE if no simplification can be made. */
7510
7511 static tree
7512 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7513 {
7514 if (!validate_arg (arg1, POINTER_TYPE)
7515 || !validate_arg (arg2, POINTER_TYPE)
7516 || !validate_arg (len, INTEGER_TYPE))
7517 return NULL_TREE;
7518
7519 /* If the LEN parameter is zero, return zero. */
7520 if (integer_zerop (len))
7521 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7522 arg1, arg2);
7523
7524 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7525 if (operand_equal_p (arg1, arg2, 0))
7526 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7527
7528 /* If the second arg is "", and the length is greater than zero,
7529 return *(const unsigned char*)arg1. */
7530 const char *p2 = c_getstr (arg2);
7531 if (p2 && *p2 == '\0'
7532 && TREE_CODE (len) == INTEGER_CST
7533 && tree_int_cst_sgn (len) == 1)
7534 {
7535 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7536 tree cst_uchar_ptr_node
7537 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7538
7539 return fold_convert_loc (loc, integer_type_node,
7540 build1 (INDIRECT_REF, cst_uchar_node,
7541 fold_convert_loc (loc,
7542 cst_uchar_ptr_node,
7543 arg1)));
7544 }
7545
7546 /* If the first arg is "", and the length is greater than zero,
7547 return -*(const unsigned char*)arg2. */
7548 const char *p1 = c_getstr (arg1);
7549 if (p1 && *p1 == '\0'
7550 && TREE_CODE (len) == INTEGER_CST
7551 && tree_int_cst_sgn (len) == 1)
7552 {
7553 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7554 tree cst_uchar_ptr_node
7555 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7556
7557 tree temp = fold_convert_loc (loc, integer_type_node,
7558 build1 (INDIRECT_REF, cst_uchar_node,
7559 fold_convert_loc (loc,
7560 cst_uchar_ptr_node,
7561 arg2)));
7562 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7563 }
7564
7565 /* If len parameter is one, return an expression corresponding to
7566 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7567 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7568 {
7569 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7570 tree cst_uchar_ptr_node
7571 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7572
7573 tree ind1 = fold_convert_loc (loc, integer_type_node,
7574 build1 (INDIRECT_REF, cst_uchar_node,
7575 fold_convert_loc (loc,
7576 cst_uchar_ptr_node,
7577 arg1)));
7578 tree ind2 = fold_convert_loc (loc, integer_type_node,
7579 build1 (INDIRECT_REF, cst_uchar_node,
7580 fold_convert_loc (loc,
7581 cst_uchar_ptr_node,
7582 arg2)));
7583 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7584 }
7585
7586 return NULL_TREE;
7587 }
7588
7589 /* Fold a call to builtin isascii with argument ARG. */
7590
7591 static tree
7592 fold_builtin_isascii (location_t loc, tree arg)
7593 {
7594 if (!validate_arg (arg, INTEGER_TYPE))
7595 return NULL_TREE;
7596 else
7597 {
7598 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7599 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7600 build_int_cst (integer_type_node,
7601 ~ (unsigned HOST_WIDE_INT) 0x7f));
7602 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7603 arg, integer_zero_node);
7604 }
7605 }
7606
7607 /* Fold a call to builtin toascii with argument ARG. */
7608
7609 static tree
7610 fold_builtin_toascii (location_t loc, tree arg)
7611 {
7612 if (!validate_arg (arg, INTEGER_TYPE))
7613 return NULL_TREE;
7614
7615 /* Transform toascii(c) -> (c & 0x7f). */
7616 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7617 build_int_cst (integer_type_node, 0x7f));
7618 }
7619
7620 /* Fold a call to builtin isdigit with argument ARG. */
7621
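/* Illustrative example of the transform below: on an ASCII target, where
   '0' maps to 48, the call

     isdigit (c)

   is folded to

     (unsigned) c - 48 <= 9

   On targets with a different character set the constant changes, and the
   fold is skipped entirely if the mapping of '0' is not known.  */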
7622 static tree
7623 fold_builtin_isdigit (location_t loc, tree arg)
7624 {
7625 if (!validate_arg (arg, INTEGER_TYPE))
7626 return NULL_TREE;
7627 else
7628 {
7629 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7630 /* According to the C standard, isdigit is unaffected by locale.
7631 However, it definitely is affected by the target character set. */
7632 unsigned HOST_WIDE_INT target_digit0
7633 = lang_hooks.to_target_charset ('0');
7634
7635 if (target_digit0 == 0)
7636 return NULL_TREE;
7637
7638 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7639 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7640 build_int_cst (unsigned_type_node, target_digit0));
7641 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7642 build_int_cst (unsigned_type_node, 9));
7643 }
7644 }
7645
7646 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7647
7648 static tree
7649 fold_builtin_fabs (location_t loc, tree arg, tree type)
7650 {
7651 if (!validate_arg (arg, REAL_TYPE))
7652 return NULL_TREE;
7653
7654 arg = fold_convert_loc (loc, type, arg);
7655 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7656 }
7657
7658 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7659
7660 static tree
7661 fold_builtin_abs (location_t loc, tree arg, tree type)
7662 {
7663 if (!validate_arg (arg, INTEGER_TYPE))
7664 return NULL_TREE;
7665
7666 arg = fold_convert_loc (loc, type, arg);
7667 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7668 }
7669
7670 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7671
7672 static tree
7673 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7674 {
7675 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7676 if (validate_arg (arg0, REAL_TYPE)
7677 && validate_arg (arg1, REAL_TYPE)
7678 && validate_arg (arg2, REAL_TYPE)
7679 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7680 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7681
7682 return NULL_TREE;
7683 }
7684
7685 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7686
7687 static tree
7688 fold_builtin_carg (location_t loc, tree arg, tree type)
7689 {
7690 if (validate_arg (arg, COMPLEX_TYPE)
7691 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7692 {
7693 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7694
7695 if (atan2_fn)
7696 {
7697 tree new_arg = builtin_save_expr (arg);
7698 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7699 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7700 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7701 }
7702 }
7703
7704 return NULL_TREE;
7705 }
7706
7707 /* Fold a call to builtin frexp; we can assume the base is 2. */
7708
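/* Constant-folding examples (illustrative): since GCC keeps normalized
   significands in [0.5, 1.0),

     frexp (8.0, &e)   ->   (*e = 4, 0.5)
     frexp (0.0, &e)   ->   (*e = 0, 0.0)
     frexp (Inf, &e)   ->   Inf, with *e left unspecified

   expressed as a COMPOUND_EXPR storing the exponent and yielding the
   fraction.  */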
7709 static tree
7710 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7711 {
7712 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7713 return NULL_TREE;
7714
7715 STRIP_NOPS (arg0);
7716
7717 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7718 return NULL_TREE;
7719
7720 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7721
7722 /* Proceed if a valid pointer type was passed in. */
7723 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7724 {
7725 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7726 tree frac, exp;
7727
7728 switch (value->cl)
7729 {
7730 case rvc_zero:
7731 /* For +-0, return (*exp = 0, +-0). */
7732 exp = integer_zero_node;
7733 frac = arg0;
7734 break;
7735 case rvc_nan:
7736 case rvc_inf:
7737 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7738 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7739 case rvc_normal:
7740 {
7741 /* Since the frexp function always expects base 2, and in
7742 GCC normalized significands are already in the range
7743 [0.5, 1.0), we have exactly what frexp wants. */
7744 REAL_VALUE_TYPE frac_rvt = *value;
7745 SET_REAL_EXP (&frac_rvt, 0);
7746 frac = build_real (rettype, frac_rvt);
7747 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7748 }
7749 break;
7750 default:
7751 gcc_unreachable ();
7752 }
7753
7754 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7755 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7756 TREE_SIDE_EFFECTS (arg1) = 1;
7757 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7758 }
7759
7760 return NULL_TREE;
7761 }
7762
7763 /* Fold a call to builtin modf. */
7764
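/* Constant-folding examples (illustrative):

     modf (2.5, &i)    ->   (*i = 2.0, 0.5)
     modf (-3.0, &i)   ->   (*i = -3.0, -0.0)
     modf (Inf, &i)    ->   (*i = Inf, 0.0 carrying Inf's sign)

   again expressed as a COMPOUND_EXPR of the store and the fraction.  */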
7765 static tree
7766 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7767 {
7768 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7769 return NULL_TREE;
7770
7771 STRIP_NOPS (arg0);
7772
7773 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7774 return NULL_TREE;
7775
7776 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7777
7778 /* Proceed if a valid pointer type was passed in. */
7779 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7780 {
7781 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7782 REAL_VALUE_TYPE trunc, frac;
7783
7784 switch (value->cl)
7785 {
7786 case rvc_nan:
7787 case rvc_zero:
7788 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7789 trunc = frac = *value;
7790 break;
7791 case rvc_inf:
7792 /* For +-Inf, return (*arg1 = arg0, +-0). */
7793 frac = dconst0;
7794 frac.sign = value->sign;
7795 trunc = *value;
7796 break;
7797 case rvc_normal:
7798 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7799 real_trunc (&trunc, VOIDmode, value);
7800 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7801 /* If the original number was negative and already
7802 integral, then the fractional part is -0.0. */
7803 if (value->sign && frac.cl == rvc_zero)
7804 frac.sign = value->sign;
7805 break;
7806 }
7807
7808 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7809 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7810 build_real (rettype, trunc));
7811 TREE_SIDE_EFFECTS (arg1) = 1;
7812 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7813 build_real (rettype, frac));
7814 }
7815
7816 return NULL_TREE;
7817 }
7818
7819 /* Given a location LOC, an interclass builtin function decl FNDECL
7820 and its single argument ARG, return a folded expression computing
7821 the same, or NULL_TREE if we either couldn't or didn't want to fold
7822 (the latter happens if there's an RTL instruction available). */
7823
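/* Examples of the generic expansions below, assuming IEEE double
   (DBL_MAX == 0x1.fffffffffffffp+1023, smallest normal == 0x1p-1022):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), 0x1p-1022)

   These are only used when no direct RTL instruction is available.  */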
7824 static tree
7825 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7826 {
7827 machine_mode mode;
7828
7829 if (!validate_arg (arg, REAL_TYPE))
7830 return NULL_TREE;
7831
7832 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7833 return NULL_TREE;
7834
7835 mode = TYPE_MODE (TREE_TYPE (arg));
7836
7837 /* If there is no optab, try generic code. */
7838 switch (DECL_FUNCTION_CODE (fndecl))
7839 {
7840 tree result;
7841
7842 CASE_FLT_FN (BUILT_IN_ISINF):
7843 {
7844 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7845 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7846 tree const type = TREE_TYPE (arg);
7847 REAL_VALUE_TYPE r;
7848 char buf[128];
7849
7850 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7851 real_from_string (&r, buf);
7852 result = build_call_expr (isgr_fn, 2,
7853 fold_build1_loc (loc, ABS_EXPR, type, arg),
7854 build_real (type, r));
7855 return result;
7856 }
7857 CASE_FLT_FN (BUILT_IN_FINITE):
7858 case BUILT_IN_ISFINITE:
7859 {
7860 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7861 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7862 tree const type = TREE_TYPE (arg);
7863 REAL_VALUE_TYPE r;
7864 char buf[128];
7865
7866 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7867 real_from_string (&r, buf);
7868 result = build_call_expr (isle_fn, 2,
7869 fold_build1_loc (loc, ABS_EXPR, type, arg),
7870 build_real (type, r));
7871 /*result = fold_build2_loc (loc, UNGT_EXPR,
7872 TREE_TYPE (TREE_TYPE (fndecl)),
7873 fold_build1_loc (loc, ABS_EXPR, type, arg),
7874 build_real (type, r));
7875 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7876 TREE_TYPE (TREE_TYPE (fndecl)),
7877 result);*/
7878 return result;
7879 }
7880 case BUILT_IN_ISNORMAL:
7881 {
7882 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7883 islessequal(fabs(x),DBL_MAX). */
7884 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7885 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7886 tree const type = TREE_TYPE (arg);
7887 REAL_VALUE_TYPE rmax, rmin;
7888 char buf[128];
7889
7890 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7891 real_from_string (&rmax, buf);
7892 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7893 real_from_string (&rmin, buf);
7894 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7895 result = build_call_expr (isle_fn, 2, arg,
7896 build_real (type, rmax));
7897 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
7898 build_call_expr (isge_fn, 2, arg,
7899 build_real (type, rmin)));
7900 return result;
7901 }
7902 default:
7903 break;
7904 }
7905
7906 return NULL_TREE;
7907 }
7908
7909 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7910 ARG is the argument for the call. */
7911
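/* Illustrative examples of the folds below:

     isnan (x)  when NaNs are not honored (e.g. -ffinite-math-only)  ->  0
     isnan (x)  otherwise   ->  x unordered x   (UNORDERED_EXPR)
     isinf (x)  when infinities are not honored                      ->  0
     isinf_sign (x)  ->  isinf (x) ? (signbit (x) ? -1 : 1) : 0

   The flag checks correspond to HONOR_NANS / HONOR_INFINITIES on the
   argument.  */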
7912 static tree
7913 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7914 {
7915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7916
7917 if (!validate_arg (arg, REAL_TYPE))
7918 return NULL_TREE;
7919
7920 switch (builtin_index)
7921 {
7922 case BUILT_IN_ISINF:
7923 if (!HONOR_INFINITIES (arg))
7924 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7925
7926 return NULL_TREE;
7927
7928 case BUILT_IN_ISINF_SIGN:
7929 {
7930 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7931 /* In a boolean context, GCC will fold the inner COND_EXPR to
7932 1. So e.g. "if (isinf_sign(x))" would be folded to just
7933 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7934 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
7935 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7936 tree tmp = NULL_TREE;
7937
7938 arg = builtin_save_expr (arg);
7939
7940 if (signbit_fn && isinf_fn)
7941 {
7942 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7943 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7944
7945 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7946 signbit_call, integer_zero_node);
7947 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7948 isinf_call, integer_zero_node);
7949
7950 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7951 integer_minus_one_node, integer_one_node);
7952 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7953 isinf_call, tmp,
7954 integer_zero_node);
7955 }
7956
7957 return tmp;
7958 }
7959
7960 case BUILT_IN_ISFINITE:
7961 if (!HONOR_NANS (arg)
7962 && !HONOR_INFINITIES (arg))
7963 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7964
7965 return NULL_TREE;
7966
7967 case BUILT_IN_ISNAN:
7968 if (!HONOR_NANS (arg))
7969 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7970
7971 arg = builtin_save_expr (arg);
7972 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7973
7974 default:
7975 gcc_unreachable ();
7976 }
7977 }
7978
7979 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7980 This builtin will generate code to return the appropriate floating
7981 point classification depending on the value of the floating point
7982 number passed in. The possible return values must be supplied as
7983 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7984 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7985 one floating point argument which is "type generic". */
7986
7987 static tree
7988 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7989 {
7990 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7991 arg, type, res, tmp;
7992 machine_mode mode;
7993 REAL_VALUE_TYPE r;
7994 char buf[128];
7995
7996 /* Verify the required arguments in the original call. */
7997 if (nargs != 6
7998 || !validate_arg (args[0], INTEGER_TYPE)
7999 || !validate_arg (args[1], INTEGER_TYPE)
8000 || !validate_arg (args[2], INTEGER_TYPE)
8001 || !validate_arg (args[3], INTEGER_TYPE)
8002 || !validate_arg (args[4], INTEGER_TYPE)
8003 || !validate_arg (args[5], REAL_TYPE))
8004 return NULL_TREE;
8005
8006 fp_nan = args[0];
8007 fp_infinite = args[1];
8008 fp_normal = args[2];
8009 fp_subnormal = args[3];
8010 fp_zero = args[4];
8011 arg = args[5];
8012 type = TREE_TYPE (arg);
8013 mode = TYPE_MODE (type);
8014 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8015
8016 /* fpclassify(x) ->
8017 isnan(x) ? FP_NAN :
8018 (fabs(x) == Inf ? FP_INFINITE :
8019 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8020 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8021
8022 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8023 build_real (type, dconst0));
8024 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8025 tmp, fp_zero, fp_subnormal);
8026
8027 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8028 real_from_string (&r, buf);
8029 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8030 arg, build_real (type, r));
8031 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8032
8033 if (HONOR_INFINITIES (mode))
8034 {
8035 real_inf (&r);
8036 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8037 build_real (type, r));
8038 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8039 fp_infinite, res);
8040 }
8041
8042 if (HONOR_NANS (mode))
8043 {
8044 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8045 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8046 }
8047
8048 return res;
8049 }
8050
8051 /* Fold a call to an unordered comparison function such as
8052 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8053 being called and ARG0 and ARG1 are the arguments for the call.
8054 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8055 the opposite of the desired result. UNORDERED_CODE is used
8056 for modes that can hold NaNs and ORDERED_CODE is used for
8057 the rest. */
8058
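/* Example of the folding below, for __builtin_isgreater (x, y):

     with NaNs honored:        ! (x unle y)   (TRUTH_NOT of UNLE_EXPR)
     with finite-only math:    ! (x <= y)

   i.e. the comparison is built with the *opposite* code and then negated,
   which is why the callers pass UNLE_EXPR/LE_EXPR for isgreater.  */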
8059 static tree
8060 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8061 enum tree_code unordered_code,
8062 enum tree_code ordered_code)
8063 {
8064 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8065 enum tree_code code;
8066 tree type0, type1;
8067 enum tree_code code0, code1;
8068 tree cmp_type = NULL_TREE;
8069
8070 type0 = TREE_TYPE (arg0);
8071 type1 = TREE_TYPE (arg1);
8072
8073 code0 = TREE_CODE (type0);
8074 code1 = TREE_CODE (type1);
8075
8076 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8077 /* Choose the wider of two real types. */
8078 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8079 ? type0 : type1;
8080 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8081 cmp_type = type0;
8082 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8083 cmp_type = type1;
8084
8085 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8086 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8087
8088 if (unordered_code == UNORDERED_EXPR)
8089 {
8090 if (!HONOR_NANS (arg0))
8091 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8092 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8093 }
8094
8095 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8096 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8097 fold_build2_loc (loc, code, type, arg0, arg1));
8098 }
8099
8100 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8101 arithmetics if it can never overflow, or into internal functions that
8102 return both result of arithmetics and overflowed boolean flag in
8103 a complex integer result, or some other check for overflow. */
8104
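/* Sketch of the lowering below (illustrative pseudo-GIMPLE):

     __builtin_add_overflow (a, b, &res)

   becomes roughly

     tmp = .ADD_OVERFLOW (a, b);       complex integer result
     *&res = REALPART_EXPR <tmp>;      the arithmetic result
     (bool) IMAGPART_EXPR <tmp>;       the overflow flag, value of the expr

   The same pattern is used for SUB and MUL via IFN_SUB_OVERFLOW and
   IFN_MUL_OVERFLOW.  */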
8105 static tree
8106 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8107 tree arg0, tree arg1, tree arg2)
8108 {
8109 enum internal_fn ifn = IFN_LAST;
8110 tree type = TREE_TYPE (TREE_TYPE (arg2));
8111 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8112 switch (fcode)
8113 {
8114 case BUILT_IN_ADD_OVERFLOW:
8115 case BUILT_IN_SADD_OVERFLOW:
8116 case BUILT_IN_SADDL_OVERFLOW:
8117 case BUILT_IN_SADDLL_OVERFLOW:
8118 case BUILT_IN_UADD_OVERFLOW:
8119 case BUILT_IN_UADDL_OVERFLOW:
8120 case BUILT_IN_UADDLL_OVERFLOW:
8121 ifn = IFN_ADD_OVERFLOW;
8122 break;
8123 case BUILT_IN_SUB_OVERFLOW:
8124 case BUILT_IN_SSUB_OVERFLOW:
8125 case BUILT_IN_SSUBL_OVERFLOW:
8126 case BUILT_IN_SSUBLL_OVERFLOW:
8127 case BUILT_IN_USUB_OVERFLOW:
8128 case BUILT_IN_USUBL_OVERFLOW:
8129 case BUILT_IN_USUBLL_OVERFLOW:
8130 ifn = IFN_SUB_OVERFLOW;
8131 break;
8132 case BUILT_IN_MUL_OVERFLOW:
8133 case BUILT_IN_SMUL_OVERFLOW:
8134 case BUILT_IN_SMULL_OVERFLOW:
8135 case BUILT_IN_SMULLL_OVERFLOW:
8136 case BUILT_IN_UMUL_OVERFLOW:
8137 case BUILT_IN_UMULL_OVERFLOW:
8138 case BUILT_IN_UMULLL_OVERFLOW:
8139 ifn = IFN_MUL_OVERFLOW;
8140 break;
8141 default:
8142 gcc_unreachable ();
8143 }
8144 tree ctype = build_complex_type (type);
8145 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8146 2, arg0, arg1);
8147 tree tgt = save_expr (call);
8148 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8149 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8150 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8151 tree store
8152 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8153 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8154 }
8155
8156 /* Fold a call to built-in function FNDECL with 0 arguments.
8157 This function returns NULL_TREE if no simplification was possible. */
8158
8159 static tree
8160 fold_builtin_0 (location_t loc, tree fndecl)
8161 {
8162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8163 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8164 switch (fcode)
8165 {
8166 CASE_FLT_FN (BUILT_IN_INF):
8167 case BUILT_IN_INFD32:
8168 case BUILT_IN_INFD64:
8169 case BUILT_IN_INFD128:
8170 return fold_builtin_inf (loc, type, true);
8171
8172 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8173 return fold_builtin_inf (loc, type, false);
8174
8175 case BUILT_IN_CLASSIFY_TYPE:
8176 return fold_builtin_classify_type (NULL_TREE);
8177
8178 default:
8179 break;
8180 }
8181 return NULL_TREE;
8182 }
8183
8184 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8185 This function returns NULL_TREE if no simplification was possible. */
8186
8187 static tree
8188 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8189 {
8190 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8192
8193 if (TREE_CODE (arg0) == ERROR_MARK)
8194 return NULL_TREE;
8195
8196 if (tree ret = fold_const_call (fcode, type, arg0))
8197 return ret;
8198
8199 switch (fcode)
8200 {
8201 case BUILT_IN_CONSTANT_P:
8202 {
8203 tree val = fold_builtin_constant_p (arg0);
8204
8205 /* Gimplification will pull the CALL_EXPR for the builtin out of
8206 an if condition. When not optimizing, we'll not CSE it back.
8207 To avoid regressions such as link errors, return false now. */
8208 if (!val && !optimize)
8209 val = integer_zero_node;
8210
8211 return val;
8212 }
8213
8214 case BUILT_IN_CLASSIFY_TYPE:
8215 return fold_builtin_classify_type (arg0);
8216
8217 case BUILT_IN_STRLEN:
8218 return fold_builtin_strlen (loc, type, arg0);
8219
8220 CASE_FLT_FN (BUILT_IN_FABS):
8221 case BUILT_IN_FABSD32:
8222 case BUILT_IN_FABSD64:
8223 case BUILT_IN_FABSD128:
8224 return fold_builtin_fabs (loc, arg0, type);
8225
8226 case BUILT_IN_ABS:
8227 case BUILT_IN_LABS:
8228 case BUILT_IN_LLABS:
8229 case BUILT_IN_IMAXABS:
8230 return fold_builtin_abs (loc, arg0, type);
8231
8232 CASE_FLT_FN (BUILT_IN_CONJ):
8233 if (validate_arg (arg0, COMPLEX_TYPE)
8234 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8235 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8236 break;
8237
8238 CASE_FLT_FN (BUILT_IN_CREAL):
8239 if (validate_arg (arg0, COMPLEX_TYPE)
8240 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8241 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8242 break;
8243
8244 CASE_FLT_FN (BUILT_IN_CIMAG):
8245 if (validate_arg (arg0, COMPLEX_TYPE)
8246 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8247 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8248 break;
8249
8250 CASE_FLT_FN (BUILT_IN_CARG):
8251 return fold_builtin_carg (loc, arg0, type);
8252
8253 case BUILT_IN_ISASCII:
8254 return fold_builtin_isascii (loc, arg0);
8255
8256 case BUILT_IN_TOASCII:
8257 return fold_builtin_toascii (loc, arg0);
8258
8259 case BUILT_IN_ISDIGIT:
8260 return fold_builtin_isdigit (loc, arg0);
8261
8262 CASE_FLT_FN (BUILT_IN_FINITE):
8263 case BUILT_IN_FINITED32:
8264 case BUILT_IN_FINITED64:
8265 case BUILT_IN_FINITED128:
8266 case BUILT_IN_ISFINITE:
8267 {
8268 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8269 if (ret)
8270 return ret;
8271 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8272 }
8273
8274 CASE_FLT_FN (BUILT_IN_ISINF):
8275 case BUILT_IN_ISINFD32:
8276 case BUILT_IN_ISINFD64:
8277 case BUILT_IN_ISINFD128:
8278 {
8279 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8280 if (ret)
8281 return ret;
8282 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8283 }
8284
8285 case BUILT_IN_ISNORMAL:
8286 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8287
8288 case BUILT_IN_ISINF_SIGN:
8289 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8290
8291 CASE_FLT_FN (BUILT_IN_ISNAN):
8292 case BUILT_IN_ISNAND32:
8293 case BUILT_IN_ISNAND64:
8294 case BUILT_IN_ISNAND128:
8295 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8296
8297 case BUILT_IN_FREE:
8298 if (integer_zerop (arg0))
8299 return build_empty_stmt (loc);
8300 break;
8301
8302 default:
8303 break;
8304 }
8305
8306 return NULL_TREE;
8307
8308 }
8309
8310 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8311 This function returns NULL_TREE if no simplification was possible. */
8312
8313 static tree
8314 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8315 {
8316 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8317 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8318
8319 if (TREE_CODE (arg0) == ERROR_MARK
8320 || TREE_CODE (arg1) == ERROR_MARK)
8321 return NULL_TREE;
8322
8323 if (tree ret = fold_const_call (fcode, type, arg0, arg1))
8324 return ret;
8325
8326 switch (fcode)
8327 {
8328 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8329 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8330 if (validate_arg (arg0, REAL_TYPE)
8331 && validate_arg (arg1, POINTER_TYPE))
8332 return do_mpfr_lgamma_r (arg0, arg1, type);
8333 break;
8334
8335 CASE_FLT_FN (BUILT_IN_FREXP):
8336 return fold_builtin_frexp (loc, arg0, arg1, type);
8337
8338 CASE_FLT_FN (BUILT_IN_MODF):
8339 return fold_builtin_modf (loc, arg0, arg1, type);
8340
8341 case BUILT_IN_STRSTR:
8342 return fold_builtin_strstr (loc, arg0, arg1, type);
8343
8344 case BUILT_IN_STRSPN:
8345 return fold_builtin_strspn (loc, arg0, arg1);
8346
8347 case BUILT_IN_STRCSPN:
8348 return fold_builtin_strcspn (loc, arg0, arg1);
8349
8350 case BUILT_IN_STRCHR:
8351 case BUILT_IN_INDEX:
8352 return fold_builtin_strchr (loc, arg0, arg1, type);
8353
8354 case BUILT_IN_STRRCHR:
8355 case BUILT_IN_RINDEX:
8356 return fold_builtin_strrchr (loc, arg0, arg1, type);
8357
8358 case BUILT_IN_STRCMP:
8359 return fold_builtin_strcmp (loc, arg0, arg1);
8360
8361 case BUILT_IN_STRPBRK:
8362 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8363
8364 case BUILT_IN_EXPECT:
8365 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8366
8367 case BUILT_IN_ISGREATER:
8368 return fold_builtin_unordered_cmp (loc, fndecl,
8369 arg0, arg1, UNLE_EXPR, LE_EXPR);
8370 case BUILT_IN_ISGREATEREQUAL:
8371 return fold_builtin_unordered_cmp (loc, fndecl,
8372 arg0, arg1, UNLT_EXPR, LT_EXPR);
8373 case BUILT_IN_ISLESS:
8374 return fold_builtin_unordered_cmp (loc, fndecl,
8375 arg0, arg1, UNGE_EXPR, GE_EXPR);
8376 case BUILT_IN_ISLESSEQUAL:
8377 return fold_builtin_unordered_cmp (loc, fndecl,
8378 arg0, arg1, UNGT_EXPR, GT_EXPR);
8379 case BUILT_IN_ISLESSGREATER:
8380 return fold_builtin_unordered_cmp (loc, fndecl,
8381 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8382 case BUILT_IN_ISUNORDERED:
8383 return fold_builtin_unordered_cmp (loc, fndecl,
8384 arg0, arg1, UNORDERED_EXPR,
8385 NOP_EXPR);
8386
8387 /* We do the folding for va_start in the expander. */
8388 case BUILT_IN_VA_START:
8389 break;
8390
8391 case BUILT_IN_OBJECT_SIZE:
8392 return fold_builtin_object_size (arg0, arg1);
8393
8394 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8395 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8396
8397 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8398 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8399
8400 default:
8401 break;
8402 }
8403 return NULL_TREE;
8404 }
8405
8406 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8407 and ARG2.
8408 This function returns NULL_TREE if no simplification was possible. */
8409
8410 static tree
8411 fold_builtin_3 (location_t loc, tree fndecl,
8412 tree arg0, tree arg1, tree arg2)
8413 {
8414 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8415 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8416
8417 if (TREE_CODE (arg0) == ERROR_MARK
8418 || TREE_CODE (arg1) == ERROR_MARK
8419 || TREE_CODE (arg2) == ERROR_MARK)
8420 return NULL_TREE;
8421
8422 if (tree ret = fold_const_call (fcode, type, arg0, arg1, arg2))
8423 return ret;
8424
8425 switch (fcode)
8426 {
8427
8428 CASE_FLT_FN (BUILT_IN_SINCOS):
8429 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8430
8431 CASE_FLT_FN (BUILT_IN_FMA):
8432 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8433
8434 CASE_FLT_FN (BUILT_IN_REMQUO):
8435 if (validate_arg (arg0, REAL_TYPE)
8436 && validate_arg (arg1, REAL_TYPE)
8437 && validate_arg (arg2, POINTER_TYPE))
8438 return do_mpfr_remquo (arg0, arg1, arg2);
8439 break;
8440
8441 case BUILT_IN_STRNCMP:
8442 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8443
8444 case BUILT_IN_MEMCHR:
8445 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8446
8447 case BUILT_IN_BCMP:
8448 case BUILT_IN_MEMCMP:
8449 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8450
8451 case BUILT_IN_EXPECT:
8452 return fold_builtin_expect (loc, arg0, arg1, arg2);
8453
8454 case BUILT_IN_ADD_OVERFLOW:
8455 case BUILT_IN_SUB_OVERFLOW:
8456 case BUILT_IN_MUL_OVERFLOW:
8457 case BUILT_IN_SADD_OVERFLOW:
8458 case BUILT_IN_SADDL_OVERFLOW:
8459 case BUILT_IN_SADDLL_OVERFLOW:
8460 case BUILT_IN_SSUB_OVERFLOW:
8461 case BUILT_IN_SSUBL_OVERFLOW:
8462 case BUILT_IN_SSUBLL_OVERFLOW:
8463 case BUILT_IN_SMUL_OVERFLOW:
8464 case BUILT_IN_SMULL_OVERFLOW:
8465 case BUILT_IN_SMULLL_OVERFLOW:
8466 case BUILT_IN_UADD_OVERFLOW:
8467 case BUILT_IN_UADDL_OVERFLOW:
8468 case BUILT_IN_UADDLL_OVERFLOW:
8469 case BUILT_IN_USUB_OVERFLOW:
8470 case BUILT_IN_USUBL_OVERFLOW:
8471 case BUILT_IN_USUBLL_OVERFLOW:
8472 case BUILT_IN_UMUL_OVERFLOW:
8473 case BUILT_IN_UMULL_OVERFLOW:
8474 case BUILT_IN_UMULLL_OVERFLOW:
8475 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8476
8477 default:
8478 break;
8479 }
8480 return NULL_TREE;
8481 }
8482
8483 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8484 arguments. The unnamed trailing bool parameter corresponds to IGNORE, true
8485 if the result of the function call is ignored. This function returns NULL_TREE if no
8486 simplification was possible. */
8487
8488 tree
8489 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8490 {
8491 tree ret = NULL_TREE;
8492
8493 switch (nargs)
8494 {
8495 case 0:
8496 ret = fold_builtin_0 (loc, fndecl);
8497 break;
8498 case 1:
8499 ret = fold_builtin_1 (loc, fndecl, args[0]);
8500 break;
8501 case 2:
8502 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8503 break;
8504 case 3:
8505 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8506 break;
8507 default:
8508 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8509 break;
8510 }
8511 if (ret)
8512 {
8513 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8514 SET_EXPR_LOCATION (ret, loc);
8515 TREE_NO_WARNING (ret) = 1;
8516 return ret;
8517 }
8518 return NULL_TREE;
8519 }
8520
8521 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8522 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8523 of arguments in ARGS to be omitted. OLDNARGS is the number of
8524 elements in ARGS. */
8525
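/* Worked example (hypothetical values): with OLDNARGS == 4, SKIP == 2 and
   N == 1 new argument NEW1, the rewritten call receives

     { NEW1, args[2], args[3] }

   i.e. the first SKIP old arguments are dropped and the N new ones are
   prepended to the remaining tail.  */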
8526 static tree
8527 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8528 int skip, tree fndecl, int n, va_list newargs)
8529 {
8530 int nargs = oldnargs - skip + n;
8531 tree *buffer;
8532
8533 if (n > 0)
8534 {
8535 int i, j;
8536
8537 buffer = XALLOCAVEC (tree, nargs);
8538 for (i = 0; i < n; i++)
8539 buffer[i] = va_arg (newargs, tree);
8540 for (j = skip; j < oldnargs; j++, i++)
8541 buffer[i] = args[j];
8542 }
8543 else
8544 buffer = args + skip;
8545
8546 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8547 }
8548
8549 /* Return true if FNDECL shouldn't be folded right now.
8550 If a built-in function has an inline attribute always_inline
8551 wrapper, defer folding it until after always_inline functions have
8552 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8553 might not be performed. */
8554
8555 bool
8556 avoid_folding_inline_builtin (tree fndecl)
8557 {
8558 return (DECL_DECLARED_INLINE_P (fndecl)
8559 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8560 && cfun
8561 && !cfun->always_inline_functions_inlined
8562 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8563 }
8564
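/* For illustration only: a rough sketch of the kind of always_inline
   wrapper this predicate defers folding for, loosely modelled on what a
   fortified <string.h> provides under -D_FORTIFY_SOURCE.  The names and
   attributes below are an approximation, not the exact glibc definitions:

     #include <stddef.h>

     extern void *__memcpy_chk (void *, const void *, size_t, size_t);

     extern inline __attribute__ ((always_inline, gnu_inline)) void *
     memcpy (void *dest, const void *src, size_t n)
     {
       // The wrapper adds the destination's object size so that the
       // _chk variant can diagnose overflows.
       return __memcpy_chk (dest, src, n, __builtin_object_size (dest, 0));
     }

   Folding a plain memcpy call before this wrapper has been inlined would
   skip the object-size check, hence the deferral.  */
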
8565 /* A wrapper function for builtin folding that prevents warnings for
8566 "statement without effect" and the like, caused by removing the
8567 call node earlier than the warning is generated. */
8568
8569 tree
8570 fold_call_expr (location_t loc, tree exp, bool ignore)
8571 {
8572 tree ret = NULL_TREE;
8573 tree fndecl = get_callee_fndecl (exp);
8574 if (fndecl
8575 && TREE_CODE (fndecl) == FUNCTION_DECL
8576 && DECL_BUILT_IN (fndecl)
8577 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8578 yet. Defer folding until we see all the arguments
8579 (after inlining). */
8580 && !CALL_EXPR_VA_ARG_PACK (exp))
8581 {
8582 int nargs = call_expr_nargs (exp);
8583
8584 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8585 instead the last argument is __builtin_va_arg_pack (). Defer folding
8586 even in that case, until arguments are finalized. */
8587 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8588 {
8589 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8590 if (fndecl2
8591 && TREE_CODE (fndecl2) == FUNCTION_DECL
8592 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8593 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8594 return NULL_TREE;
8595 }
8596
8597 if (avoid_folding_inline_builtin (fndecl))
8598 return NULL_TREE;
8599
8600 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8601 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8602 CALL_EXPR_ARGP (exp), ignore);
8603 else
8604 {
8605 tree *args = CALL_EXPR_ARGP (exp);
8606 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8607 if (ret)
8608 return ret;
8609 }
8610 }
8611 return NULL_TREE;
8612 }
8613
8614 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8615 N arguments are passed in the array ARGARRAY. Return a folded
8616 expression or NULL_TREE if no simplification was possible. */
8617
8618 tree
8619 fold_builtin_call_array (location_t loc, tree,
8620 tree fn,
8621 int n,
8622 tree *argarray)
8623 {
8624 if (TREE_CODE (fn) != ADDR_EXPR)
8625 return NULL_TREE;
8626
8627 tree fndecl = TREE_OPERAND (fn, 0);
8628 if (TREE_CODE (fndecl) == FUNCTION_DECL
8629 && DECL_BUILT_IN (fndecl))
8630 {
8631 /* If last argument is __builtin_va_arg_pack (), arguments to this
8632 function are not finalized yet. Defer folding until they are. */
8633 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8634 {
8635 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8636 if (fndecl2
8637 && TREE_CODE (fndecl2) == FUNCTION_DECL
8638 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8639 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8640 return NULL_TREE;
8641 }
8642 if (avoid_folding_inline_builtin (fndecl))
8643 return NULL_TREE;
8644 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8645 return targetm.fold_builtin (fndecl, n, argarray, false);
8646 else
8647 return fold_builtin_n (loc, fndecl, argarray, n, false);
8648 }
8649
8650 return NULL_TREE;
8651 }
8652
8653 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8654 along with N new arguments specified as the "..." parameters. SKIP
8655 is the number of arguments in EXP to be omitted. This function is used
8656 to do varargs-to-varargs transformations. */
8657
8658 static tree
8659 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8660 {
8661 va_list ap;
8662 tree t;
8663
8664 va_start (ap, n);
8665 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8666 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8667 va_end (ap);
8668
8669 return t;
8670 }
8671
8672 /* Validate a single argument ARG against a tree code CODE representing
8673 a type. */
8674
8675 static bool
8676 validate_arg (const_tree arg, enum tree_code code)
8677 {
8678 if (!arg)
8679 return false;
8680 else if (code == POINTER_TYPE)
8681 return POINTER_TYPE_P (TREE_TYPE (arg));
8682 else if (code == INTEGER_TYPE)
8683 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8684 return code == TREE_CODE (TREE_TYPE (arg));
8685 }
8686
8687 /* This function validates the types of a function call argument list
8688 against a specified list of tree_codes. If the last specifier is a 0,
8689 that represents an ellipsis; otherwise the last specifier must be a
8690 VOID_TYPE.
8691
8692 This is the GIMPLE version of validate_arglist. Eventually we want to
8693 completely convert builtins.c to work from GIMPLEs and the tree based
8694 validate_arglist will then be removed. */
8695
8696 bool
8697 validate_gimple_arglist (const gcall *call, ...)
8698 {
8699 enum tree_code code;
8700 bool res = false;
8701 va_list ap;
8702 const_tree arg;
8703 size_t i;
8704
8705 va_start (ap, call);
8706 i = 0;
8707
8708 do
8709 {
8710 code = (enum tree_code) va_arg (ap, int);
8711 switch (code)
8712 {
8713 case 0:
8714 /* This signifies an ellipsis; any further arguments are all ok. */
8715 res = true;
8716 goto end;
8717 case VOID_TYPE:
8718 /* This signifies an endlink, if no arguments remain, return
8719 true, otherwise return false. */
8720 res = (i == gimple_call_num_args (call));
8721 goto end;
8722 default:
8723 /* If no parameters remain or the parameter's code does not
8724 match the specified code, return false. Otherwise continue
8725 checking any remaining arguments. */
8726 arg = gimple_call_arg (call, i++);
8727 if (!validate_arg (arg, code))
8728 goto end;
8729 break;
8730 }
8731 }
8732 while (1);
8733
8734 /* We need gotos here so that every exit path reaches the single
8735 va_end call below. */
8736 end: ;
8737 va_end (ap);
8738
8739 return res;
8740 }
8741
8742 /* Default target-specific builtin expander that does nothing. */
8743
8744 rtx
8745 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8746 rtx target ATTRIBUTE_UNUSED,
8747 rtx subtarget ATTRIBUTE_UNUSED,
8748 machine_mode mode ATTRIBUTE_UNUSED,
8749 int ignore ATTRIBUTE_UNUSED)
8750 {
8751 return NULL_RTX;
8752 }
8753
8754 /* Returns true if EXP represents data that would potentially reside
8755 in a readonly section. */
8756
8757 bool
8758 readonly_data_expr (tree exp)
8759 {
8760 STRIP_NOPS (exp);
8761
8762 if (TREE_CODE (exp) != ADDR_EXPR)
8763 return false;
8764
8765 exp = get_base_address (TREE_OPERAND (exp, 0));
8766 if (!exp)
8767 return false;
8768
8769 /* Make sure we call decl_readonly_section only for trees it
8770 can handle (since it returns true for everything it doesn't
8771 understand). */
8772 if (TREE_CODE (exp) == STRING_CST
8773 || TREE_CODE (exp) == CONSTRUCTOR
8774 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8775 return decl_readonly_section (exp, 0);
8776 else
8777 return false;
8778 }
8779
8780 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8781 to the call, and TYPE is its return type.
8782
8783 Return NULL_TREE if no simplification was possible, otherwise return the
8784 simplified form of the call as a tree.
8785
8786 The simplified form may be a constant or other expression which
8787 computes the same value, but in a more efficient manner (including
8788 calls to other builtin functions).
8789
8790 The call may contain arguments which need to be evaluated, but
8791 which are not useful to determine the result of the call. In
8792 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8793 COMPOUND_EXPR will be an argument which must be evaluated.
8794 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8795 COMPOUND_EXPR in the chain will contain the tree for the simplified
8796 form of the builtin function call. */
8797
8798 static tree
8799 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8800 {
8801 if (!validate_arg (s1, POINTER_TYPE)
8802 || !validate_arg (s2, POINTER_TYPE))
8803 return NULL_TREE;
8804 else
8805 {
8806 tree fn;
8807 const char *p1, *p2;
8808
8809 p2 = c_getstr (s2);
8810 if (p2 == NULL)
8811 return NULL_TREE;
8812
8813 p1 = c_getstr (s1);
8814 if (p1 != NULL)
8815 {
8816 const char *r = strstr (p1, p2);
8817 tree tem;
8818
8819 if (r == NULL)
8820 return build_int_cst (TREE_TYPE (s1), 0);
8821
8822 /* Return an offset into the constant string argument. */
8823 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8824 return fold_convert_loc (loc, type, tem);
8825 }
8826
8827 /* The argument is const char *, and the result is char *, so we need
8828 a type conversion here to avoid a warning. */
8829 if (p2[0] == '\0')
8830 return fold_convert_loc (loc, type, s1);
8831
8832 if (p2[1] != '\0')
8833 return NULL_TREE;
8834
8835 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8836 if (!fn)
8837 return NULL_TREE;
8838
8839 /* New argument list transforming strstr(s1, s2) to
8840 strchr(s1, s2[0]). */
8841 return build_call_expr_loc (loc, fn, 2, s1,
8842 build_int_cst (integer_type_node, p2[0]));
8843 }
8844 }
8845
8846 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8847 the call, and TYPE is its return type.
8848
8849 Return NULL_TREE if no simplification was possible, otherwise return the
8850 simplified form of the call as a tree.
8851
8852 The simplified form may be a constant or other expression which
8853 computes the same value, but in a more efficient manner (including
8854 calls to other builtin functions).
8855
8856 The call may contain arguments which need to be evaluated, but
8857 which are not useful to determine the result of the call. In
8858 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8859 COMPOUND_EXPR will be an argument which must be evaluated.
8860 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8861 COMPOUND_EXPR in the chain will contain the tree for the simplified
8862 form of the builtin function call. */
8863
8864 static tree
8865 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8866 {
8867 if (!validate_arg (s1, POINTER_TYPE)
8868 || !validate_arg (s2, INTEGER_TYPE))
8869 return NULL_TREE;
8870 else
8871 {
8872 const char *p1;
8873
8874 if (TREE_CODE (s2) != INTEGER_CST)
8875 return NULL_TREE;
8876
8877 p1 = c_getstr (s1);
8878 if (p1 != NULL)
8879 {
8880 char c;
8881 const char *r;
8882 tree tem;
8883
8884 if (target_char_cast (s2, &c))
8885 return NULL_TREE;
8886
8887 r = strchr (p1, c);
8888
8889 if (r == NULL)
8890 return build_int_cst (TREE_TYPE (s1), 0);
8891
8892 /* Return an offset into the constant string argument. */
8893 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8894 return fold_convert_loc (loc, type, tem);
8895 }
8896 return NULL_TREE;
8897 }
8898 }
8899
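/* The strchr folding relies on ordinary C semantics of strchr over a
   string literal; a minimal illustration (not part of the compiler code):

     #include <assert.h>
     #include <string.h>

     int
     main (void)
     {
       const char *s = "abcabc";
       assert (strchr (s, 'b') == s + 1);   // first occurrence
       assert (strchr (s, 'z') == 0);       // absent: null pointer
       assert (strchr (s, '\0') == s + 6);  // terminator is searchable
       return 0;
     }
*/
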
8900 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8901 the call, and TYPE is its return type.
8902
8903 Return NULL_TREE if no simplification was possible, otherwise return the
8904 simplified form of the call as a tree.
8905
8906 The simplified form may be a constant or other expression which
8907 computes the same value, but in a more efficient manner (including
8908 calls to other builtin functions).
8909
8910 The call may contain arguments which need to be evaluated, but
8911 which are not useful to determine the result of the call. In
8912 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8913 COMPOUND_EXPR will be an argument which must be evaluated.
8914 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8915 COMPOUND_EXPR in the chain will contain the tree for the simplified
8916 form of the builtin function call. */
8917
8918 static tree
8919 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8920 {
8921 if (!validate_arg (s1, POINTER_TYPE)
8922 || !validate_arg (s2, INTEGER_TYPE))
8923 return NULL_TREE;
8924 else
8925 {
8926 tree fn;
8927 const char *p1;
8928
8929 if (TREE_CODE (s2) != INTEGER_CST)
8930 return NULL_TREE;
8931
8932 p1 = c_getstr (s1);
8933 if (p1 != NULL)
8934 {
8935 char c;
8936 const char *r;
8937 tree tem;
8938
8939 if (target_char_cast (s2, &c))
8940 return NULL_TREE;
8941
8942 r = strrchr (p1, c);
8943
8944 if (r == NULL)
8945 return build_int_cst (TREE_TYPE (s1), 0);
8946
8947 /* Return an offset into the constant string argument. */
8948 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8949 return fold_convert_loc (loc, type, tem);
8950 }
8951
8952 if (! integer_zerop (s2))
8953 return NULL_TREE;
8954
8955 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8956 if (!fn)
8957 return NULL_TREE;
8958
8959 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8960 return build_call_expr_loc (loc, fn, 2, s1, s2);
8961 }
8962 }
8963
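/* Likewise for strrchr: the constant fold picks the last occurrence, and
   strrchr (s, '\0') is interchangeable with strchr (s, '\0'), which is the
   rewrite performed above.  A small illustrative check:

     #include <assert.h>
     #include <string.h>

     int
     main (void)
     {
       const char *s = "abcabc";
       assert (strrchr (s, 'b') == s + 4);            // last occurrence
       assert (strrchr (s, '\0') == strchr (s, '\0'));
       return 0;
     }
*/
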
8964 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8965 to the call, and TYPE is its return type.
8966
8967 Return NULL_TREE if no simplification was possible, otherwise return the
8968 simplified form of the call as a tree.
8969
8970 The simplified form may be a constant or other expression which
8971 computes the same value, but in a more efficient manner (including
8972 calls to other builtin functions).
8973
8974 The call may contain arguments which need to be evaluated, but
8975 which are not useful to determine the result of the call. In
8976 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8977 COMPOUND_EXPR will be an argument which must be evaluated.
8978 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8979 COMPOUND_EXPR in the chain will contain the tree for the simplified
8980 form of the builtin function call. */
8981
8982 static tree
8983 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8984 {
8985 if (!validate_arg (s1, POINTER_TYPE)
8986 || !validate_arg (s2, POINTER_TYPE))
8987 return NULL_TREE;
8988 else
8989 {
8990 tree fn;
8991 const char *p1, *p2;
8992
8993 p2 = c_getstr (s2);
8994 if (p2 == NULL)
8995 return NULL_TREE;
8996
8997 p1 = c_getstr (s1);
8998 if (p1 != NULL)
8999 {
9000 const char *r = strpbrk (p1, p2);
9001 tree tem;
9002
9003 if (r == NULL)
9004 return build_int_cst (TREE_TYPE (s1), 0);
9005
9006 /* Return an offset into the constant string argument. */
9007 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9008 return fold_convert_loc (loc, type, tem);
9009 }
9010
9011 if (p2[0] == '\0')
9012 /* strpbrk(x, "") == NULL.
9013 Evaluate and ignore s1 in case it had side-effects. */
9014 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9015
9016 if (p2[1] != '\0')
9017 return NULL_TREE; /* Really call strpbrk. */
9018
9019 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9020 if (!fn)
9021 return NULL_TREE;
9022
9023 /* New argument list transforming strpbrk(s1, s2) to
9024 strchr(s1, s2[0]). */
9025 return build_call_expr_loc (loc, fn, 2, s1,
9026 build_int_cst (integer_type_node, p2[0]));
9027 }
9028 }
9029
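/* The strpbrk folds mirror these identities: an empty accept set never
   matches, and a one-character accept set degenerates to strchr.  A short
   illustrative check:

     #include <assert.h>
     #include <string.h>

     int
     main (void)
     {
       const char *s = "hello";
       assert (strpbrk (s, "") == 0);                 // empty set: NULL
       assert (strpbrk (s, "l") == strchr (s, 'l'));  // single char
       assert (strpbrk (s, "ol") == s + 2);           // first of either
       return 0;
     }
*/
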
9030 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9031 to the call.
9032
9033 Return NULL_TREE if no simplification was possible, otherwise return the
9034 simplified form of the call as a tree.
9035
9036 The simplified form may be a constant or other expression which
9037 computes the same value, but in a more efficient manner (including
9038 calls to other builtin functions).
9039
9040 The call may contain arguments which need to be evaluated, but
9041 which are not useful to determine the result of the call. In
9042 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9043 COMPOUND_EXPR will be an argument which must be evaluated.
9044 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9045 COMPOUND_EXPR in the chain will contain the tree for the simplified
9046 form of the builtin function call. */
9047
9048 static tree
9049 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9050 {
9051 if (!validate_arg (s1, POINTER_TYPE)
9052 || !validate_arg (s2, POINTER_TYPE))
9053 return NULL_TREE;
9054 else
9055 {
9056 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9057
9058 /* If either argument is "", the result is 0. */
9059 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9060 /* Evaluate and ignore both arguments in case either one has
9061 side-effects. */
9062 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9063 s1, s2);
9064 return NULL_TREE;
9065 }
9066 }
9067
9068 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9069 to the call.
9070
9071 Return NULL_TREE if no simplification was possible, otherwise return the
9072 simplified form of the call as a tree.
9073
9074 The simplified form may be a constant or other expression which
9075 computes the same value, but in a more efficient manner (including
9076 calls to other builtin functions).
9077
9078 The call may contain arguments which need to be evaluated, but
9079 which are not useful to determine the result of the call. In
9080 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9081 COMPOUND_EXPR will be an argument which must be evaluated.
9082 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9083 COMPOUND_EXPR in the chain will contain the tree for the simplified
9084 form of the builtin function call. */
9085
9086 static tree
9087 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9088 {
9089 if (!validate_arg (s1, POINTER_TYPE)
9090 || !validate_arg (s2, POINTER_TYPE))
9091 return NULL_TREE;
9092 else
9093 {
9094 /* If the first argument is "", the result is 0. */
9095 const char *p1 = c_getstr (s1);
9096 if (p1 && *p1 == '\0')
9097 {
9098 /* Evaluate and ignore argument s2 in case it has
9099 side-effects. */
9100 return omit_one_operand_loc (loc, size_type_node,
9101 size_zero_node, s2);
9102 }
9103
9104 /* If the second argument is "", return __builtin_strlen(s1). */
9105 const char *p2 = c_getstr (s2);
9106 if (p2 && *p2 == '\0')
9107 {
9108 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9109
9110 /* If the replacement _DECL isn't initialized, don't do the
9111 transformation. */
9112 if (!fn)
9113 return NULL_TREE;
9114
9115 return build_call_expr_loc (loc, fn, 1, s1);
9116 }
9117 return NULL_TREE;
9118 }
9119 }
9120
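/* The strspn and strcspn folds above correspond to these edge cases: with
   an empty operand strspn is always 0, and strcspn against an empty reject
   set is just strlen.  A brief illustrative check:

     #include <assert.h>
     #include <string.h>

     int
     main (void)
     {
       assert (strspn ("abc", "") == 0);                // empty accept set
       assert (strspn ("", "abc") == 0);                // empty subject
       assert (strcspn ("", "abc") == 0);               // empty subject
       assert (strcspn ("abc", "") == strlen ("abc"));  // no rejects
       return 0;
     }
*/
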
9121 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9122 produced, false otherwise. This is done so that we don't output the error
9123 or warning two or three times. */
9124
9125 bool
9126 fold_builtin_next_arg (tree exp, bool va_start_p)
9127 {
9128 tree fntype = TREE_TYPE (current_function_decl);
9129 int nargs = call_expr_nargs (exp);
9130 tree arg;
9131 /* There is a good chance the current input_location points inside the
9132 definition of the va_start macro (perhaps on the token for the
9133 builtin) in a system header, so warnings will not be emitted.
9134 Use the location in real source code. */
9135 source_location current_location =
9136 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9137 NULL);
9138
9139 if (!stdarg_p (fntype))
9140 {
9141 error ("%<va_start%> used in function with fixed args");
9142 return true;
9143 }
9144
9145 if (va_start_p)
9146 {
9147 if (nargs != 2)
9148 {
9149 error ("wrong number of arguments to function %<va_start%>");
9150 return true;
9151 }
9152 arg = CALL_EXPR_ARG (exp, 1);
9153 }
9154 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9155 when we checked the arguments and if needed issued a warning. */
9156 else
9157 {
9158 if (nargs == 0)
9159 {
9160 /* Evidently an out of date version of <stdarg.h>; can't validate
9161 va_start's second argument, but can still work as intended. */
9162 warning_at (current_location,
9163 OPT_Wvarargs,
9164 "%<__builtin_next_arg%> called without an argument");
9165 return true;
9166 }
9167 else if (nargs > 1)
9168 {
9169 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9170 return true;
9171 }
9172 arg = CALL_EXPR_ARG (exp, 0);
9173 }
9174
9175 if (TREE_CODE (arg) == SSA_NAME)
9176 arg = SSA_NAME_VAR (arg);
9177
9178 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9179 or __builtin_next_arg (0) the first time we see it, after checking
9180 the arguments and if needed issuing a warning. */
9181 if (!integer_zerop (arg))
9182 {
9183 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9184
9185 /* Strip off all nops for the sake of the comparison. This
9186 is not quite the same as STRIP_NOPS. It does more.
9187 We must also strip off INDIRECT_EXPR for C++ reference
9188 parameters. */
9189 while (CONVERT_EXPR_P (arg)
9190 || TREE_CODE (arg) == INDIRECT_REF)
9191 arg = TREE_OPERAND (arg, 0);
9192 if (arg != last_parm)
9193 {
9194 /* FIXME: Sometimes with the tree optimizers we can end up with
9195 something other than the last argument even though the user
9196 used the last argument. We just warn here; the resulting
9197 va_start may still be wrong code because of
9198 it. */
9199 warning_at (current_location,
9200 OPT_Wvarargs,
9201 "second parameter of %<va_start%> not last named argument");
9202 }
9203
9204 /* Undefined by C99 7.15.1.4p4 (va_start):
9205 "If the parameter parmN is declared with the register storage
9206 class, with a function or array type, or with a type that is
9207 not compatible with the type that results after application of
9208 the default argument promotions, the behavior is undefined."
9209 */
9210 else if (DECL_REGISTER (arg))
9211 {
9212 warning_at (current_location,
9213 OPT_Wvarargs,
9214 "undefined behaviour when second parameter of "
9215 "%<va_start%> is declared with %<register%> storage");
9216 }
9217
9218 /* We want to verify the second parameter just once before the tree
9219 optimizers are run and then avoid keeping it in the tree,
9220 as otherwise we could warn even for correct code like:
9221 void foo (int i, ...)
9222 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9223 if (va_start_p)
9224 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9225 else
9226 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9227 }
9228 return false;
9229 }
9230
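/* The checks above catch the common va_start mistakes.  A correct use for
   reference (illustrative only):

     #include <stdarg.h>

     int
     sum (int count, ...)
     {
       va_list ap;
       int total = 0;
       va_start (ap, count);       // second argument must be the last
                                   // named parameter, here COUNT
       for (int i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   Passing anything other than the last named parameter, or a parameter
   declared with the register storage class, triggers the -Wvarargs
   diagnostics emitted above.  */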
9231
9232 /* Expand a call EXP to __builtin_object_size. */
9233
9234 static rtx
9235 expand_builtin_object_size (tree exp)
9236 {
9237 tree ost;
9238 int object_size_type;
9239 tree fndecl = get_callee_fndecl (exp);
9240
9241 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9242 {
9243 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9244 exp, fndecl);
9245 expand_builtin_trap ();
9246 return const0_rtx;
9247 }
9248
9249 ost = CALL_EXPR_ARG (exp, 1);
9250 STRIP_NOPS (ost);
9251
9252 if (TREE_CODE (ost) != INTEGER_CST
9253 || tree_int_cst_sgn (ost) < 0
9254 || compare_tree_int (ost, 3) > 0)
9255 {
9256 error ("%Klast argument of %D is not integer constant between 0 and 3",
9257 exp, fndecl);
9258 expand_builtin_trap ();
9259 return const0_rtx;
9260 }
9261
9262 object_size_type = tree_to_shwi (ost);
9263
9264 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9265 }
9266
9267 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9268 FCODE is the BUILT_IN_* to use.
9269 Return NULL_RTX if we failed; the caller should emit a normal call,
9270 otherwise try to get the result in TARGET, if convenient (and in
9271 mode MODE if that's convenient). */
9272
9273 static rtx
9274 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9275 enum built_in_function fcode)
9276 {
9277 tree dest, src, len, size;
9278
9279 if (!validate_arglist (exp,
9280 POINTER_TYPE,
9281 fcode == BUILT_IN_MEMSET_CHK
9282 ? INTEGER_TYPE : POINTER_TYPE,
9283 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9284 return NULL_RTX;
9285
9286 dest = CALL_EXPR_ARG (exp, 0);
9287 src = CALL_EXPR_ARG (exp, 1);
9288 len = CALL_EXPR_ARG (exp, 2);
9289 size = CALL_EXPR_ARG (exp, 3);
9290
9291 if (! tree_fits_uhwi_p (size))
9292 return NULL_RTX;
9293
9294 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9295 {
9296 tree fn;
9297
9298 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9299 {
9300 warning_at (tree_nonartificial_location (exp),
9301 0, "%Kcall to %D will always overflow destination buffer",
9302 exp, get_callee_fndecl (exp));
9303 return NULL_RTX;
9304 }
9305
9306 fn = NULL_TREE;
9307 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9308 mem{cpy,pcpy,move,set} is available. */
9309 switch (fcode)
9310 {
9311 case BUILT_IN_MEMCPY_CHK:
9312 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9313 break;
9314 case BUILT_IN_MEMPCPY_CHK:
9315 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9316 break;
9317 case BUILT_IN_MEMMOVE_CHK:
9318 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9319 break;
9320 case BUILT_IN_MEMSET_CHK:
9321 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9322 break;
9323 default:
9324 break;
9325 }
9326
9327 if (! fn)
9328 return NULL_RTX;
9329
9330 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9331 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9332 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9333 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9334 }
9335 else if (fcode == BUILT_IN_MEMSET_CHK)
9336 return NULL_RTX;
9337 else
9338 {
9339 unsigned int dest_align = get_pointer_alignment (dest);
9340
9341 /* If DEST is not a pointer type, call the normal function. */
9342 if (dest_align == 0)
9343 return NULL_RTX;
9344
9345 /* If SRC and DEST are the same (and not volatile), do nothing. */
9346 if (operand_equal_p (src, dest, 0))
9347 {
9348 tree expr;
9349
9350 if (fcode != BUILT_IN_MEMPCPY_CHK)
9351 {
9352 /* Evaluate and ignore LEN in case it has side-effects. */
9353 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9354 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9355 }
9356
9357 expr = fold_build_pointer_plus (dest, len);
9358 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9359 }
9360
9361 /* __memmove_chk special case. */
9362 if (fcode == BUILT_IN_MEMMOVE_CHK)
9363 {
9364 unsigned int src_align = get_pointer_alignment (src);
9365
9366 if (src_align == 0)
9367 return NULL_RTX;
9368
9369 /* If src is categorized for a readonly section we can use
9370 normal __memcpy_chk. */
9371 if (readonly_data_expr (src))
9372 {
9373 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9374 if (!fn)
9375 return NULL_RTX;
9376 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9377 dest, src, len, size);
9378 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9379 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9380 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9381 }
9382 }
9383 return NULL_RTX;
9384 }
9385 }
9386
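/* At the source level, a fortified call expanded by this function looks
   roughly like the following (illustrative; the wrapper that produces it
   lives in the C library headers):

     char buf[8];

     void
     copy (const char *src)
     {
       // With a known 8-byte destination and a constant 16-byte length,
       // the compile-time check above diagnoses a guaranteed overflow.
       __builtin___memcpy_chk (buf, src, 16,
                               __builtin_object_size (buf, 0));
     }

   When the destination size is unknown, __builtin_object_size yields
   (size_t) -1 and the call is lowered to a plain memcpy.  */
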
9387 /* Emit warning if a buffer overflow is detected at compile time. */
9388
9389 static void
9390 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9391 {
9392 int is_strlen = 0;
9393 tree len, size;
9394 location_t loc = tree_nonartificial_location (exp);
9395
9396 switch (fcode)
9397 {
9398 case BUILT_IN_STRCPY_CHK:
9399 case BUILT_IN_STPCPY_CHK:
9400 /* For __strcat_chk the warning will be emitted only if overflowing
9401 by at least strlen (dest) + 1 bytes. */
9402 case BUILT_IN_STRCAT_CHK:
9403 len = CALL_EXPR_ARG (exp, 1);
9404 size = CALL_EXPR_ARG (exp, 2);
9405 is_strlen = 1;
9406 break;
9407 case BUILT_IN_STRNCAT_CHK:
9408 case BUILT_IN_STRNCPY_CHK:
9409 case BUILT_IN_STPNCPY_CHK:
9410 len = CALL_EXPR_ARG (exp, 2);
9411 size = CALL_EXPR_ARG (exp, 3);
9412 break;
9413 case BUILT_IN_SNPRINTF_CHK:
9414 case BUILT_IN_VSNPRINTF_CHK:
9415 len = CALL_EXPR_ARG (exp, 1);
9416 size = CALL_EXPR_ARG (exp, 3);
9417 break;
9418 default:
9419 gcc_unreachable ();
9420 }
9421
9422 if (!len || !size)
9423 return;
9424
9425 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9426 return;
9427
9428 if (is_strlen)
9429 {
9430 len = c_strlen (len, 1);
9431 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9432 return;
9433 }
9434 else if (fcode == BUILT_IN_STRNCAT_CHK)
9435 {
9436 tree src = CALL_EXPR_ARG (exp, 1);
9437 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9438 return;
9439 src = c_strlen (src, 1);
9440 if (! src || ! tree_fits_uhwi_p (src))
9441 {
9442 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9443 exp, get_callee_fndecl (exp));
9444 return;
9445 }
9446 else if (tree_int_cst_lt (src, size))
9447 return;
9448 }
9449 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9450 return;
9451
9452 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9453 exp, get_callee_fndecl (exp));
9454 }
9455
9456 /* Emit warning if a buffer overflow is detected at compile time
9457 in __sprintf_chk/__vsprintf_chk calls. */
9458
9459 static void
9460 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9461 {
9462 tree size, len, fmt;
9463 const char *fmt_str;
9464 int nargs = call_expr_nargs (exp);
9465
9466 /* Verify the required arguments in the original call. */
9467
9468 if (nargs < 4)
9469 return;
9470 size = CALL_EXPR_ARG (exp, 2);
9471 fmt = CALL_EXPR_ARG (exp, 3);
9472
9473 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9474 return;
9475
9476 /* Check whether the format is a literal string constant. */
9477 fmt_str = c_getstr (fmt);
9478 if (fmt_str == NULL)
9479 return;
9480
9481 if (!init_target_chars ())
9482 return;
9483
9484 /* If the format doesn't contain % args or %%, we know its size. */
9485 if (strchr (fmt_str, target_percent) == 0)
9486 len = build_int_cstu (size_type_node, strlen (fmt_str));
9487 /* If the format is "%s" and first ... argument is a string literal,
9488 we know it too. */
9489 else if (fcode == BUILT_IN_SPRINTF_CHK
9490 && strcmp (fmt_str, target_percent_s) == 0)
9491 {
9492 tree arg;
9493
9494 if (nargs < 5)
9495 return;
9496 arg = CALL_EXPR_ARG (exp, 4);
9497 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9498 return;
9499
9500 len = c_strlen (arg, 1);
9501 if (!len || ! tree_fits_uhwi_p (len))
9502 return;
9503 }
9504 else
9505 return;
9506
9507 if (! tree_int_cst_lt (len, size))
9508 warning_at (tree_nonartificial_location (exp),
9509 0, "%Kcall to %D will always overflow destination buffer",
9510 exp, get_callee_fndecl (exp));
9511 }
9512
9513 /* Emit warning if a free is called with address of a variable. */
9514
9515 static void
9516 maybe_emit_free_warning (tree exp)
9517 {
9518 tree arg = CALL_EXPR_ARG (exp, 0);
9519
9520 STRIP_NOPS (arg);
9521 if (TREE_CODE (arg) != ADDR_EXPR)
9522 return;
9523
9524 arg = get_base_address (TREE_OPERAND (arg, 0));
9525 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9526 return;
9527
9528 if (SSA_VAR_P (arg))
9529 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9530 "%Kattempt to free a non-heap object %qD", exp, arg);
9531 else
9532 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9533 "%Kattempt to free a non-heap object", exp);
9534 }
9535
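/* The pattern this warning targets, for reference (illustrative only;
   compiling it is expected to produce the -Wfree-nonheap-object
   diagnostic emitted above):

     #include <stdlib.h>

     void
     f (void)
     {
       int x;
       free (&x);        // attempt to free a non-heap object 'x'
     }
*/
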
9536 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9537 if possible. */
9538
9539 static tree
9540 fold_builtin_object_size (tree ptr, tree ost)
9541 {
9542 unsigned HOST_WIDE_INT bytes;
9543 int object_size_type;
9544
9545 if (!validate_arg (ptr, POINTER_TYPE)
9546 || !validate_arg (ost, INTEGER_TYPE))
9547 return NULL_TREE;
9548
9549 STRIP_NOPS (ost);
9550
9551 if (TREE_CODE (ost) != INTEGER_CST
9552 || tree_int_cst_sgn (ost) < 0
9553 || compare_tree_int (ost, 3) > 0)
9554 return NULL_TREE;
9555
9556 object_size_type = tree_to_shwi (ost);
9557
9558 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9559 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9560 and (size_t) 0 for types 2 and 3. */
9561 if (TREE_SIDE_EFFECTS (ptr))
9562 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9563
9564 if (TREE_CODE (ptr) == ADDR_EXPR)
9565 {
9566 bytes = compute_builtin_object_size (ptr, object_size_type);
9567 if (wi::fits_to_tree_p (bytes, size_type_node))
9568 return build_int_cstu (size_type_node, bytes);
9569 }
9570 else if (TREE_CODE (ptr) == SSA_NAME)
9571 {
9572 /* If object size is not known yet, delay folding until
9573 later. Maybe subsequent passes will help determining
9574 it. */
9575 bytes = compute_builtin_object_size (ptr, object_size_type);
9576 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9577 && wi::fits_to_tree_p (bytes, size_type_node))
9578 return build_int_cstu (size_type_node, bytes);
9579 }
9580
9581 return NULL_TREE;
9582 }
9583
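/* For reference, the observable behaviour of __builtin_object_size that
   this fold implements (illustrative; exact results depend on what the
   compiler can prove about the pointer):

     #include <assert.h>
     #include <stddef.h>

     char buf[32];
     char *p;          // points to storage the compiler cannot see through

     int
     main (void)
     {
       // Known object: the size folds to a constant, offsets are deducted.
       assert (__builtin_object_size (buf, 0) == 32);
       assert (__builtin_object_size (buf + 8, 0) == 24);
       // Unknown pointer: -1 for types 0 and 1, 0 for types 2 and 3.
       assert (__builtin_object_size (p, 0) == (size_t) -1);
       assert (__builtin_object_size (p, 2) == 0);
       return 0;
     }
*/
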
9584 /* Builtins with folding operations that operate on "..." arguments
9585 need special handling; we need to store the arguments in a convenient
9586 data structure before attempting any folding. Fortunately there are
9587 only a few builtins that fall into this category. FNDECL is the
9588 function, EXP is the CALL_EXPR for the call. */
9589
9590 static tree
9591 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9592 {
9593 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9594 tree ret = NULL_TREE;
9595
9596 switch (fcode)
9597 {
9598 case BUILT_IN_FPCLASSIFY:
9599 ret = fold_builtin_fpclassify (loc, args, nargs);
9600 break;
9601
9602 default:
9603 break;
9604 }
9605 if (ret)
9606 {
9607 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9608 SET_EXPR_LOCATION (ret, loc);
9609 TREE_NO_WARNING (ret) = 1;
9610 return ret;
9611 }
9612 return NULL_TREE;
9613 }
9614
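/* The only varargs builtin folded here is __builtin_fpclassify, which takes
   the five classification values followed by the operand; the fpclassify
   macro in <math.h> may be implemented in terms of it.  An illustrative
   check:

     #include <assert.h>
     #include <math.h>

     int
     main (void)
     {
       assert (__builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                     FP_SUBNORMAL, FP_ZERO, 1.0)
               == FP_NORMAL);
       assert (__builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                     FP_SUBNORMAL, FP_ZERO, 0.0)
               == FP_ZERO);
       return 0;
     }
*/
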
9615 /* Initialize format string characters in the target charset. */
9616
9617 bool
9618 init_target_chars (void)
9619 {
9620 static bool init;
9621 if (!init)
9622 {
9623 target_newline = lang_hooks.to_target_charset ('\n');
9624 target_percent = lang_hooks.to_target_charset ('%');
9625 target_c = lang_hooks.to_target_charset ('c');
9626 target_s = lang_hooks.to_target_charset ('s');
9627 if (target_newline == 0 || target_percent == 0 || target_c == 0
9628 || target_s == 0)
9629 return false;
9630
9631 target_percent_c[0] = target_percent;
9632 target_percent_c[1] = target_c;
9633 target_percent_c[2] = '\0';
9634
9635 target_percent_s[0] = target_percent;
9636 target_percent_s[1] = target_s;
9637 target_percent_s[2] = '\0';
9638
9639 target_percent_s_newline[0] = target_percent;
9640 target_percent_s_newline[1] = target_s;
9641 target_percent_s_newline[2] = target_newline;
9642 target_percent_s_newline[3] = '\0';
9643
9644 init = true;
9645 }
9646 return true;
9647 }
9648
9649 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9650 and no overflow/underflow occurred. INEXACT is true if M was not
9651 exactly calculated. TYPE is the tree type for the result. This
9652 function assumes that you cleared the MPFR flags and then
9653 calculated M to see if anything subsequently set a flag prior to
9654 entering this function. Return NULL_TREE if any checks fail. */
9655
9656 static tree
9657 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9658 {
9659 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9660 overflow/underflow occurred. If -frounding-math, proceed iff the
9661 result of calling FUNC was exact. */
9662 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9663 && (!flag_rounding_math || !inexact))
9664 {
9665 REAL_VALUE_TYPE rr;
9666
9667 real_from_mpfr (&rr, m, type, GMP_RNDN);
9668 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9669 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9670 but the mpfr_t is not, then we underflowed in the
9671 conversion. */
9672 if (real_isfinite (&rr)
9673 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9674 {
9675 REAL_VALUE_TYPE rmode;
9676
9677 real_convert (&rmode, TYPE_MODE (type), &rr);
9678 /* Proceed iff the specified mode can hold the value. */
9679 if (real_identical (&rmode, &rr))
9680 return build_real (type, rmode);
9681 }
9682 }
9683 return NULL_TREE;
9684 }
9685
9686 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9687 number and no overflow/underflow occurred. INEXACT is true if M
9688 was not exactly calculated. TYPE is the tree type for the result.
9689 This function assumes that you cleared the MPFR flags and then
9690 calculated M to see if anything subsequently set a flag prior to
9691 entering this function. Return NULL_TREE if any checks fail, if
9692 FORCE_CONVERT is true, then bypass the checks. */
9693
9694 static tree
9695 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9696 {
9697 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9698 overflow/underflow occurred. If -frounding-math, proceed iff the
9699 result of calling FUNC was exact. */
9700 if (force_convert
9701 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9702 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9703 && (!flag_rounding_math || !inexact)))
9704 {
9705 REAL_VALUE_TYPE re, im;
9706
9707 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9708 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9709 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9710 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9711 but the mpfr_t is not, then we underflowed in the
9712 conversion. */
9713 if (force_convert
9714 || (real_isfinite (&re) && real_isfinite (&im)
9715 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9716 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9717 {
9718 REAL_VALUE_TYPE re_mode, im_mode;
9719
9720 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9721 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9722 /* Proceed iff the specified mode can hold the value. */
9723 if (force_convert
9724 || (real_identical (&re_mode, &re)
9725 && real_identical (&im_mode, &im)))
9726 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9727 build_real (TREE_TYPE (type), im_mode));
9728 }
9729 }
9730 return NULL_TREE;
9731 }
9732
9733 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9734 the pointer *(ARG_QUO) and return the result. The type is taken
9735 from the type of ARG0 and is used for setting the precision of the
9736 calculation and results. */
9737
9738 static tree
9739 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9740 {
9741 tree const type = TREE_TYPE (arg0);
9742 tree result = NULL_TREE;
9743
9744 STRIP_NOPS (arg0);
9745 STRIP_NOPS (arg1);
9746
9747 /* To proceed, MPFR must exactly represent the target floating point
9748 format, which only happens when the target base equals two. */
9749 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9750 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9751 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9752 {
9753 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9754 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9755
9756 if (real_isfinite (ra0) && real_isfinite (ra1))
9757 {
9758 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9759 const int prec = fmt->p;
9760 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9761 tree result_rem;
9762 long integer_quo;
9763 mpfr_t m0, m1;
9764
9765 mpfr_inits2 (prec, m0, m1, NULL);
9766 mpfr_from_real (m0, ra0, GMP_RNDN);
9767 mpfr_from_real (m1, ra1, GMP_RNDN);
9768 mpfr_clear_flags ();
9769 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9770 /* Remquo is independent of the rounding mode, so pass
9771 inexact=0 to do_mpfr_ckconv(). */
9772 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9773 mpfr_clears (m0, m1, NULL);
9774 if (result_rem)
9775 {
9776 /* MPFR calculates quo in the host's long so it may
9777 return more bits in quo than the target int can hold
9778 if sizeof(host long) > sizeof(target int). This can
9779 happen even for native compilers in LP64 mode. In
9780 these cases, modulo the quo value with the largest
9781 number that the target int can hold while leaving one
9782 bit for the sign. */
9783 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9784 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9785
9786 /* Dereference the quo pointer argument. */
9787 arg_quo = build_fold_indirect_ref (arg_quo);
9788 /* Proceed iff a valid pointer type was passed in. */
9789 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9790 {
9791 /* Set the value. */
9792 tree result_quo
9793 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9794 build_int_cst (TREE_TYPE (arg_quo),
9795 integer_quo));
9796 TREE_SIDE_EFFECTS (result_quo) = 1;
9797 /* Combine the quo assignment with the rem. */
9798 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9799 result_quo, result_rem));
9800 }
9801 }
9802 }
9803 }
9804 return result;
9805 }
9806
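/* remquo computes the IEEE remainder and, through its pointer argument,
   the low bits of the integral quotient; the constant fold above mirrors
   the C library semantics (illustrative):

     #include <assert.h>
     #include <math.h>

     int
     main (void)
     {
       int quo;
       double rem = remquo (5.0, 3.0, &quo);
       // The nearest integral quotient of 5/3 is 2, so the remainder is -1
       // and the low three bits of the quotient are 2.
       assert (rem == -1.0);
       assert ((quo & 7) == 2);
       return 0;
     }
*/
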
9807 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9808 resulting value as a tree with type TYPE. The mpfr precision is
9809 set to the precision of TYPE. We assume that this mpfr function
9810 returns zero if the result could be calculated exactly within the
9811 requested precision. In addition, the integer pointer represented
9812 by ARG_SG will be dereferenced and set to the appropriate signgam
9813 (-1,1) value. */
9814
9815 static tree
9816 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9817 {
9818 tree result = NULL_TREE;
9819
9820 STRIP_NOPS (arg);
9821
9822 /* To proceed, MPFR must exactly represent the target floating point
9823 format, which only happens when the target base equals two. Also
9824 verify ARG is a constant and that ARG_SG is an int pointer. */
9825 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9826 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9827 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9828 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9829 {
9830 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9831
9832 /* In addition to NaN and Inf, the argument cannot be zero or a
9833 negative integer. */
9834 if (real_isfinite (ra)
9835 && ra->cl != rvc_zero
9836 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9837 {
9838 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9839 const int prec = fmt->p;
9840 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9841 int inexact, sg;
9842 mpfr_t m;
9843 tree result_lg;
9844
9845 mpfr_init2 (m, prec);
9846 mpfr_from_real (m, ra, GMP_RNDN);
9847 mpfr_clear_flags ();
9848 inexact = mpfr_lgamma (m, &sg, m, rnd);
9849 result_lg = do_mpfr_ckconv (m, type, inexact);
9850 mpfr_clear (m);
9851 if (result_lg)
9852 {
9853 tree result_sg;
9854
9855 /* Dereference the arg_sg pointer argument. */
9856 arg_sg = build_fold_indirect_ref (arg_sg);
9857 /* Assign the signgam value into *arg_sg. */
9858 result_sg = fold_build2 (MODIFY_EXPR,
9859 TREE_TYPE (arg_sg), arg_sg,
9860 build_int_cst (TREE_TYPE (arg_sg), sg));
9861 TREE_SIDE_EFFECTS (result_sg) = 1;
9862 /* Combine the signgam assignment with the lgamma result. */
9863 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9864 result_sg, result_lg));
9865 }
9866 }
9867 }
9868
9869 return result;
9870 }
9871
9872 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9873 mpc function FUNC on it and return the resulting value as a tree
9874 with type TYPE. The mpfr precision is set to the precision of
9875 TYPE. We assume that function FUNC returns zero if the result
9876 could be calculated exactly within the requested precision. If
9877 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9878 in the arguments and/or results. */
9879
9880 tree
9881 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9882 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9883 {
9884 tree result = NULL_TREE;
9885
9886 STRIP_NOPS (arg0);
9887 STRIP_NOPS (arg1);
9888
9889 /* To proceed, MPFR must exactly represent the target floating point
9890 format, which only happens when the target base equals two. */
9891 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9892 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9893 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9894 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9895 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9896 {
9897 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9898 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9899 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9900 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9901
9902 if (do_nonfinite
9903 || (real_isfinite (re0) && real_isfinite (im0)
9904 && real_isfinite (re1) && real_isfinite (im1)))
9905 {
9906 const struct real_format *const fmt =
9907 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9908 const int prec = fmt->p;
9909 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9910 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9911 int inexact;
9912 mpc_t m0, m1;
9913
9914 mpc_init2 (m0, prec);
9915 mpc_init2 (m1, prec);
9916 mpfr_from_real (mpc_realref (m0), re0, rnd);
9917 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9918 mpfr_from_real (mpc_realref (m1), re1, rnd);
9919 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9920 mpfr_clear_flags ();
9921 inexact = func (m0, m0, m1, crnd);
9922 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9923 mpc_clear (m0);
9924 mpc_clear (m1);
9925 }
9926 }
9927
9928 return result;
9929 }
9930
9931 /* A wrapper function for builtin folding that prevents warnings for
9932 "statement without effect" and the like, caused by removing the
9933 call node earlier than the warning is generated. */
9934
9935 tree
9936 fold_call_stmt (gcall *stmt, bool ignore)
9937 {
9938 tree ret = NULL_TREE;
9939 tree fndecl = gimple_call_fndecl (stmt);
9940 location_t loc = gimple_location (stmt);
9941 if (fndecl
9942 && TREE_CODE (fndecl) == FUNCTION_DECL
9943 && DECL_BUILT_IN (fndecl)
9944 && !gimple_call_va_arg_pack_p (stmt))
9945 {
9946 int nargs = gimple_call_num_args (stmt);
9947 tree *args = (nargs > 0
9948 ? gimple_call_arg_ptr (stmt, 0)
9949 : &error_mark_node);
9950
9951 if (avoid_folding_inline_builtin (fndecl))
9952 return NULL_TREE;
9953 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9954 {
9955 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9956 }
9957 else
9958 {
9959 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9960 if (ret)
9961 {
9962 /* Propagate location information from original call to
9963 expansion of builtin. Otherwise things like
9964 maybe_emit_chk_warning, that operate on the expansion
9965 of a builtin, will use the wrong location information. */
9966 if (gimple_has_location (stmt))
9967 {
9968 tree realret = ret;
9969 if (TREE_CODE (ret) == NOP_EXPR)
9970 realret = TREE_OPERAND (ret, 0);
9971 if (CAN_HAVE_LOCATION_P (realret)
9972 && !EXPR_HAS_LOCATION (realret))
9973 SET_EXPR_LOCATION (realret, loc);
9974 return realret;
9975 }
9976 return ret;
9977 }
9978 }
9979 }
9980 return NULL_TREE;
9981 }
9982
9983 /* Look up the function in builtin_decl that corresponds to DECL
9984 and set ASMSPEC as its user assembler name. DECL must be a
9985 function decl that declares a builtin. */
9986
9987 void
9988 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9989 {
9990 tree builtin;
9991 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9992 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9993 && asmspec != 0);
9994
9995 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9996 set_user_assembler_name (builtin, asmspec);
9997 switch (DECL_FUNCTION_CODE (decl))
9998 {
9999 case BUILT_IN_MEMCPY:
10000 init_block_move_fn (asmspec);
10001 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
10002 break;
10003 case BUILT_IN_MEMSET:
10004 init_block_clear_fn (asmspec);
10005 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
10006 break;
10007 case BUILT_IN_MEMMOVE:
10008 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
10009 break;
10010 case BUILT_IN_MEMCMP:
10011 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
10012 break;
10013 case BUILT_IN_ABORT:
10014 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
10015 break;
10016 case BUILT_IN_FFS:
10017 if (INT_TYPE_SIZE < BITS_PER_WORD)
10018 {
10019 set_user_assembler_libfunc ("ffs", asmspec);
10020 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
10021 MODE_INT, 0), "ffs");
10022 }
10023 break;
10024 default:
10025 break;
10026 }
10027 }
10028
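/* The usual way user code reaches this function is a builtin declared with
   an asm label, e.g. (illustrative; "my_memcpy" is a made-up symbol name):

     #include <stddef.h>

     extern void *memcpy (void *, const void *, size_t)
       __asm__ ("my_memcpy");

   After that declaration, both explicit memcpy calls and the block moves
   the compiler emits itself are directed at the my_memcpy symbol via the
   libfunc updates performed above.  */
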
10029 /* Return true if DECL is a builtin that expands to a constant or similarly
10030 simple code. */
10031 bool
10032 is_simple_builtin (tree decl)
10033 {
10034 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10035 switch (DECL_FUNCTION_CODE (decl))
10036 {
10037 /* Builtins that expand to constants. */
10038 case BUILT_IN_CONSTANT_P:
10039 case BUILT_IN_EXPECT:
10040 case BUILT_IN_OBJECT_SIZE:
10041 case BUILT_IN_UNREACHABLE:
10042 /* Simple register moves or loads from stack. */
10043 case BUILT_IN_ASSUME_ALIGNED:
10044 case BUILT_IN_RETURN_ADDRESS:
10045 case BUILT_IN_EXTRACT_RETURN_ADDR:
10046 case BUILT_IN_FROB_RETURN_ADDR:
10047 case BUILT_IN_RETURN:
10048 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10049 case BUILT_IN_FRAME_ADDRESS:
10050 case BUILT_IN_VA_END:
10051 case BUILT_IN_STACK_SAVE:
10052 case BUILT_IN_STACK_RESTORE:
10053 /* Exception state returns or moves registers around. */
10054 case BUILT_IN_EH_FILTER:
10055 case BUILT_IN_EH_POINTER:
10056 case BUILT_IN_EH_COPY_VALUES:
10057 return true;
10058
10059 default:
10060 return false;
10061 }
10062
10063 return false;
10064 }
10065
10066 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10067 most probably expanded inline into reasonably simple code. This is a
10068 superset of is_simple_builtin. */
10069 bool
10070 is_inexpensive_builtin (tree decl)
10071 {
10072 if (!decl)
10073 return false;
10074 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10075 return true;
10076 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10077 switch (DECL_FUNCTION_CODE (decl))
10078 {
10079 case BUILT_IN_ABS:
10080 case BUILT_IN_ALLOCA:
10081 case BUILT_IN_ALLOCA_WITH_ALIGN:
10082 case BUILT_IN_BSWAP16:
10083 case BUILT_IN_BSWAP32:
10084 case BUILT_IN_BSWAP64:
10085 case BUILT_IN_CLZ:
10086 case BUILT_IN_CLZIMAX:
10087 case BUILT_IN_CLZL:
10088 case BUILT_IN_CLZLL:
10089 case BUILT_IN_CTZ:
10090 case BUILT_IN_CTZIMAX:
10091 case BUILT_IN_CTZL:
10092 case BUILT_IN_CTZLL:
10093 case BUILT_IN_FFS:
10094 case BUILT_IN_FFSIMAX:
10095 case BUILT_IN_FFSL:
10096 case BUILT_IN_FFSLL:
10097 case BUILT_IN_IMAXABS:
10098 case BUILT_IN_FINITE:
10099 case BUILT_IN_FINITEF:
10100 case BUILT_IN_FINITEL:
10101 case BUILT_IN_FINITED32:
10102 case BUILT_IN_FINITED64:
10103 case BUILT_IN_FINITED128:
10104 case BUILT_IN_FPCLASSIFY:
10105 case BUILT_IN_ISFINITE:
10106 case BUILT_IN_ISINF_SIGN:
10107 case BUILT_IN_ISINF:
10108 case BUILT_IN_ISINFF:
10109 case BUILT_IN_ISINFL:
10110 case BUILT_IN_ISINFD32:
10111 case BUILT_IN_ISINFD64:
10112 case BUILT_IN_ISINFD128:
10113 case BUILT_IN_ISNAN:
10114 case BUILT_IN_ISNANF:
10115 case BUILT_IN_ISNANL:
10116 case BUILT_IN_ISNAND32:
10117 case BUILT_IN_ISNAND64:
10118 case BUILT_IN_ISNAND128:
10119 case BUILT_IN_ISNORMAL:
10120 case BUILT_IN_ISGREATER:
10121 case BUILT_IN_ISGREATEREQUAL:
10122 case BUILT_IN_ISLESS:
10123 case BUILT_IN_ISLESSEQUAL:
10124 case BUILT_IN_ISLESSGREATER:
10125 case BUILT_IN_ISUNORDERED:
10126 case BUILT_IN_VA_ARG_PACK:
10127 case BUILT_IN_VA_ARG_PACK_LEN:
10128 case BUILT_IN_VA_COPY:
10129 case BUILT_IN_TRAP:
10130 case BUILT_IN_SAVEREGS:
10131 case BUILT_IN_POPCOUNTL:
10132 case BUILT_IN_POPCOUNTLL:
10133 case BUILT_IN_POPCOUNTIMAX:
10134 case BUILT_IN_POPCOUNT:
10135 case BUILT_IN_PARITYL:
10136 case BUILT_IN_PARITYLL:
10137 case BUILT_IN_PARITYIMAX:
10138 case BUILT_IN_PARITY:
10139 case BUILT_IN_LABS:
10140 case BUILT_IN_LLABS:
10141 case BUILT_IN_PREFETCH:
10142 case BUILT_IN_ACC_ON_DEVICE:
10143 return true;
10144
10145 default:
10146 return is_simple_builtin (decl);
10147 }
10148
10149 return false;
10150 }