Move const char * -> int/fp folds to fold-const-call.c
gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "diagnostic-core.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "fold-const-call.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "varasm.h"
47 #include "tree-object-size.h"
48 #include "realmpfr.h"
49 #include "cfgrtl.h"
50 #include "except.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "libfuncs.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "langhooks.h"
59 #include "value-prof.h"
60 #include "builtins.h"
61 #include "asan.h"
62 #include "cilk.h"
63 #include "tree-chkp.h"
64 #include "rtl-chkp.h"
65
66
67 struct target_builtins default_target_builtins;
68 #if SWITCHABLE_TARGET
69 struct target_builtins *this_target_builtins = &default_target_builtins;
70 #endif
71
72 /* Define the names of the builtin function types and codes. */
73 const char *const built_in_class_names[BUILT_IN_LAST]
74 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
75
76 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
77 const char * built_in_names[(int) END_BUILTINS] =
78 {
79 #include "builtins.def"
80 };
81 #undef DEF_BUILTIN
82
   83 /* Set up an array of builtin_info_type, making sure each element's decl is
   84    initialized to NULL_TREE.  */
85 builtin_info_type builtin_info[(int)END_BUILTINS];
86
87 /* Non-zero if __builtin_constant_p should be folded right away. */
88 bool force_folding_builtin_constant_p;
89
90 static rtx c_readstr (const char *, machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree, tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 static rtx result_vector (int, rtx);
96 static void expand_builtin_prefetch (tree);
97 static rtx expand_builtin_apply_args (void);
98 static rtx expand_builtin_apply_args_1 (void);
99 static rtx expand_builtin_apply (rtx, rtx, rtx);
100 static void expand_builtin_return (rtx);
101 static enum type_class type_to_class (tree);
102 static rtx expand_builtin_classify_type (tree);
103 static void expand_errno_check (tree, rtx);
104 static rtx expand_builtin_mathfn (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_next_arg (void);
114 static rtx expand_builtin_va_start (tree);
115 static rtx expand_builtin_va_end (tree);
116 static rtx expand_builtin_va_copy (tree);
117 static rtx expand_builtin_strcmp (tree, rtx);
118 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
120 static rtx expand_builtin_memcpy (tree, rtx);
121 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
122 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
123 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
124 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 machine_mode, int, tree);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, machine_mode);
133 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_strchr (location_t, tree, tree, tree);
151 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
153 static tree fold_builtin_strcmp (location_t, tree, tree);
154 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
155 static tree fold_builtin_isascii (location_t, tree);
156 static tree fold_builtin_toascii (location_t, tree);
157 static tree fold_builtin_isdigit (location_t, tree);
158 static tree fold_builtin_fabs (location_t, tree, tree);
159 static tree fold_builtin_abs (location_t, tree, tree);
160 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
161 enum tree_code);
162 static tree fold_builtin_0 (location_t, tree);
163 static tree fold_builtin_1 (location_t, tree, tree);
164 static tree fold_builtin_2 (location_t, tree, tree, tree);
165 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_varargs (location_t, tree, tree*, int);
167
168 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
169 static tree fold_builtin_strstr (location_t, tree, tree, tree);
170 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
171 static tree fold_builtin_strspn (location_t, tree, tree);
172 static tree fold_builtin_strcspn (location_t, tree, tree);
173
174 static rtx expand_builtin_object_size (tree);
175 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
176 enum built_in_function);
177 static void maybe_emit_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_free_warning (tree);
180 static tree fold_builtin_object_size (tree, tree);
181
182 unsigned HOST_WIDE_INT target_newline;
183 unsigned HOST_WIDE_INT target_percent;
184 static unsigned HOST_WIDE_INT target_c;
185 static unsigned HOST_WIDE_INT target_s;
186 char target_percent_c[3];
187 char target_percent_s[3];
188 char target_percent_s_newline[4];
189 static tree do_mpfr_remquo (tree, tree, tree);
190 static tree do_mpfr_lgamma_r (tree, tree, tree);
191 static void expand_builtin_sync_synchronize (void);
192
  193 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or (when -fcilkplus is enabled) names a Cilk Plus runtime entry point.  */
194
195 static bool
196 is_builtin_name (const char *name)
197 {
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
204 if (flag_cilkplus
205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
208 return false;
209 }
210
211
212 /* Return true if DECL is a function symbol representing a built-in. */
213
214 bool
215 is_builtin_fn (tree decl)
216 {
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218 }
219
220 /* Return true if NODE should be considered for inline expansion regardless
  221    of the optimization level.  This is the case whenever the function is invoked
  222    with its "internal" name, which normally contains the prefix "__builtin".  */
223
224 static bool
225 called_as_built_in (tree node)
226 {
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
232 }
233
234 /* Compute values M and N such that M divides (address of EXP - N) and such
  235    that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
  236    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
  237    *ALIGNP and any bit-offset to *BITPOSP.
238
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
244 whereas foo() itself starts on an even address.
245
246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
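/* For instance, if all that is known about EXP is that it lives at a byte
   address of the form 8 * K + 2, the strongest valid answer is M = 64 bits
   and N = 16 bits: 64 divides the bit address minus 16, and 16 < 64.  */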
248
249 static bool
250 get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
252 {
253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
255 machine_mode mode;
256 int unsignedp, volatilep;
257 unsigned int align = BITS_PER_UNIT;
258 bool known_alignment = false;
259
260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
263 &mode, &unsignedp, &volatilep, true);
264
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
267 if (TREE_CODE (exp) == FUNCTION_DECL)
268 {
269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
275 }
276 else if (TREE_CODE (exp) == LABEL_DECL)
277 ;
278 else if (TREE_CODE (exp) == CONST_DECL)
279 {
280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
282 align = TYPE_ALIGN (TREE_TYPE (exp));
283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
285
286 known_alignment = true;
287 }
288 else if (DECL_P (exp))
289 {
290 align = DECL_ALIGN (exp);
291 known_alignment = true;
292 }
293 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
294 {
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 }
297 else if (TREE_CODE (exp) == INDIRECT_REF
298 || TREE_CODE (exp) == MEM_REF
299 || TREE_CODE (exp) == TARGET_MEM_REF)
300 {
301 tree addr = TREE_OPERAND (exp, 0);
302 unsigned ptr_align;
303 unsigned HOST_WIDE_INT ptr_bitpos;
304 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
305
  306 	  /* If the address is explicitly aligned, handle that.  */
307 if (TREE_CODE (addr) == BIT_AND_EXPR
308 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
309 {
310 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
311 ptr_bitmask *= BITS_PER_UNIT;
312 align = ptr_bitmask & -ptr_bitmask;
313 addr = TREE_OPERAND (addr, 0);
314 }
315
316 known_alignment
317 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
318 align = MAX (ptr_align, align);
319
320 /* Re-apply explicit alignment to the bitpos. */
321 ptr_bitpos &= ptr_bitmask;
322
323 /* The alignment of the pointer operand in a TARGET_MEM_REF
324 has to take the variable offset parts into account. */
325 if (TREE_CODE (exp) == TARGET_MEM_REF)
326 {
327 if (TMR_INDEX (exp))
328 {
329 unsigned HOST_WIDE_INT step = 1;
330 if (TMR_STEP (exp))
331 step = TREE_INT_CST_LOW (TMR_STEP (exp));
332 align = MIN (align, (step & -step) * BITS_PER_UNIT);
333 }
334 if (TMR_INDEX2 (exp))
335 align = BITS_PER_UNIT;
336 known_alignment = false;
337 }
338
339 /* When EXP is an actual memory reference then we can use
340 TYPE_ALIGN of a pointer indirection to derive alignment.
341 Do so only if get_pointer_alignment_1 did not reveal absolute
342 alignment knowledge and if using that alignment would
343 improve the situation. */
344 if (!addr_p && !known_alignment
345 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
346 align = TYPE_ALIGN (TREE_TYPE (exp));
347 else
348 {
349 /* Else adjust bitpos accordingly. */
350 bitpos += ptr_bitpos;
351 if (TREE_CODE (exp) == MEM_REF
352 || TREE_CODE (exp) == TARGET_MEM_REF)
353 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
354 }
355 }
356 else if (TREE_CODE (exp) == STRING_CST)
357 {
  358       /* STRING_CSTs are the only constant objects we allow not to be
  359 	 wrapped inside a CONST_DECL.  */
360 align = TYPE_ALIGN (TREE_TYPE (exp));
361 if (CONSTANT_CLASS_P (exp))
362 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
363
364 known_alignment = true;
365 }
366
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
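  /* E.g. an offset known to be a multiple of 4 bytes (a tree_ctz of 2) can
     guarantee at most 32-bit alignment through this path.  */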
369 if (offset)
370 {
371 unsigned int trailing_zeros = tree_ctz (offset);
372 if (trailing_zeros < HOST_BITS_PER_INT)
373 {
374 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
375 if (inner)
376 align = MIN (align, inner);
377 }
378 }
379
380 *alignp = align;
381 *bitposp = bitpos & (*alignp - 1);
382 return known_alignment;
383 }
384
385 /* For a memory reference expression EXP compute values M and N such that M
386 divides (&EXP - N) and such that N < M. If these numbers can be determined,
  387    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
  388    and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */
389
390 bool
391 get_object_alignment_1 (tree exp, unsigned int *alignp,
392 unsigned HOST_WIDE_INT *bitposp)
393 {
394 return get_object_alignment_2 (exp, alignp, bitposp, false);
395 }
396
397 /* Return the alignment in bits of EXP, an object. */
398
399 unsigned int
400 get_object_alignment (tree exp)
401 {
402 unsigned HOST_WIDE_INT bitpos = 0;
403 unsigned int align;
404
405 get_object_alignment_1 (exp, &align, &bitpos);
406
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
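  /* E.g. if align is 128 and bitpos is 48, the address is known to be
     48 bits past a 128-bit boundary, so the strongest power of two that
     still divides it is 48 & -48 == 16 bits.  */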
409
410 if (bitpos != 0)
411 align = (bitpos & -bitpos);
412 return align;
413 }
414
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
  417    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
418 the results are just a conservative approximation.
419
420 If EXP is not a pointer, false is returned too. */
421
422 bool
423 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 unsigned HOST_WIDE_INT *bitposp)
425 {
426 STRIP_NOPS (exp);
427
428 if (TREE_CODE (exp) == ADDR_EXPR)
429 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 alignp, bitposp, true);
431 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 {
433 unsigned int align;
434 unsigned HOST_WIDE_INT bitpos;
435 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 &align, &bitpos);
437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 else
440 {
441 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 if (trailing_zeros < HOST_BITS_PER_INT)
443 {
444 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 if (inner)
446 align = MIN (align, inner);
447 }
448 }
449 *alignp = align;
450 *bitposp = bitpos & (align - 1);
451 return res;
452 }
453 else if (TREE_CODE (exp) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp)))
455 {
456 unsigned int ptr_align, ptr_misalign;
457 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458
459 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 {
461 *bitposp = ptr_misalign * BITS_PER_UNIT;
462 *alignp = ptr_align * BITS_PER_UNIT;
463 /* We cannot really tell whether this result is an approximation. */
464 return true;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = (bitpos & -bitpos);
506
507 return align;
508 }
509
510 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
  511    way, because the string could contain a zero byte in the middle.
512 TREE_STRING_LENGTH is the size of the character array, not the string.
513
514 ONLY_VALUE should be nonzero if the result is not going to be emitted
515 into the instruction stream and zero if it is going to be expanded.
516 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
517 is returned, otherwise NULL, since
518 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
519 evaluate the side-effects.
520
521 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
522 accesses. Note that this implies the result is not going to be emitted
523 into the instruction stream.
524
525 The value returned is of type `ssizetype'.
526
527 Unfortunately, string_constant can't access the values of const char
528 arrays with initializers, so neither can we do so here. */
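/* For example, given the constant "foo\0bar", this returns 3 for a known
   offset of 0, 3 again for a known offset of 4 ("bar"), and NULL_TREE when
   the offset is not a compile-time constant, since the embedded zero byte
   then makes the length unknowable.  */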
529
530 tree
531 c_strlen (tree src, int only_value)
532 {
533 tree offset_node;
534 HOST_WIDE_INT offset;
535 int max;
536 const char *ptr;
537 location_t loc;
538
539 STRIP_NOPS (src);
540 if (TREE_CODE (src) == COND_EXPR
541 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
542 {
543 tree len1, len2;
544
545 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
546 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
547 if (tree_int_cst_equal (len1, len2))
548 return len1;
549 }
550
551 if (TREE_CODE (src) == COMPOUND_EXPR
552 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
553 return c_strlen (TREE_OPERAND (src, 1), only_value);
554
555 loc = EXPR_LOC_OR_LOC (src, input_location);
556
557 src = string_constant (src, &offset_node);
558 if (src == 0)
559 return NULL_TREE;
560
561 max = TREE_STRING_LENGTH (src) - 1;
562 ptr = TREE_STRING_POINTER (src);
563
564 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
565 {
566 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
567 compute the offset to the following null if we don't know where to
568 start searching for it. */
569 int i;
570
571 for (i = 0; i < max; i++)
572 if (ptr[i] == 0)
573 return NULL_TREE;
574
575 /* We don't know the starting offset, but we do know that the string
576 has no internal zero bytes. We can assume that the offset falls
577 within the bounds of the string; otherwise, the programmer deserves
578 what he gets. Subtract the offset from the length of the string,
579 and return that. This would perhaps not be valid if we were dealing
580 with named arrays in addition to literal string constants. */
581
582 return size_diffop_loc (loc, size_int (max), offset_node);
583 }
584
585 /* We have a known offset into the string. Start searching there for
586 a null character if we can represent it as a single HOST_WIDE_INT. */
587 if (offset_node == 0)
588 offset = 0;
589 else if (! tree_fits_shwi_p (offset_node))
590 offset = -1;
591 else
592 offset = tree_to_shwi (offset_node);
593
594 /* If the offset is known to be out of bounds, warn, and call strlen at
595 runtime. */
596 if (offset < 0 || offset > max)
597 {
598 /* Suppress multiple warnings for propagated constant strings. */
599 if (only_value != 2
600 && !TREE_NO_WARNING (src))
601 {
602 warning_at (loc, 0, "offset outside bounds of constant string");
603 TREE_NO_WARNING (src) = 1;
604 }
605 return NULL_TREE;
606 }
607
608 /* Use strlen to search for the first zero byte. Since any strings
609 constructed with build_string will have nulls appended, we win even
610 if we get handed something like (char[4])"abcd".
611
612 Since OFFSET is our starting index into the string, no further
613 calculation is needed. */
614 return ssize_int (strlen (ptr + offset));
615 }
616
617 /* Return a constant integer corresponding to target reading
618 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
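/* E.g. on a typical little-endian target c_readstr ("abcd", SImode) yields
   the constant 0x64636261, whereas a typical big-endian target yields
   0x61626364; the loop below places byte I of STR at the bit position the
   target itself would use for it.  */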
619
620 static rtx
621 c_readstr (const char *str, machine_mode mode)
622 {
623 HOST_WIDE_INT ch;
624 unsigned int i, j;
625 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
626
627 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
628 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
629 / HOST_BITS_PER_WIDE_INT;
630
631 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
632 for (i = 0; i < len; i++)
633 tmp[i] = 0;
634
635 ch = 1;
636 for (i = 0; i < GET_MODE_SIZE (mode); i++)
637 {
638 j = i;
639 if (WORDS_BIG_ENDIAN)
640 j = GET_MODE_SIZE (mode) - i - 1;
641 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
642 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
643 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
644 j *= BITS_PER_UNIT;
645
646 if (ch)
647 ch = (unsigned char) str[i];
648 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
649 }
650
651 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
652 return immed_wide_int_const (c, mode);
653 }
654
  655 /* Cast a target constant CST to target CHAR and, if that value fits into
  656    the host char type, return zero and put that value into the variable
  657    pointed to by P.  Otherwise return one.  */
658
659 static int
660 target_char_cast (tree cst, char *p)
661 {
662 unsigned HOST_WIDE_INT val, hostval;
663
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
666 return 1;
667
668 /* Do not care if it fits or not right here. */
669 val = TREE_INT_CST_LOW (cst);
670
671 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
672 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
673
674 hostval = val;
675 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
676 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
677
678 if (val != hostval)
679 return 1;
680
681 *p = hostval;
682 return 0;
683 }
684
685 /* Similar to save_expr, but assumes that arbitrary code is not executed
686 in between the multiple evaluations. In particular, we assume that a
687 non-addressable local variable will not be modified. */
688
689 static tree
690 builtin_save_expr (tree exp)
691 {
692 if (TREE_CODE (exp) == SSA_NAME
693 || (TREE_ADDRESSABLE (exp) == 0
694 && (TREE_CODE (exp) == PARM_DECL
695 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
696 return exp;
697
698 return save_expr (exp);
699 }
700
701 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
702 times to get the address of either a higher stack frame, or a return
703 address located within it (depending on FNDECL_CODE). */
704
705 static rtx
706 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
707 {
708 int i;
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
710 if (tem == NULL_RTX)
711 {
712 /* For a zero count with __builtin_return_address, we don't care what
713 frame address we return, because target-specific definitions will
714 override us. Therefore frame pointer elimination is OK, and using
715 the soft frame pointer is OK.
716
717 For a nonzero count, or a zero count with __builtin_frame_address,
718 we require a stable offset from the current frame pointer to the
719 previous one, so we must use the hard frame pointer, and
720 we must disable frame pointer elimination. */
721 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
722 tem = frame_pointer_rtx;
723 else
724 {
725 tem = hard_frame_pointer_rtx;
726
727 /* Tell reload not to eliminate the frame pointer. */
728 crtl->accesses_prior_frames = 1;
729 }
730 }
731
732 if (count > 0)
733 SETUP_FRAME_ADDRESSES ();
734
735 /* On the SPARC, the return address is not in the frame, it is in a
736 register. There is no way to access it off of the current frame
737 pointer, but it can be accessed off the previous frame pointer by
738 reading the value from the register window save area. */
739 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
740 count--;
741
742 /* Scan back COUNT frames to the specified frame. */
743 for (i = 0; i < count; i++)
744 {
745 /* Assume the dynamic chain pointer is in the word that the
746 frame address points to, unless otherwise specified. */
747 tem = DYNAMIC_CHAIN_ADDRESS (tem);
748 tem = memory_address (Pmode, tem);
749 tem = gen_frame_mem (Pmode, tem);
750 tem = copy_to_reg (tem);
751 }
752
753 /* For __builtin_frame_address, return what we've got. But, on
754 the SPARC for example, we may have to add a bias. */
755 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
756 return FRAME_ADDR_RTX (tem);
757
758 /* For __builtin_return_address, get the return address from that frame. */
759 #ifdef RETURN_ADDR_RTX
760 tem = RETURN_ADDR_RTX (count, tem);
761 #else
762 tem = memory_address (Pmode,
763 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
764 tem = gen_frame_mem (Pmode, tem);
765 #endif
766 return tem;
767 }
768
769 /* Alias set used for setjmp buffer. */
770 static alias_set_type setjmp_alias_set = -1;
771
772 /* Construct the leading half of a __builtin_setjmp call. Control will
773 return to RECEIVER_LABEL. This is also called directly by the SJLJ
774 exception handling code. */
775
776 void
777 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
778 {
779 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
780 rtx stack_save;
781 rtx mem;
782
783 if (setjmp_alias_set == -1)
784 setjmp_alias_set = new_alias_set ();
785
786 buf_addr = convert_memory_address (Pmode, buf_addr);
787
788 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
789
790 /* We store the frame pointer and the address of receiver_label in
791 the buffer and use the rest of it for the stack save area, which
792 is machine-dependent. */
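  /* Concretely: word 0 of the buffer receives the target's
     builtin_setjmp_frame_value, word 1 receives the address of
     RECEIVER_LABEL, and the area starting at offset
     2 * GET_MODE_SIZE (Pmode) receives the SAVE_NONLOCAL stack save.  */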
793
794 mem = gen_rtx_MEM (Pmode, buf_addr);
795 set_mem_alias_set (mem, setjmp_alias_set);
796 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
797
798 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
799 GET_MODE_SIZE (Pmode))),
800 set_mem_alias_set (mem, setjmp_alias_set);
801
802 emit_move_insn (validize_mem (mem),
803 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
804
805 stack_save = gen_rtx_MEM (sa_mode,
806 plus_constant (Pmode, buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (stack_save, setjmp_alias_set);
809 emit_stack_save (SAVE_NONLOCAL, &stack_save);
810
811 /* If there is further processing to do, do it. */
812 if (targetm.have_builtin_setjmp_setup ())
813 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
814
815 /* We have a nonlocal label. */
816 cfun->has_nonlocal_label = 1;
817 }
818
819 /* Construct the trailing part of a __builtin_setjmp call. This is
820 also called directly by the SJLJ exception handling code.
  821    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
822
823 void
824 expand_builtin_setjmp_receiver (rtx receiver_label)
825 {
826 rtx chain;
827
828 /* Mark the FP as used when we get here, so we have to make sure it's
829 marked as used by this function. */
830 emit_use (hard_frame_pointer_rtx);
831
832 /* Mark the static chain as clobbered here so life information
833 doesn't get messed up for it. */
834 chain = targetm.calls.static_chain (current_function_decl, true);
835 if (chain && REG_P (chain))
836 emit_clobber (chain);
837
838 /* Now put in the code to restore the frame pointer, and argument
839 pointer, if needed. */
840 if (! targetm.have_nonlocal_goto ())
841 {
842 /* First adjust our frame pointer to its actual value. It was
843 previously set to the start of the virtual area corresponding to
844 the stacked variables when we branched here and now needs to be
845 adjusted to the actual hardware fp value.
846
847 Assignments to virtual registers are converted by
848 instantiate_virtual_regs into the corresponding assignment
849 to the underlying register (fp in this case) that makes
850 the original assignment true.
851 So the following insn will actually be decrementing fp by
852 STARTING_FRAME_OFFSET. */
853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
854
855 /* Restoring the frame pointer also modifies the hard frame pointer.
856 Mark it used (so that the previous assignment remains live once
857 the frame pointer is eliminated) and clobbered (to represent the
858 implicit update from the assignment). */
859 emit_use (hard_frame_pointer_rtx);
860 emit_clobber (hard_frame_pointer_rtx);
861 }
862
863 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
864 {
865 #ifdef ELIMINABLE_REGS
866 /* If the argument pointer can be eliminated in favor of the
867 frame pointer, we don't need to restore it. We assume here
868 that if such an elimination is present, it can always be used.
869 This is the case on all known machines; if we don't make this
870 assumption, we do unnecessary saving on many machines. */
871 size_t i;
872 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
873
874 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
875 if (elim_regs[i].from == ARG_POINTER_REGNUM
876 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
877 break;
878
879 if (i == ARRAY_SIZE (elim_regs))
880 #endif
881 {
882 /* Now restore our arg pointer from the address at which it
883 was saved in our stack frame. */
884 emit_move_insn (crtl->args.internal_arg_pointer,
885 copy_to_reg (get_arg_pointer_save_area ()));
886 }
887 }
888
889 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
890 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
891 else if (targetm.have_nonlocal_goto_receiver ())
892 emit_insn (targetm.gen_nonlocal_goto_receiver ());
893 else
894 { /* Nothing */ }
895
896 /* We must not allow the code we just generated to be reordered by
897 scheduling. Specifically, the update of the frame pointer must
898 happen immediately, not later. */
899 emit_insn (gen_blockage ());
900 }
901
902 /* __builtin_longjmp is passed a pointer to an array of five words (not
903 all will be used on all machines). It operates similarly to the C
904 library function of the same name, but is more efficient. Much of
905 the code below is copied from the handling of non-local gotos. */
906
907 static void
908 expand_builtin_longjmp (rtx buf_addr, rtx value)
909 {
910 rtx fp, lab, stack;
911 rtx_insn *insn, *last;
912 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
913
914 /* DRAP is needed for stack realign if longjmp is expanded to current
915 function */
916 if (SUPPORTS_STACK_ALIGNMENT)
917 crtl->need_drap = true;
918
919 if (setjmp_alias_set == -1)
920 setjmp_alias_set = new_alias_set ();
921
922 buf_addr = convert_memory_address (Pmode, buf_addr);
923
924 buf_addr = force_reg (Pmode, buf_addr);
925
  926   /* We require that the user pass a second argument of 1, because that
  927      is what builtin_setjmp will return.  */
928 gcc_assert (value == const1_rtx);
929
930 last = get_last_insn ();
931 if (targetm.have_builtin_longjmp ())
932 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
933 else
934 {
935 fp = gen_rtx_MEM (Pmode, buf_addr);
936 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
937 GET_MODE_SIZE (Pmode)));
938
939 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
940 2 * GET_MODE_SIZE (Pmode)));
941 set_mem_alias_set (fp, setjmp_alias_set);
942 set_mem_alias_set (lab, setjmp_alias_set);
943 set_mem_alias_set (stack, setjmp_alias_set);
944
945 /* Pick up FP, label, and SP from the block and jump. This code is
946 from expand_goto in stmt.c; see there for detailed comments. */
947 if (targetm.have_nonlocal_goto ())
948 /* We have to pass a value to the nonlocal_goto pattern that will
949 get copied into the static_chain pointer, but it does not matter
950 what that value is, because builtin_setjmp does not use it. */
951 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
952 else
953 {
954 lab = copy_to_reg (lab);
955
956 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
957 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
958
959 emit_move_insn (hard_frame_pointer_rtx, fp);
960 emit_stack_restore (SAVE_NONLOCAL, stack);
961
962 emit_use (hard_frame_pointer_rtx);
963 emit_use (stack_pointer_rtx);
964 emit_indirect_jump (lab);
965 }
966 }
967
968 /* Search backwards and mark the jump insn as a non-local goto.
969 Note that this precludes the use of __builtin_longjmp to a
970 __builtin_setjmp target in the same function. However, we've
971 already cautioned the user that these functions are for
972 internal exception handling use only. */
973 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
974 {
975 gcc_assert (insn != last);
976
977 if (JUMP_P (insn))
978 {
979 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
980 break;
981 }
982 else if (CALL_P (insn))
983 break;
984 }
985 }
986
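/* Return true if ITER has not yet reached the end of the argument list
   it is iterating over.  */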
987 static inline bool
988 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
989 {
990 return (iter->i < iter->n);
991 }
992
993 /* This function validates the types of a function call argument list
994 against a specified list of tree_codes. If the last specifier is a 0,
  995    it represents an ellipsis; otherwise the last specifier must be a
996 VOID_TYPE. */
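/* For example, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while
   validate_arglist (exp, POINTER_TYPE, 0) accepts one pointer argument
   followed by any number of further arguments.  */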
997
998 static bool
999 validate_arglist (const_tree callexpr, ...)
1000 {
1001 enum tree_code code;
 1002   bool res = false;
1003 va_list ap;
1004 const_call_expr_arg_iterator iter;
1005 const_tree arg;
1006
1007 va_start (ap, callexpr);
1008 init_const_call_expr_arg_iterator (callexpr, &iter);
1009
1010 do
1011 {
1012 code = (enum tree_code) va_arg (ap, int);
1013 switch (code)
1014 {
1015 case 0:
 1016 	  /* This signifies an ellipsis; any further arguments are all ok.  */
1017 res = true;
1018 goto end;
1019 case VOID_TYPE:
 1020 	  /* This signifies an endlink; if no arguments remain, return
1021 true, otherwise return false. */
1022 res = !more_const_call_expr_args_p (&iter);
1023 goto end;
1024 default:
1025 /* If no parameters remain or the parameter's code does not
1026 match the specified code, return false. Otherwise continue
1027 checking any remaining arguments. */
1028 arg = next_const_call_expr_arg (&iter);
1029 if (!validate_arg (arg, code))
1030 goto end;
1031 break;
1032 }
1033 }
1034 while (1);
1035
1036 /* We need gotos here since we can only have one VA_CLOSE in a
1037 function. */
1038 end: ;
1039 va_end (ap);
1040
1041 return res;
1042 }
1043
1044 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1045 and the address of the save area. */
1046
1047 static rtx
1048 expand_builtin_nonlocal_goto (tree exp)
1049 {
1050 tree t_label, t_save_area;
1051 rtx r_label, r_save_area, r_fp, r_sp;
1052 rtx_insn *insn;
1053
1054 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1055 return NULL_RTX;
1056
1057 t_label = CALL_EXPR_ARG (exp, 0);
1058 t_save_area = CALL_EXPR_ARG (exp, 1);
1059
1060 r_label = expand_normal (t_label);
1061 r_label = convert_memory_address (Pmode, r_label);
1062 r_save_area = expand_normal (t_save_area);
1063 r_save_area = convert_memory_address (Pmode, r_save_area);
1064 /* Copy the address of the save location to a register just in case it was
1065 based on the frame pointer. */
1066 r_save_area = copy_to_reg (r_save_area);
1067 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1068 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1069 plus_constant (Pmode, r_save_area,
1070 GET_MODE_SIZE (Pmode)));
1071
1072 crtl->has_nonlocal_goto = 1;
1073
1074 /* ??? We no longer need to pass the static chain value, afaik. */
1075 if (targetm.have_nonlocal_goto ())
1076 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1077 else
1078 {
1079 r_label = copy_to_reg (r_label);
1080
1081 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1082 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1083
1084 /* Restore frame pointer for containing function. */
1085 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1086 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1087
1088 /* USE of hard_frame_pointer_rtx added for consistency;
1089 not clear if really needed. */
1090 emit_use (hard_frame_pointer_rtx);
1091 emit_use (stack_pointer_rtx);
1092
1093 /* If the architecture is using a GP register, we must
1094 conservatively assume that the target function makes use of it.
1095 The prologue of functions with nonlocal gotos must therefore
1096 initialize the GP register to the appropriate value, and we
1097 must then make sure that this value is live at the point
1098 of the jump. (Note that this doesn't necessarily apply
1099 to targets with a nonlocal_goto pattern; they are free
1100 to implement it in their own way. Note also that this is
1101 a no-op if the GP register is a global invariant.) */
1102 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1103 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1104 emit_use (pic_offset_table_rtx);
1105
1106 emit_indirect_jump (r_label);
1107 }
1108
1109 /* Search backwards to the jump insn and mark it as a
1110 non-local goto. */
1111 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1112 {
1113 if (JUMP_P (insn))
1114 {
1115 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1116 break;
1117 }
1118 else if (CALL_P (insn))
1119 break;
1120 }
1121
1122 return const0_rtx;
1123 }
1124
1125 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1126 (not all will be used on all machines) that was passed to __builtin_setjmp.
1127 It updates the stack pointer in that block to the current value. This is
1128 also called directly by the SJLJ exception handling code. */
1129
1130 void
1131 expand_builtin_update_setjmp_buf (rtx buf_addr)
1132 {
1133 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1134 rtx stack_save
1135 = gen_rtx_MEM (sa_mode,
1136 memory_address
1137 (sa_mode,
1138 plus_constant (Pmode, buf_addr,
1139 2 * GET_MODE_SIZE (Pmode))));
1140
1141 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1142 }
1143
1144 /* Expand a call to __builtin_prefetch. For a target that does not support
1145 data prefetch, evaluate the memory address argument in case it has side
1146 effects. */
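/* For example, __builtin_prefetch (p) prefetches P for read with maximal
   expected locality, while __builtin_prefetch (p, 1, 0) prefetches it for
   write with no expected temporal locality.  */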
1147
1148 static void
1149 expand_builtin_prefetch (tree exp)
1150 {
1151 tree arg0, arg1, arg2;
1152 int nargs;
1153 rtx op0, op1, op2;
1154
1155 if (!validate_arglist (exp, POINTER_TYPE, 0))
1156 return;
1157
1158 arg0 = CALL_EXPR_ARG (exp, 0);
1159
1160 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1161 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1162 locality). */
1163 nargs = call_expr_nargs (exp);
1164 if (nargs > 1)
1165 arg1 = CALL_EXPR_ARG (exp, 1);
1166 else
1167 arg1 = integer_zero_node;
1168 if (nargs > 2)
1169 arg2 = CALL_EXPR_ARG (exp, 2);
1170 else
1171 arg2 = integer_three_node;
1172
1173 /* Argument 0 is an address. */
1174 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1175
1176 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1177 if (TREE_CODE (arg1) != INTEGER_CST)
1178 {
1179 error ("second argument to %<__builtin_prefetch%> must be a constant");
1180 arg1 = integer_zero_node;
1181 }
1182 op1 = expand_normal (arg1);
1183 /* Argument 1 must be either zero or one. */
1184 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1185 {
1186 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1187 " using zero");
1188 op1 = const0_rtx;
1189 }
1190
1191 /* Argument 2 (locality) must be a compile-time constant int. */
1192 if (TREE_CODE (arg2) != INTEGER_CST)
1193 {
1194 error ("third argument to %<__builtin_prefetch%> must be a constant");
1195 arg2 = integer_zero_node;
1196 }
1197 op2 = expand_normal (arg2);
1198 /* Argument 2 must be 0, 1, 2, or 3. */
1199 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1200 {
1201 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1202 op2 = const0_rtx;
1203 }
1204
1205 if (targetm.have_prefetch ())
1206 {
1207 struct expand_operand ops[3];
1208
1209 create_address_operand (&ops[0], op0);
1210 create_integer_operand (&ops[1], INTVAL (op1));
1211 create_integer_operand (&ops[2], INTVAL (op2));
1212 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1213 return;
1214 }
1215
1216 /* Don't do anything with direct references to volatile memory, but
1217 generate code to handle other side effects. */
1218 if (!MEM_P (op0) && side_effects_p (op0))
1219 emit_insn (op0);
1220 }
1221
1222 /* Get a MEM rtx for expression EXP which is the address of an operand
1223 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1224 the maximum length of the block of memory that might be accessed or
1225 NULL if unknown. */
1226
1227 static rtx
1228 get_memory_rtx (tree exp, tree len)
1229 {
1230 tree orig_exp = exp;
1231 rtx addr, mem;
1232
 1233   /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
 1234      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1235 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1236 exp = TREE_OPERAND (exp, 0);
1237
1238 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1239 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1240
1241 /* Get an expression we can use to find the attributes to assign to MEM.
1242 First remove any nops. */
1243 while (CONVERT_EXPR_P (exp)
1244 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1245 exp = TREE_OPERAND (exp, 0);
1246
 1247   /* Build a MEM_REF representing the whole accessed area as a byte blob
1248 (as builtin stringops may alias with anything). */
1249 exp = fold_build2 (MEM_REF,
1250 build_array_type (char_type_node,
1251 build_range_type (sizetype,
1252 size_one_node, len)),
1253 exp, build_int_cst (ptr_type_node, 0));
1254
1255 /* If the MEM_REF has no acceptable address, try to get the base object
1256 from the original address we got, and build an all-aliasing
1257 unknown-sized access to that one. */
1258 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1259 set_mem_attributes (mem, exp, 0);
1260 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1261 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1262 0))))
1263 {
1264 exp = build_fold_addr_expr (exp);
1265 exp = fold_build2 (MEM_REF,
1266 build_array_type (char_type_node,
1267 build_range_type (sizetype,
1268 size_zero_node,
1269 NULL)),
1270 exp, build_int_cst (ptr_type_node, 0));
1271 set_mem_attributes (mem, exp, 0);
1272 }
1273 set_mem_alias_set (mem, 0);
1274 return mem;
1275 }
1276 \f
1277 /* Built-in functions to perform an untyped call and return. */
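/* These implement the GNU extensions __builtin_apply_args, __builtin_apply
   and __builtin_return, which let a wrapper forward its own incoming
   arguments to another function and return whatever that function returned;
   roughly:

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply (target_fn, args, size);
     __builtin_return (result);

   where TARGET_FN and SIZE (an upper bound on the size of the argument
   block, in bytes) are supplied by the caller.  */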
1278
1279 #define apply_args_mode \
1280 (this_target_builtins->x_apply_args_mode)
1281 #define apply_result_mode \
1282 (this_target_builtins->x_apply_result_mode)
1283
1284 /* Return the size required for the block returned by __builtin_apply_args,
1285 and initialize apply_args_mode. */
1286
1287 static int
1288 apply_args_size (void)
1289 {
1290 static int size = -1;
1291 int align;
1292 unsigned int regno;
1293 machine_mode mode;
1294
1295 /* The values computed by this function never change. */
1296 if (size < 0)
1297 {
1298 /* The first value is the incoming arg-pointer. */
1299 size = GET_MODE_SIZE (Pmode);
1300
1301 /* The second value is the structure value address unless this is
1302 passed as an "invisible" first argument. */
1303 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1304 size += GET_MODE_SIZE (Pmode);
1305
1306 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1307 if (FUNCTION_ARG_REGNO_P (regno))
1308 {
1309 mode = targetm.calls.get_raw_arg_mode (regno);
1310
1311 gcc_assert (mode != VOIDmode);
1312
1313 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1314 if (size % align != 0)
1315 size = CEIL (size, align) * align;
1316 size += GET_MODE_SIZE (mode);
1317 apply_args_mode[regno] = mode;
1318 }
1319 else
1320 {
1321 apply_args_mode[regno] = VOIDmode;
1322 }
1323 }
1324 return size;
1325 }
1326
1327 /* Return the size required for the block returned by __builtin_apply,
1328 and initialize apply_result_mode. */
1329
1330 static int
1331 apply_result_size (void)
1332 {
1333 static int size = -1;
1334 int align, regno;
1335 machine_mode mode;
1336
1337 /* The values computed by this function never change. */
1338 if (size < 0)
1339 {
1340 size = 0;
1341
1342 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1343 if (targetm.calls.function_value_regno_p (regno))
1344 {
1345 mode = targetm.calls.get_raw_result_mode (regno);
1346
1347 gcc_assert (mode != VOIDmode);
1348
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 size += GET_MODE_SIZE (mode);
1353 apply_result_mode[regno] = mode;
1354 }
1355 else
1356 apply_result_mode[regno] = VOIDmode;
1357
1358 /* Allow targets that use untyped_call and untyped_return to override
1359 the size so that machine-specific information can be stored here. */
1360 #ifdef APPLY_RESULT_SIZE
1361 size = APPLY_RESULT_SIZE;
1362 #endif
1363 }
1364 return size;
1365 }
1366
1367 /* Create a vector describing the result block RESULT. If SAVEP is true,
1368 the result block is used to save the values; otherwise it is used to
1369 restore the values. */
1370
1371 static rtx
1372 result_vector (int savep, rtx result)
1373 {
1374 int regno, size, align, nelts;
1375 machine_mode mode;
1376 rtx reg, mem;
1377 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1378
1379 size = nelts = 0;
1380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1381 if ((mode = apply_result_mode[regno]) != VOIDmode)
1382 {
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
1386 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1387 mem = adjust_address (result, mode, size);
1388 savevec[nelts++] = (savep
1389 ? gen_rtx_SET (mem, reg)
1390 : gen_rtx_SET (reg, mem));
1391 size += GET_MODE_SIZE (mode);
1392 }
1393 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1394 }
1395
1396 /* Save the state required to perform an untyped call with the same
1397 arguments as were passed to the current function. */
1398
1399 static rtx
1400 expand_builtin_apply_args_1 (void)
1401 {
1402 rtx registers, tem;
1403 int size, align, regno;
1404 machine_mode mode;
1405 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1406
1407 /* Create a block where the arg-pointer, structure value address,
1408 and argument registers can be saved. */
1409 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1410
1411 /* Walk past the arg-pointer and structure value address. */
1412 size = GET_MODE_SIZE (Pmode);
1413 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1414 size += GET_MODE_SIZE (Pmode);
1415
1416 /* Save each register used in calling a function to the block. */
1417 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1418 if ((mode = apply_args_mode[regno]) != VOIDmode)
1419 {
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1423
1424 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1425
1426 emit_move_insn (adjust_address (registers, mode, size), tem);
1427 size += GET_MODE_SIZE (mode);
1428 }
1429
1430 /* Save the arg pointer to the block. */
1431 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1432   /* We need the arg pointer as the caller actually passed the arguments
 1433      to us, not as we might have pretended they were passed.  Make sure it's a valid
1434 operand, as emit_move_insn isn't expected to handle a PLUS. */
1435 if (STACK_GROWS_DOWNWARD)
1436 tem
1437 = force_operand (plus_constant (Pmode, tem,
1438 crtl->args.pretend_args_size),
1439 NULL_RTX);
1440 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1441
1442 size = GET_MODE_SIZE (Pmode);
1443
1444 /* Save the structure value address unless this is passed as an
1445 "invisible" first argument. */
1446 if (struct_incoming_value)
1447 {
1448 emit_move_insn (adjust_address (registers, Pmode, size),
1449 copy_to_reg (struct_incoming_value));
1450 size += GET_MODE_SIZE (Pmode);
1451 }
1452
1453 /* Return the address of the block. */
1454 return copy_addr_to_reg (XEXP (registers, 0));
1455 }
1456
 1457 /* __builtin_apply_args returns a block of memory allocated on
1458 the stack into which is stored the arg pointer, structure
1459 value address, static chain, and all the registers that might
1460 possibly be used in performing a function call. The code is
1461 moved to the start of the function so the incoming values are
1462 saved. */
1463
1464 static rtx
1465 expand_builtin_apply_args (void)
1466 {
1467 /* Don't do __builtin_apply_args more than once in a function.
1468 Save the result of the first call and reuse it. */
1469 if (apply_args_value != 0)
1470 return apply_args_value;
1471 {
1472 /* When this function is called, it means that registers must be
1473 saved on entry to this function. So we migrate the
1474 call to the first insn of this function. */
1475 rtx temp;
1476
1477 start_sequence ();
1478 temp = expand_builtin_apply_args_1 ();
1479 rtx_insn *seq = get_insns ();
1480 end_sequence ();
1481
1482 apply_args_value = temp;
1483
1484 /* Put the insns after the NOTE that starts the function.
1485 If this is inside a start_sequence, make the outer-level insn
1486 chain current, so the code is placed at the start of the
1487 function. If internal_arg_pointer is a non-virtual pseudo,
1488 it needs to be placed after the function that initializes
1489 that pseudo. */
1490 push_topmost_sequence ();
1491 if (REG_P (crtl->args.internal_arg_pointer)
1492 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1493 emit_insn_before (seq, parm_birth_insn);
1494 else
1495 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1496 pop_topmost_sequence ();
1497 return temp;
1498 }
1499 }
1500
1501 /* Perform an untyped call and save the state required to perform an
1502 untyped return of whatever value was returned by the given function. */
1503
1504 static rtx
1505 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1506 {
1507 int size, align, regno;
1508 machine_mode mode;
1509 rtx incoming_args, result, reg, dest, src;
1510 rtx_call_insn *call_insn;
1511 rtx old_stack_level = 0;
1512 rtx call_fusage = 0;
1513 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1514
1515 arguments = convert_memory_address (Pmode, arguments);
1516
1517 /* Create a block where the return registers can be saved. */
1518 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1519
1520 /* Fetch the arg pointer from the ARGUMENTS block. */
1521 incoming_args = gen_reg_rtx (Pmode);
1522 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1523 if (!STACK_GROWS_DOWNWARD)
1524 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1525 incoming_args, 0, OPTAB_LIB_WIDEN);
1526
1527 /* Push a new argument block and copy the arguments. Do not allow
1528 the (potential) memcpy call below to interfere with our stack
1529 manipulations. */
1530 do_pending_stack_adjust ();
1531 NO_DEFER_POP;
1532
1533 /* Save the stack with nonlocal if available. */
1534 if (targetm.have_save_stack_nonlocal ())
1535 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1536 else
1537 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1538
1539 /* Allocate a block of memory onto the stack and copy the memory
1540 arguments to the outgoing arguments address. We can pass TRUE
1541 as the 4th argument because we just saved the stack pointer
1542 and will restore it right after the call. */
1543 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1544
1545 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1546 may have already set current_function_calls_alloca to true.
1547 current_function_calls_alloca won't be set if argsize is zero,
1548 so we have to guarantee need_drap is true here. */
1549 if (SUPPORTS_STACK_ALIGNMENT)
1550 crtl->need_drap = true;
1551
1552 dest = virtual_outgoing_args_rtx;
1553 if (!STACK_GROWS_DOWNWARD)
1554 {
1555 if (CONST_INT_P (argsize))
1556 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1557 else
1558 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1559 }
1560 dest = gen_rtx_MEM (BLKmode, dest);
1561 set_mem_align (dest, PARM_BOUNDARY);
1562 src = gen_rtx_MEM (BLKmode, incoming_args);
1563 set_mem_align (src, PARM_BOUNDARY);
1564 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1565
1566 /* Refer to the argument block. */
1567 apply_args_size ();
1568 arguments = gen_rtx_MEM (BLKmode, arguments);
1569 set_mem_align (arguments, PARM_BOUNDARY);
1570
1571 /* Walk past the arg-pointer and structure value address. */
1572 size = GET_MODE_SIZE (Pmode);
1573 if (struct_value)
1574 size += GET_MODE_SIZE (Pmode);
1575
1576 /* Restore each of the registers previously saved. Make USE insns
1577 for each of these registers for use in making the call. */
1578 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1579 if ((mode = apply_args_mode[regno]) != VOIDmode)
1580 {
1581 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1582 if (size % align != 0)
1583 size = CEIL (size, align) * align;
1584 reg = gen_rtx_REG (mode, regno);
1585 emit_move_insn (reg, adjust_address (arguments, mode, size));
1586 use_reg (&call_fusage, reg);
1587 size += GET_MODE_SIZE (mode);
1588 }
1589
1590 /* Restore the structure value address unless this is passed as an
1591 "invisible" first argument. */
1592 size = GET_MODE_SIZE (Pmode);
1593 if (struct_value)
1594 {
1595 rtx value = gen_reg_rtx (Pmode);
1596 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1597 emit_move_insn (struct_value, value);
1598 if (REG_P (struct_value))
1599 use_reg (&call_fusage, struct_value);
1600 size += GET_MODE_SIZE (Pmode);
1601 }
1602
1603 /* All arguments and registers used for the call are set up by now! */
1604 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1605
1606 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1607 and we don't want to load it into a register as an optimization,
1608 because prepare_call_address already did it if it should be done. */
1609 if (GET_CODE (function) != SYMBOL_REF)
1610 function = memory_address (FUNCTION_MODE, function);
1611
1612 /* Generate the actual call instruction and save the return value. */
1613 if (targetm.have_untyped_call ())
1614 {
1615 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1616 emit_call_insn (targetm.gen_untyped_call (mem, result,
1617 result_vector (1, result)));
1618 }
1619 else if (targetm.have_call_value ())
1620 {
1621 rtx valreg = 0;
1622
1623 /* Locate the unique return register. It is not possible to
1624 express a call that sets more than one return register using
1625 call_value; use untyped_call for that. In fact, untyped_call
1626 only needs to save the return registers in the given block. */
1627 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1628 if ((mode = apply_result_mode[regno]) != VOIDmode)
1629 {
1630 gcc_assert (!valreg); /* have_untyped_call required. */
1631
1632 valreg = gen_rtx_REG (mode, regno);
1633 }
1634
1635 emit_insn (targetm.gen_call_value (valreg,
1636 gen_rtx_MEM (FUNCTION_MODE, function),
1637 const0_rtx, NULL_RTX, const0_rtx));
1638
1639 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1640 }
1641 else
1642 gcc_unreachable ();
1643
1644 /* Find the CALL insn we just emitted, and attach the register usage
1645 information. */
1646 call_insn = last_call_insn ();
1647 add_function_usage_to (call_insn, call_fusage);
1648
1649 /* Restore the stack. */
1650 if (targetm.have_save_stack_nonlocal ())
1651 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1652 else
1653 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1654 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1655
1656 OK_DEFER_POP;
1657
1658 /* Return the address of the result block. */
1659 result = copy_addr_to_reg (XEXP (result, 0));
1660 return convert_memory_address (ptr_mode, result);
1661 }
1662
1663 /* Perform an untyped return. */
1664
1665 static void
1666 expand_builtin_return (rtx result)
1667 {
1668 int size, align, regno;
1669 machine_mode mode;
1670 rtx reg;
1671 rtx_insn *call_fusage = 0;
1672
1673 result = convert_memory_address (Pmode, result);
1674
1675 apply_result_size ();
1676 result = gen_rtx_MEM (BLKmode, result);
1677
1678 if (targetm.have_untyped_return ())
1679 {
1680 rtx vector = result_vector (0, result);
1681 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1682 emit_barrier ();
1683 return;
1684 }
1685
1686 /* Restore the return value and note that each value is used. */
1687 size = 0;
1688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1689 if ((mode = apply_result_mode[regno]) != VOIDmode)
1690 {
1691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1692 if (size % align != 0)
1693 size = CEIL (size, align) * align;
1694 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1695 emit_move_insn (reg, adjust_address (result, mode, size));
1696
1697 push_to_sequence (call_fusage);
1698 emit_use (reg);
1699 call_fusage = get_insns ();
1700 end_sequence ();
1701 size += GET_MODE_SIZE (mode);
1702 }
1703
1704 /* Put the USE insns before the return. */
1705 emit_insn (call_fusage);
1706
1707 /* Return whatever values were restored by jumping directly to the end
1708 of the function. */
1709 expand_naked_return ();
1710 }
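
/* As an illustrative sketch (not code used by the compiler itself), the
   untyped call/return machinery above is what expands the GCC extension

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
     __builtin_return (ret);

   which forwards the current function's arguments to FN and returns
   whatever FN returned.  FN and the 64-byte argument-block size are
   assumptions made for the example.  */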
1711
1712 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1713
1714 static enum type_class
1715 type_to_class (tree type)
1716 {
1717 switch (TREE_CODE (type))
1718 {
1719 case VOID_TYPE: return void_type_class;
1720 case INTEGER_TYPE: return integer_type_class;
1721 case ENUMERAL_TYPE: return enumeral_type_class;
1722 case BOOLEAN_TYPE: return boolean_type_class;
1723 case POINTER_TYPE: return pointer_type_class;
1724 case REFERENCE_TYPE: return reference_type_class;
1725 case OFFSET_TYPE: return offset_type_class;
1726 case REAL_TYPE: return real_type_class;
1727 case COMPLEX_TYPE: return complex_type_class;
1728 case FUNCTION_TYPE: return function_type_class;
1729 case METHOD_TYPE: return method_type_class;
1730 case RECORD_TYPE: return record_type_class;
1731 case UNION_TYPE:
1732 case QUAL_UNION_TYPE: return union_type_class;
1733 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1734 ? string_type_class : array_type_class);
1735 case LANG_TYPE: return lang_type_class;
1736 default: return no_type_class;
1737 }
1738 }
1739
1740 /* Expand a call EXP to __builtin_classify_type. */
1741
1742 static rtx
1743 expand_builtin_classify_type (tree exp)
1744 {
1745 if (call_expr_nargs (exp))
1746 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1747 return GEN_INT (no_type_class);
1748 }
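
/* For example (illustrative only), with the classification above:

     __builtin_classify_type (0)           evaluates to integer_type_class
     __builtin_classify_type (0.0)         evaluates to real_type_class
     __builtin_classify_type ((void *) 0)  evaluates to pointer_type_class  */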
1749
1750 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1751 determines which among a set of three builtin math functions is
1752 appropriate for a given type mode. The `F' and `L' cases are
1753 automatically generated from the `double' case. */
1754 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1755 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1756 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1757 fcodel = BUILT_IN_MATHFN##L ; break;
1758 /* Similar to above, but appends _R after any F/L suffix. */
1759 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1760 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1761 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1762 fcodel = BUILT_IN_MATHFN##L_R ; break;
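
/* As an illustration, CASE_MATHFN (BUILT_IN_SQRT) expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   in the switch below.  */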
1763
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on built-in function codes; it does not
1767 guarantee that the target actually has an implementation of the
1768 function. */
1769
1770 static built_in_function
1771 mathfn_built_in_2 (tree type, built_in_function fn)
1772 {
1773 built_in_function fcode, fcodef, fcodel;
1774
1775 switch (fn)
1776 {
1777 CASE_MATHFN (BUILT_IN_ACOS)
1778 CASE_MATHFN (BUILT_IN_ACOSH)
1779 CASE_MATHFN (BUILT_IN_ASIN)
1780 CASE_MATHFN (BUILT_IN_ASINH)
1781 CASE_MATHFN (BUILT_IN_ATAN)
1782 CASE_MATHFN (BUILT_IN_ATAN2)
1783 CASE_MATHFN (BUILT_IN_ATANH)
1784 CASE_MATHFN (BUILT_IN_CBRT)
1785 CASE_MATHFN (BUILT_IN_CEIL)
1786 CASE_MATHFN (BUILT_IN_CEXPI)
1787 CASE_MATHFN (BUILT_IN_COPYSIGN)
1788 CASE_MATHFN (BUILT_IN_COS)
1789 CASE_MATHFN (BUILT_IN_COSH)
1790 CASE_MATHFN (BUILT_IN_DREM)
1791 CASE_MATHFN (BUILT_IN_ERF)
1792 CASE_MATHFN (BUILT_IN_ERFC)
1793 CASE_MATHFN (BUILT_IN_EXP)
1794 CASE_MATHFN (BUILT_IN_EXP10)
1795 CASE_MATHFN (BUILT_IN_EXP2)
1796 CASE_MATHFN (BUILT_IN_EXPM1)
1797 CASE_MATHFN (BUILT_IN_FABS)
1798 CASE_MATHFN (BUILT_IN_FDIM)
1799 CASE_MATHFN (BUILT_IN_FLOOR)
1800 CASE_MATHFN (BUILT_IN_FMA)
1801 CASE_MATHFN (BUILT_IN_FMAX)
1802 CASE_MATHFN (BUILT_IN_FMIN)
1803 CASE_MATHFN (BUILT_IN_FMOD)
1804 CASE_MATHFN (BUILT_IN_FREXP)
1805 CASE_MATHFN (BUILT_IN_GAMMA)
1806 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1807 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1808 CASE_MATHFN (BUILT_IN_HYPOT)
1809 CASE_MATHFN (BUILT_IN_ILOGB)
1810 CASE_MATHFN (BUILT_IN_ICEIL)
1811 CASE_MATHFN (BUILT_IN_IFLOOR)
1812 CASE_MATHFN (BUILT_IN_INF)
1813 CASE_MATHFN (BUILT_IN_IRINT)
1814 CASE_MATHFN (BUILT_IN_IROUND)
1815 CASE_MATHFN (BUILT_IN_ISINF)
1816 CASE_MATHFN (BUILT_IN_J0)
1817 CASE_MATHFN (BUILT_IN_J1)
1818 CASE_MATHFN (BUILT_IN_JN)
1819 CASE_MATHFN (BUILT_IN_LCEIL)
1820 CASE_MATHFN (BUILT_IN_LDEXP)
1821 CASE_MATHFN (BUILT_IN_LFLOOR)
1822 CASE_MATHFN (BUILT_IN_LGAMMA)
1823 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1824 CASE_MATHFN (BUILT_IN_LLCEIL)
1825 CASE_MATHFN (BUILT_IN_LLFLOOR)
1826 CASE_MATHFN (BUILT_IN_LLRINT)
1827 CASE_MATHFN (BUILT_IN_LLROUND)
1828 CASE_MATHFN (BUILT_IN_LOG)
1829 CASE_MATHFN (BUILT_IN_LOG10)
1830 CASE_MATHFN (BUILT_IN_LOG1P)
1831 CASE_MATHFN (BUILT_IN_LOG2)
1832 CASE_MATHFN (BUILT_IN_LOGB)
1833 CASE_MATHFN (BUILT_IN_LRINT)
1834 CASE_MATHFN (BUILT_IN_LROUND)
1835 CASE_MATHFN (BUILT_IN_MODF)
1836 CASE_MATHFN (BUILT_IN_NAN)
1837 CASE_MATHFN (BUILT_IN_NANS)
1838 CASE_MATHFN (BUILT_IN_NEARBYINT)
1839 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1840 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1841 CASE_MATHFN (BUILT_IN_POW)
1842 CASE_MATHFN (BUILT_IN_POWI)
1843 CASE_MATHFN (BUILT_IN_POW10)
1844 CASE_MATHFN (BUILT_IN_REMAINDER)
1845 CASE_MATHFN (BUILT_IN_REMQUO)
1846 CASE_MATHFN (BUILT_IN_RINT)
1847 CASE_MATHFN (BUILT_IN_ROUND)
1848 CASE_MATHFN (BUILT_IN_SCALB)
1849 CASE_MATHFN (BUILT_IN_SCALBLN)
1850 CASE_MATHFN (BUILT_IN_SCALBN)
1851 CASE_MATHFN (BUILT_IN_SIGNBIT)
1852 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1853 CASE_MATHFN (BUILT_IN_SIN)
1854 CASE_MATHFN (BUILT_IN_SINCOS)
1855 CASE_MATHFN (BUILT_IN_SINH)
1856 CASE_MATHFN (BUILT_IN_SQRT)
1857 CASE_MATHFN (BUILT_IN_TAN)
1858 CASE_MATHFN (BUILT_IN_TANH)
1859 CASE_MATHFN (BUILT_IN_TGAMMA)
1860 CASE_MATHFN (BUILT_IN_TRUNC)
1861 CASE_MATHFN (BUILT_IN_Y0)
1862 CASE_MATHFN (BUILT_IN_Y1)
1863 CASE_MATHFN (BUILT_IN_YN)
1864
1865 default:
1866 return END_BUILTINS;
1867 }
1868
1869 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1870 return fcode;
1871 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1872 return fcodef;
1873 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1874 return fcodel;
1875 else
1876 return END_BUILTINS;
1877 }
1878
1879 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1880 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1881 otherwise use the explicit declaration. If we can't do the conversion,
1882 return null. */
1883
1884 static tree
1885 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1886 {
1887 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1888 if (fcode2 == END_BUILTINS)
1889 return NULL_TREE;
1890
1891 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1892 return NULL_TREE;
1893
1894 return builtin_decl_explicit (fcode2);
1895 }
1896
1897 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1898
1899 tree
1900 mathfn_built_in (tree type, enum built_in_function fn)
1901 {
1902 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1903 }
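
/* For example (illustrative), mathfn_built_in (float_type_node, BUILT_IN_SIN)
   returns the declaration of sinf, or NULL_TREE if the implicit builtin is
   unavailable (e.g. with -fno-builtin-sinf).  */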
1904
1905 /* If errno must be maintained, expand the RTL to check if the result,
1906 TARGET, of a built-in function call, EXP, is NaN, and if so set
1907 errno to EDOM. */
1908
1909 static void
1910 expand_errno_check (tree exp, rtx target)
1911 {
1912 rtx_code_label *lab = gen_label_rtx ();
1913
1914 /* Test the result; if it is NaN, set errno=EDOM because
1915 the argument was not in the domain. */
1916 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1917 NULL_RTX, NULL, lab,
1918 /* The jump is very likely. */
1919 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1920
1921 #ifdef TARGET_EDOM
1922 /* If this built-in doesn't throw an exception, set errno directly. */
1923 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1924 {
1925 #ifdef GEN_ERRNO_RTX
1926 rtx errno_rtx = GEN_ERRNO_RTX;
1927 #else
1928 rtx errno_rtx
1929 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1930 #endif
1931 emit_move_insn (errno_rtx,
1932 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1933 emit_label (lab);
1934 return;
1935 }
1936 #endif
1937
1938 /* Make sure the library call isn't expanded as a tail call. */
1939 CALL_EXPR_TAILCALL (exp) = 0;
1940
1941 /* We can't set errno=EDOM directly; let the library call do it.
1942 Pop the arguments right away in case the call gets deleted. */
1943 NO_DEFER_POP;
1944 expand_call (exp, target, 0);
1945 OK_DEFER_POP;
1946 emit_label (lab);
1947 }
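
/* In effect, the check emitted above behaves like the sketch below
   (illustrative only; the test relies on NaN comparing unequal to itself):

     if (result != result)
       errno = EDOM;        <-- or the library call is re-issued to set errno
*/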
1948
1949 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1950 Return NULL_RTX if a normal call should be emitted rather than expanding
1951 the function in-line. EXP is the expression that is a call to the builtin
1952 function; if convenient, the result should be placed in TARGET.
1953 SUBTARGET may be used as the target for computing one of EXP's operands. */
1954
1955 static rtx
1956 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1957 {
1958 optab builtin_optab;
1959 rtx op0;
1960 rtx_insn *insns;
1961 tree fndecl = get_callee_fndecl (exp);
1962 machine_mode mode;
1963 bool errno_set = false;
1964 bool try_widening = false;
1965 tree arg;
1966
1967 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1968 return NULL_RTX;
1969
1970 arg = CALL_EXPR_ARG (exp, 0);
1971
1972 switch (DECL_FUNCTION_CODE (fndecl))
1973 {
1974 CASE_FLT_FN (BUILT_IN_SQRT):
1975 errno_set = ! tree_expr_nonnegative_p (arg);
1976 try_widening = true;
1977 builtin_optab = sqrt_optab;
1978 break;
1979 CASE_FLT_FN (BUILT_IN_EXP):
1980 errno_set = true; builtin_optab = exp_optab; break;
1981 CASE_FLT_FN (BUILT_IN_EXP10):
1982 CASE_FLT_FN (BUILT_IN_POW10):
1983 errno_set = true; builtin_optab = exp10_optab; break;
1984 CASE_FLT_FN (BUILT_IN_EXP2):
1985 errno_set = true; builtin_optab = exp2_optab; break;
1986 CASE_FLT_FN (BUILT_IN_EXPM1):
1987 errno_set = true; builtin_optab = expm1_optab; break;
1988 CASE_FLT_FN (BUILT_IN_LOGB):
1989 errno_set = true; builtin_optab = logb_optab; break;
1990 CASE_FLT_FN (BUILT_IN_LOG):
1991 errno_set = true; builtin_optab = log_optab; break;
1992 CASE_FLT_FN (BUILT_IN_LOG10):
1993 errno_set = true; builtin_optab = log10_optab; break;
1994 CASE_FLT_FN (BUILT_IN_LOG2):
1995 errno_set = true; builtin_optab = log2_optab; break;
1996 CASE_FLT_FN (BUILT_IN_LOG1P):
1997 errno_set = true; builtin_optab = log1p_optab; break;
1998 CASE_FLT_FN (BUILT_IN_ASIN):
1999 builtin_optab = asin_optab; break;
2000 CASE_FLT_FN (BUILT_IN_ACOS):
2001 builtin_optab = acos_optab; break;
2002 CASE_FLT_FN (BUILT_IN_TAN):
2003 builtin_optab = tan_optab; break;
2004 CASE_FLT_FN (BUILT_IN_ATAN):
2005 builtin_optab = atan_optab; break;
2006 CASE_FLT_FN (BUILT_IN_FLOOR):
2007 builtin_optab = floor_optab; break;
2008 CASE_FLT_FN (BUILT_IN_CEIL):
2009 builtin_optab = ceil_optab; break;
2010 CASE_FLT_FN (BUILT_IN_TRUNC):
2011 builtin_optab = btrunc_optab; break;
2012 CASE_FLT_FN (BUILT_IN_ROUND):
2013 builtin_optab = round_optab; break;
2014 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2015 builtin_optab = nearbyint_optab;
2016 if (flag_trapping_math)
2017 break;
2018 /* Else fall through and expand as rint. */
2019 CASE_FLT_FN (BUILT_IN_RINT):
2020 builtin_optab = rint_optab; break;
2021 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2022 builtin_optab = significand_optab; break;
2023 default:
2024 gcc_unreachable ();
2025 }
2026
2027 /* Make a suitable register to place result in. */
2028 mode = TYPE_MODE (TREE_TYPE (exp));
2029
2030 if (! flag_errno_math || ! HONOR_NANS (mode))
2031 errno_set = false;
2032
2033 /* Before working hard, check whether the instruction is available, but try
2034 to widen the mode for specific operations. */
2035 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2036 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2037 && (!errno_set || !optimize_insn_for_size_p ()))
2038 {
2039 rtx result = gen_reg_rtx (mode);
2040
2041 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2042 need to expand the argument again. This way, we will not perform
2043 side-effects more than once. */
2044 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2045
2046 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2047
2048 start_sequence ();
2049
2050 /* Compute into RESULT.
2051 Set RESULT to wherever the result comes back. */
2052 result = expand_unop (mode, builtin_optab, op0, result, 0);
2053
2054 if (result != 0)
2055 {
2056 if (errno_set)
2057 expand_errno_check (exp, result);
2058
2059 /* Output the entire sequence. */
2060 insns = get_insns ();
2061 end_sequence ();
2062 emit_insn (insns);
2063 return result;
2064 }
2065
2066 /* If we were unable to expand via the builtin, stop the sequence
2067 (without outputting the insns) and call the library function
2068 with the stabilized argument list. */
2069 end_sequence ();
2070 }
2071
2072 return expand_call (exp, target, target == const0_rtx);
2073 }
2074
2075 /* Expand a call to the builtin binary math functions (pow and atan2).
2076 Return NULL_RTX if a normal call should be emitted rather than expanding the
2077 function in-line. EXP is the expression that is a call to the builtin
2078 function; if convenient, the result should be placed in TARGET.
2079 SUBTARGET may be used as the target for computing one of EXP's
2080 operands. */
2081
2082 static rtx
2083 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2084 {
2085 optab builtin_optab;
2086 rtx op0, op1, result;
2087 rtx_insn *insns;
2088 int op1_type = REAL_TYPE;
2089 tree fndecl = get_callee_fndecl (exp);
2090 tree arg0, arg1;
2091 machine_mode mode;
2092 bool errno_set = true;
2093
2094 switch (DECL_FUNCTION_CODE (fndecl))
2095 {
2096 CASE_FLT_FN (BUILT_IN_SCALBN):
2097 CASE_FLT_FN (BUILT_IN_SCALBLN):
2098 CASE_FLT_FN (BUILT_IN_LDEXP):
2099 op1_type = INTEGER_TYPE;
2100 default:
2101 break;
2102 }
2103
2104 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2105 return NULL_RTX;
2106
2107 arg0 = CALL_EXPR_ARG (exp, 0);
2108 arg1 = CALL_EXPR_ARG (exp, 1);
2109
2110 switch (DECL_FUNCTION_CODE (fndecl))
2111 {
2112 CASE_FLT_FN (BUILT_IN_POW):
2113 builtin_optab = pow_optab; break;
2114 CASE_FLT_FN (BUILT_IN_ATAN2):
2115 builtin_optab = atan2_optab; break;
2116 CASE_FLT_FN (BUILT_IN_SCALB):
2117 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2118 return 0;
2119 builtin_optab = scalb_optab; break;
2120 CASE_FLT_FN (BUILT_IN_SCALBN):
2121 CASE_FLT_FN (BUILT_IN_SCALBLN):
2122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2123 return 0;
2124 /* Fall through... */
2125 CASE_FLT_FN (BUILT_IN_LDEXP):
2126 builtin_optab = ldexp_optab; break;
2127 CASE_FLT_FN (BUILT_IN_FMOD):
2128 builtin_optab = fmod_optab; break;
2129 CASE_FLT_FN (BUILT_IN_REMAINDER):
2130 CASE_FLT_FN (BUILT_IN_DREM):
2131 builtin_optab = remainder_optab; break;
2132 default:
2133 gcc_unreachable ();
2134 }
2135
2136 /* Make a suitable register to place result in. */
2137 mode = TYPE_MODE (TREE_TYPE (exp));
2138
2139 /* Before working hard, check whether the instruction is available. */
2140 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2141 return NULL_RTX;
2142
2143 result = gen_reg_rtx (mode);
2144
2145 if (! flag_errno_math || ! HONOR_NANS (mode))
2146 errno_set = false;
2147
2148 if (errno_set && optimize_insn_for_size_p ())
2149 return 0;
2150
2151 /* Always stabilize the argument list. */
2152 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2153 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2154
2155 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2156 op1 = expand_normal (arg1);
2157
2158 start_sequence ();
2159
2160 /* Compute into RESULT.
2161 Set RESULT to wherever the result comes back. */
2162 result = expand_binop (mode, builtin_optab, op0, op1,
2163 result, 0, OPTAB_DIRECT);
2164
2165 /* If we were unable to expand via the builtin, stop the sequence
2166 (without outputting the insns) and call the library function
2167 with the stabilized argument list. */
2168 if (result == 0)
2169 {
2170 end_sequence ();
2171 return expand_call (exp, target, target == const0_rtx);
2172 }
2173
2174 if (errno_set)
2175 expand_errno_check (exp, result);
2176
2177 /* Output the entire sequence. */
2178 insns = get_insns ();
2179 end_sequence ();
2180 emit_insn (insns);
2181
2182 return result;
2183 }
2184
2185 /* Expand a call to the builtin trinary math functions (fma).
2186 Return NULL_RTX if a normal call should be emitted rather than expanding the
2187 function in-line. EXP is the expression that is a call to the builtin
2188 function; if convenient, the result should be placed in TARGET.
2189 SUBTARGET may be used as the target for computing one of EXP's
2190 operands. */
2191
2192 static rtx
2193 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2194 {
2195 optab builtin_optab;
2196 rtx op0, op1, op2, result;
2197 rtx_insn *insns;
2198 tree fndecl = get_callee_fndecl (exp);
2199 tree arg0, arg1, arg2;
2200 machine_mode mode;
2201
2202 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2203 return NULL_RTX;
2204
2205 arg0 = CALL_EXPR_ARG (exp, 0);
2206 arg1 = CALL_EXPR_ARG (exp, 1);
2207 arg2 = CALL_EXPR_ARG (exp, 2);
2208
2209 switch (DECL_FUNCTION_CODE (fndecl))
2210 {
2211 CASE_FLT_FN (BUILT_IN_FMA):
2212 builtin_optab = fma_optab; break;
2213 default:
2214 gcc_unreachable ();
2215 }
2216
2217 /* Make a suitable register to place result in. */
2218 mode = TYPE_MODE (TREE_TYPE (exp));
2219
2220 /* Before working hard, check whether the instruction is available. */
2221 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2222 return NULL_RTX;
2223
2224 result = gen_reg_rtx (mode);
2225
2226 /* Always stabilize the argument list. */
2227 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2228 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2229 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2230
2231 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2232 op1 = expand_normal (arg1);
2233 op2 = expand_normal (arg2);
2234
2235 start_sequence ();
2236
2237 /* Compute into RESULT.
2238 Set RESULT to wherever the result comes back. */
2239 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2240 result, 0);
2241
2242 /* If we were unable to expand via the builtin, stop the sequence
2243 (without outputting the insns) and call the library function
2244 with the stabilized argument list. */
2245 if (result == 0)
2246 {
2247 end_sequence ();
2248 return expand_call (exp, target, target == const0_rtx);
2249 }
2250
2251 /* Output the entire sequence. */
2252 insns = get_insns ();
2253 end_sequence ();
2254 emit_insn (insns);
2255
2256 return result;
2257 }
2258
2259 /* Expand a call to the builtin sin and cos math functions.
2260 Return NULL_RTX if a normal call should be emitted rather than expanding the
2261 function in-line. EXP is the expression that is a call to the builtin
2262 function; if convenient, the result should be placed in TARGET.
2263 SUBTARGET may be used as the target for computing one of EXP's
2264 operands. */
2265
2266 static rtx
2267 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2268 {
2269 optab builtin_optab;
2270 rtx op0;
2271 rtx_insn *insns;
2272 tree fndecl = get_callee_fndecl (exp);
2273 machine_mode mode;
2274 tree arg;
2275
2276 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2277 return NULL_RTX;
2278
2279 arg = CALL_EXPR_ARG (exp, 0);
2280
2281 switch (DECL_FUNCTION_CODE (fndecl))
2282 {
2283 CASE_FLT_FN (BUILT_IN_SIN):
2284 CASE_FLT_FN (BUILT_IN_COS):
2285 builtin_optab = sincos_optab; break;
2286 default:
2287 gcc_unreachable ();
2288 }
2289
2290 /* Make a suitable register to place result in. */
2291 mode = TYPE_MODE (TREE_TYPE (exp));
2292
2293 /* Check if the sincos insn is available; otherwise fall back
2294 to the sin or cos insn. */
2295 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2296 switch (DECL_FUNCTION_CODE (fndecl))
2297 {
2298 CASE_FLT_FN (BUILT_IN_SIN):
2299 builtin_optab = sin_optab; break;
2300 CASE_FLT_FN (BUILT_IN_COS):
2301 builtin_optab = cos_optab; break;
2302 default:
2303 gcc_unreachable ();
2304 }
2305
2306 /* Before working hard, check whether the instruction is available. */
2307 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2308 {
2309 rtx result = gen_reg_rtx (mode);
2310
2311 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2312 need to expand the argument again. This way, we will not perform
2313 side-effects more than once. */
2314 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2315
2316 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2317
2318 start_sequence ();
2319
2320 /* Compute into RESULT.
2321 Set RESULT to wherever the result comes back. */
2322 if (builtin_optab == sincos_optab)
2323 {
2324 int ok;
2325
2326 switch (DECL_FUNCTION_CODE (fndecl))
2327 {
2328 CASE_FLT_FN (BUILT_IN_SIN):
2329 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2330 break;
2331 CASE_FLT_FN (BUILT_IN_COS):
2332 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2333 break;
2334 default:
2335 gcc_unreachable ();
2336 }
2337 gcc_assert (ok);
2338 }
2339 else
2340 result = expand_unop (mode, builtin_optab, op0, result, 0);
2341
2342 if (result != 0)
2343 {
2344 /* Output the entire sequence. */
2345 insns = get_insns ();
2346 end_sequence ();
2347 emit_insn (insns);
2348 return result;
2349 }
2350
2351 /* If we were unable to expand via the builtin, stop the sequence
2352 (without outputting the insns) and call the library function
2353 with the stabilized argument list. */
2354 end_sequence ();
2355 }
2356
2357 return expand_call (exp, target, target == const0_rtx);
2358 }
2359
2360 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2361 return an RTL instruction code that implements the functionality.
2362 If that isn't possible or available return CODE_FOR_nothing. */
2363
2364 static enum insn_code
2365 interclass_mathfn_icode (tree arg, tree fndecl)
2366 {
2367 bool errno_set = false;
2368 optab builtin_optab = unknown_optab;
2369 machine_mode mode;
2370
2371 switch (DECL_FUNCTION_CODE (fndecl))
2372 {
2373 CASE_FLT_FN (BUILT_IN_ILOGB):
2374 errno_set = true; builtin_optab = ilogb_optab; break;
2375 CASE_FLT_FN (BUILT_IN_ISINF):
2376 builtin_optab = isinf_optab; break;
2377 case BUILT_IN_ISNORMAL:
2378 case BUILT_IN_ISFINITE:
2379 CASE_FLT_FN (BUILT_IN_FINITE):
2380 case BUILT_IN_FINITED32:
2381 case BUILT_IN_FINITED64:
2382 case BUILT_IN_FINITED128:
2383 case BUILT_IN_ISINFD32:
2384 case BUILT_IN_ISINFD64:
2385 case BUILT_IN_ISINFD128:
2386 /* These builtins have no optabs (yet). */
2387 break;
2388 default:
2389 gcc_unreachable ();
2390 }
2391
2392 /* There's no easy way to detect the case we need to set EDOM. */
2393 if (flag_errno_math && errno_set)
2394 return CODE_FOR_nothing;
2395
2396 /* Optab mode depends on the mode of the input argument. */
2397 mode = TYPE_MODE (TREE_TYPE (arg));
2398
2399 if (builtin_optab)
2400 return optab_handler (builtin_optab, mode);
2401 return CODE_FOR_nothing;
2402 }
2403
2404 /* Expand a call to one of the builtin math functions that operate on
2405 a floating-point argument and output an integer result (ilogb, isinf,
2406 isnan, etc).
2407 Return 0 if a normal call should be emitted rather than expanding the
2408 function in-line. EXP is the expression that is a call to the builtin
2409 function; if convenient, the result should be placed in TARGET. */
2410
2411 static rtx
2412 expand_builtin_interclass_mathfn (tree exp, rtx target)
2413 {
2414 enum insn_code icode = CODE_FOR_nothing;
2415 rtx op0;
2416 tree fndecl = get_callee_fndecl (exp);
2417 machine_mode mode;
2418 tree arg;
2419
2420 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2421 return NULL_RTX;
2422
2423 arg = CALL_EXPR_ARG (exp, 0);
2424 icode = interclass_mathfn_icode (arg, fndecl);
2425 mode = TYPE_MODE (TREE_TYPE (arg));
2426
2427 if (icode != CODE_FOR_nothing)
2428 {
2429 struct expand_operand ops[1];
2430 rtx_insn *last = get_last_insn ();
2431 tree orig_arg = arg;
2432
2433 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2434 need to expand the argument again. This way, we will not perform
2435 side-effects more than once. */
2436 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2437
2438 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2439
2440 if (mode != GET_MODE (op0))
2441 op0 = convert_to_mode (mode, op0, 0);
2442
2443 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2444 if (maybe_legitimize_operands (icode, 0, 1, ops)
2445 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2446 return ops[0].value;
2447
2448 delete_insns_since (last);
2449 CALL_EXPR_ARG (exp, 0) = orig_arg;
2450 }
2451
2452 return NULL_RTX;
2453 }
2454
2455 /* Expand a call to the builtin sincos math function.
2456 Return NULL_RTX if a normal call should be emitted rather than expanding the
2457 function in-line. EXP is the expression that is a call to the builtin
2458 function. */
2459
2460 static rtx
2461 expand_builtin_sincos (tree exp)
2462 {
2463 rtx op0, op1, op2, target1, target2;
2464 machine_mode mode;
2465 tree arg, sinp, cosp;
2466 int result;
2467 location_t loc = EXPR_LOCATION (exp);
2468 tree alias_type, alias_off;
2469
2470 if (!validate_arglist (exp, REAL_TYPE,
2471 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2472 return NULL_RTX;
2473
2474 arg = CALL_EXPR_ARG (exp, 0);
2475 sinp = CALL_EXPR_ARG (exp, 1);
2476 cosp = CALL_EXPR_ARG (exp, 2);
2477
2478 /* Make a suitable register to place result in. */
2479 mode = TYPE_MODE (TREE_TYPE (arg));
2480
2481 /* Check if sincos insn is available, otherwise emit the call. */
2482 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2483 return NULL_RTX;
2484
2485 target1 = gen_reg_rtx (mode);
2486 target2 = gen_reg_rtx (mode);
2487
2488 op0 = expand_normal (arg);
2489 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2490 alias_off = build_int_cst (alias_type, 0);
2491 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2492 sinp, alias_off));
2493 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2494 cosp, alias_off));
2495
2496 /* Compute into target1 and target2.
2497 Set TARGET to wherever the result comes back. */
2498 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2499 gcc_assert (result);
2500
2501 /* Move target1 and target2 to the memory locations indicated
2502 by op1 and op2. */
2503 emit_move_insn (op1, target1);
2504 emit_move_insn (op2, target2);
2505
2506 return const0_rtx;
2507 }
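
/* For example (illustrative sketch), when the sincos optab is available

     sincos (x, &s, &c);

   expands to a single insn sequence provided by the optab, whose two
   results are then stored through the SINP and COSP pointers as above.  */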
2508
2509 /* Expand a call to the internal cexpi builtin to the sincos math function.
2510 EXP is the expression that is a call to the builtin function; if convenient,
2511 the result should be placed in TARGET. */
2512
2513 static rtx
2514 expand_builtin_cexpi (tree exp, rtx target)
2515 {
2516 tree fndecl = get_callee_fndecl (exp);
2517 tree arg, type;
2518 machine_mode mode;
2519 rtx op0, op1, op2;
2520 location_t loc = EXPR_LOCATION (exp);
2521
2522 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2523 return NULL_RTX;
2524
2525 arg = CALL_EXPR_ARG (exp, 0);
2526 type = TREE_TYPE (arg);
2527 mode = TYPE_MODE (TREE_TYPE (arg));
2528
2529 /* Try expanding via a sincos optab; fall back to emitting a libcall
2530 to sincos or cexp. We know one of those is available because a
2531 cexpi call is only generated when sincos or cexp is available. */
2532 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2533 {
2534 op1 = gen_reg_rtx (mode);
2535 op2 = gen_reg_rtx (mode);
2536
2537 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2538
2539 /* Compute into op1 and op2. */
2540 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2541 }
2542 else if (targetm.libc_has_function (function_sincos))
2543 {
2544 tree call, fn = NULL_TREE;
2545 tree top1, top2;
2546 rtx op1a, op2a;
2547
2548 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2549 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2550 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2551 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2552 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2553 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2554 else
2555 gcc_unreachable ();
2556
2557 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2558 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2559 op1a = copy_addr_to_reg (XEXP (op1, 0));
2560 op2a = copy_addr_to_reg (XEXP (op2, 0));
2561 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2562 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2563
2564 /* Make sure not to fold the sincos call again. */
2565 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2566 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2567 call, 3, arg, top1, top2));
2568 }
2569 else
2570 {
2571 tree call, fn = NULL_TREE, narg;
2572 tree ctype = build_complex_type (type);
2573
2574 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2575 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2576 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2577 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2578 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2579 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2580 else
2581 gcc_unreachable ();
2582
2583 /* If we don't have a decl for cexp, create one. This is the
2584 friendliest fallback if the user calls __builtin_cexpi
2585 without full C99 function support on the target. */
2586 if (fn == NULL_TREE)
2587 {
2588 tree fntype;
2589 const char *name = NULL;
2590
2591 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2592 name = "cexpf";
2593 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2594 name = "cexp";
2595 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2596 name = "cexpl";
2597
2598 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2599 fn = build_fn_decl (name, fntype);
2600 }
2601
2602 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2603 build_real (type, dconst0), arg);
2604
2605 /* Make sure not to fold the cexp call again. */
2606 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2607 return expand_expr (build_call_nary (ctype, call, 1, narg),
2608 target, VOIDmode, EXPAND_NORMAL);
2609 }
2610
2611 /* Now build the proper return type. */
2612 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2613 make_tree (TREE_TYPE (arg), op2),
2614 make_tree (TREE_TYPE (arg), op1)),
2615 target, VOIDmode, EXPAND_NORMAL);
2616 }
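
/* As a sketch of the paths above (illustrative only), __builtin_cexpi (x)
   computes cos (x) + i * sin (x) and is expanded either via the sincos
   optab, via a libcall to sincos, or as cexp (0.0 + x * i) when only
   cexp is available.  */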
2617
2618 /* Conveniently construct a function call expression. FNDECL names the
2619 function to be called, N is the number of arguments, and the "..."
2620 parameters are the argument expressions. Unlike build_call_expr,
2621 this doesn't fold the call, so it will always return a CALL_EXPR. */
2622
2623 static tree
2624 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2625 {
2626 va_list ap;
2627 tree fntype = TREE_TYPE (fndecl);
2628 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2629
2630 va_start (ap, n);
2631 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2632 va_end (ap);
2633 SET_EXPR_LOCATION (fn, loc);
2634 return fn;
2635 }
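
/* Example usage (illustrative): building an unfolded call to memcpy with
   three arguments would look like

     call = build_call_nofold_loc (loc, memcpy_decl, 3, dest, src, len);

   where memcpy_decl, dest, src and len are assumed trees.  */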
2636
2637 /* Expand a call to one of the builtin rounding functions gcc defines
2638 as an extension (lfloor and lceil). As these are gcc extensions we
2639 do not need to worry about setting errno to EDOM.
2640 If expanding via optab fails, lower expression to (int)(floor(x)).
2641 EXP is the expression that is a call to the builtin function;
2642 if convenient, the result should be placed in TARGET. */
2643
2644 static rtx
2645 expand_builtin_int_roundingfn (tree exp, rtx target)
2646 {
2647 convert_optab builtin_optab;
2648 rtx op0, tmp;
2649 rtx_insn *insns;
2650 tree fndecl = get_callee_fndecl (exp);
2651 enum built_in_function fallback_fn;
2652 tree fallback_fndecl;
2653 machine_mode mode;
2654 tree arg;
2655
2656 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2657 gcc_unreachable ();
2658
2659 arg = CALL_EXPR_ARG (exp, 0);
2660
2661 switch (DECL_FUNCTION_CODE (fndecl))
2662 {
2663 CASE_FLT_FN (BUILT_IN_ICEIL):
2664 CASE_FLT_FN (BUILT_IN_LCEIL):
2665 CASE_FLT_FN (BUILT_IN_LLCEIL):
2666 builtin_optab = lceil_optab;
2667 fallback_fn = BUILT_IN_CEIL;
2668 break;
2669
2670 CASE_FLT_FN (BUILT_IN_IFLOOR):
2671 CASE_FLT_FN (BUILT_IN_LFLOOR):
2672 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2673 builtin_optab = lfloor_optab;
2674 fallback_fn = BUILT_IN_FLOOR;
2675 break;
2676
2677 default:
2678 gcc_unreachable ();
2679 }
2680
2681 /* Make a suitable register to place result in. */
2682 mode = TYPE_MODE (TREE_TYPE (exp));
2683
2684 target = gen_reg_rtx (mode);
2685
2686 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2687 need to expand the argument again. This way, we will not perform
2688 side-effects more than once. */
2689 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2690
2691 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2692
2693 start_sequence ();
2694
2695 /* Compute into TARGET. */
2696 if (expand_sfix_optab (target, op0, builtin_optab))
2697 {
2698 /* Output the entire sequence. */
2699 insns = get_insns ();
2700 end_sequence ();
2701 emit_insn (insns);
2702 return target;
2703 }
2704
2705 /* If we were unable to expand via the builtin, stop the sequence
2706 (without outputting the insns). */
2707 end_sequence ();
2708
2709 /* Fall back to floating point rounding optab. */
2710 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2711
2712 /* For non-C99 targets we may end up without a fallback fndecl here
2713 if the user called __builtin_lfloor directly. In this case emit
2714 a call to the floor/ceil variants nevertheless. This should result
2715 in the best user experience for targets that are not fully C99. */
2716 if (fallback_fndecl == NULL_TREE)
2717 {
2718 tree fntype;
2719 const char *name = NULL;
2720
2721 switch (DECL_FUNCTION_CODE (fndecl))
2722 {
2723 case BUILT_IN_ICEIL:
2724 case BUILT_IN_LCEIL:
2725 case BUILT_IN_LLCEIL:
2726 name = "ceil";
2727 break;
2728 case BUILT_IN_ICEILF:
2729 case BUILT_IN_LCEILF:
2730 case BUILT_IN_LLCEILF:
2731 name = "ceilf";
2732 break;
2733 case BUILT_IN_ICEILL:
2734 case BUILT_IN_LCEILL:
2735 case BUILT_IN_LLCEILL:
2736 name = "ceill";
2737 break;
2738 case BUILT_IN_IFLOOR:
2739 case BUILT_IN_LFLOOR:
2740 case BUILT_IN_LLFLOOR:
2741 name = "floor";
2742 break;
2743 case BUILT_IN_IFLOORF:
2744 case BUILT_IN_LFLOORF:
2745 case BUILT_IN_LLFLOORF:
2746 name = "floorf";
2747 break;
2748 case BUILT_IN_IFLOORL:
2749 case BUILT_IN_LFLOORL:
2750 case BUILT_IN_LLFLOORL:
2751 name = "floorl";
2752 break;
2753 default:
2754 gcc_unreachable ();
2755 }
2756
2757 fntype = build_function_type_list (TREE_TYPE (arg),
2758 TREE_TYPE (arg), NULL_TREE);
2759 fallback_fndecl = build_fn_decl (name, fntype);
2760 }
2761
2762 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2763
2764 tmp = expand_normal (exp);
2765 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2766
2767 /* Truncate the result of the floating-point optab to an integer
2768 via expand_fix (). */
2769 target = gen_reg_rtx (mode);
2770 expand_fix (target, tmp, 0);
2771
2772 return target;
2773 }
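
/* As a sketch of the fallback above (illustrative only), when no
   lceil/lfloor optab is available

     long l = __builtin_lfloor (x);

   is lowered to roughly

     long l = (long) floor (x);

   using floorf/floorl (or the ceil variants) for the other types.  */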
2774
2775 /* Expand a call to one of the builtin math functions doing integer
2776 conversion (lrint).
2777 Return 0 if a normal call should be emitted rather than expanding the
2778 function in-line. EXP is the expression that is a call to the builtin
2779 function; if convenient, the result should be placed in TARGET. */
2780
2781 static rtx
2782 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2783 {
2784 convert_optab builtin_optab;
2785 rtx op0;
2786 rtx_insn *insns;
2787 tree fndecl = get_callee_fndecl (exp);
2788 tree arg;
2789 machine_mode mode;
2790 enum built_in_function fallback_fn = BUILT_IN_NONE;
2791
2792 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2793 gcc_unreachable ();
2794
2795 arg = CALL_EXPR_ARG (exp, 0);
2796
2797 switch (DECL_FUNCTION_CODE (fndecl))
2798 {
2799 CASE_FLT_FN (BUILT_IN_IRINT):
2800 fallback_fn = BUILT_IN_LRINT;
2801 /* FALLTHRU */
2802 CASE_FLT_FN (BUILT_IN_LRINT):
2803 CASE_FLT_FN (BUILT_IN_LLRINT):
2804 builtin_optab = lrint_optab;
2805 break;
2806
2807 CASE_FLT_FN (BUILT_IN_IROUND):
2808 fallback_fn = BUILT_IN_LROUND;
2809 /* FALLTHRU */
2810 CASE_FLT_FN (BUILT_IN_LROUND):
2811 CASE_FLT_FN (BUILT_IN_LLROUND):
2812 builtin_optab = lround_optab;
2813 break;
2814
2815 default:
2816 gcc_unreachable ();
2817 }
2818
2819 /* There's no easy way to detect the case we need to set EDOM. */
2820 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2821 return NULL_RTX;
2822
2823 /* Make a suitable register to place result in. */
2824 mode = TYPE_MODE (TREE_TYPE (exp));
2825
2826 /* Only expand inline when we do not need to maintain errno. */
2827 if (!flag_errno_math)
2828 {
2829 rtx result = gen_reg_rtx (mode);
2830
2831 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2832 need to expand the argument again. This way, we will not perform
2833 side-effects more than once. */
2834 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2835
2836 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2837
2838 start_sequence ();
2839
2840 if (expand_sfix_optab (result, op0, builtin_optab))
2841 {
2842 /* Output the entire sequence. */
2843 insns = get_insns ();
2844 end_sequence ();
2845 emit_insn (insns);
2846 return result;
2847 }
2848
2849 /* If we were unable to expand via the builtin, stop the sequence
2850 (without outputting the insns) and call the library function
2851 with the stabilized argument list. */
2852 end_sequence ();
2853 }
2854
2855 if (fallback_fn != BUILT_IN_NONE)
2856 {
2857 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2858 targets, (int) round (x) should never be transformed into
2859 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2860 a call to lround in the hope that the target provides at least some
2861 C99 functions. This should result in the best user experience for
2862 targets that are not fully C99. */
2863 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2864 fallback_fn, 0);
2865
2866 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2867 fallback_fndecl, 1, arg);
2868
2869 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2870 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2871 return convert_to_mode (mode, target, 0);
2872 }
2873
2874 return expand_call (exp, target, target == const0_rtx);
2875 }
2876
2877 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2878 a normal call should be emitted rather than expanding the function
2879 in-line. EXP is the expression that is a call to the builtin
2880 function; if convenient, the result should be placed in TARGET. */
2881
2882 static rtx
2883 expand_builtin_powi (tree exp, rtx target)
2884 {
2885 tree arg0, arg1;
2886 rtx op0, op1;
2887 machine_mode mode;
2888 machine_mode mode2;
2889
2890 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2891 return NULL_RTX;
2892
2893 arg0 = CALL_EXPR_ARG (exp, 0);
2894 arg1 = CALL_EXPR_ARG (exp, 1);
2895 mode = TYPE_MODE (TREE_TYPE (exp));
2896
2897 /* Emit a libcall to libgcc. */
2898
2899 /* Mode of the 2nd argument must match that of an int. */
2900 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2901
2902 if (target == NULL_RTX)
2903 target = gen_reg_rtx (mode);
2904
2905 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2906 if (GET_MODE (op0) != mode)
2907 op0 = convert_to_mode (mode, op0, 0);
2908 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2909 if (GET_MODE (op1) != mode2)
2910 op1 = convert_to_mode (mode2, op1, 0);
2911
2912 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2913 target, LCT_CONST, mode, 2,
2914 op0, mode, op1, mode2);
2915
2916 return target;
2917 }
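
/* For illustration, on typical targets the libcall emitted above for

     double r = __builtin_powi (x, n);

   is to the libgcc helper __powidf2 (with __powisf2, __powixf2, etc.
   for the other modes).  */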
2918
2919 /* Expand expression EXP, which is a call to the strlen builtin. Return
2920 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2921 try to get the result in TARGET, if convenient. */
2922
2923 static rtx
2924 expand_builtin_strlen (tree exp, rtx target,
2925 machine_mode target_mode)
2926 {
2927 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2928 return NULL_RTX;
2929 else
2930 {
2931 struct expand_operand ops[4];
2932 rtx pat;
2933 tree len;
2934 tree src = CALL_EXPR_ARG (exp, 0);
2935 rtx src_reg;
2936 rtx_insn *before_strlen;
2937 machine_mode insn_mode = target_mode;
2938 enum insn_code icode = CODE_FOR_nothing;
2939 unsigned int align;
2940
2941 /* If the length can be computed at compile-time, return it. */
2942 len = c_strlen (src, 0);
2943 if (len)
2944 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2945
2946 /* If the length can be computed at compile-time and is a constant
2947 integer, but there are side-effects in src, evaluate
2948 src for side-effects, then return len.
2949 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2950 can be optimized into: i++; x = 3; */
2951 len = c_strlen (src, 1);
2952 if (len && TREE_CODE (len) == INTEGER_CST)
2953 {
2954 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2955 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2956 }
2957
2958 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2959
2960 /* If SRC is not a pointer type, don't do this operation inline. */
2961 if (align == 0)
2962 return NULL_RTX;
2963
2964 /* Bail out if we can't compute strlen in the right mode. */
2965 while (insn_mode != VOIDmode)
2966 {
2967 icode = optab_handler (strlen_optab, insn_mode);
2968 if (icode != CODE_FOR_nothing)
2969 break;
2970
2971 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2972 }
2973 if (insn_mode == VOIDmode)
2974 return NULL_RTX;
2975
2976 /* Make a place to hold the source address. We will not expand
2977 the actual source until we are sure that the expansion will
2978 not fail -- there are trees that cannot be expanded twice. */
2979 src_reg = gen_reg_rtx (Pmode);
2980
2981 /* Mark the beginning of the strlen sequence so we can emit the
2982 source operand later. */
2983 before_strlen = get_last_insn ();
2984
2985 create_output_operand (&ops[0], target, insn_mode);
2986 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2987 create_integer_operand (&ops[2], 0);
2988 create_integer_operand (&ops[3], align);
2989 if (!maybe_expand_insn (icode, 4, ops))
2990 return NULL_RTX;
2991
2992 /* Now that we are assured of success, expand the source. */
2993 start_sequence ();
2994 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2995 if (pat != src_reg)
2996 {
2997 #ifdef POINTERS_EXTEND_UNSIGNED
2998 if (GET_MODE (pat) != Pmode)
2999 pat = convert_to_mode (Pmode, pat,
3000 POINTERS_EXTEND_UNSIGNED);
3001 #endif
3002 emit_move_insn (src_reg, pat);
3003 }
3004 pat = get_insns ();
3005 end_sequence ();
3006
3007 if (before_strlen)
3008 emit_insn_after (pat, before_strlen);
3009 else
3010 emit_insn_before (pat, get_insns ());
3011
3012 /* Return the value in the proper mode for this function. */
3013 if (GET_MODE (ops[0].value) == target_mode)
3014 target = ops[0].value;
3015 else if (target != 0)
3016 convert_move (target, ops[0].value, 0);
3017 else
3018 target = convert_to_mode (target_mode, ops[0].value, 0);
3019
3020 return target;
3021 }
3022 }
3023
3024 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3025 bytes from constant string DATA + OFFSET and return it as a target
3026 constant. */
3027
3028 static rtx
3029 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3030 machine_mode mode)
3031 {
3032 const char *str = (const char *) data;
3033
3034 gcc_assert (offset >= 0
3035 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3036 <= strlen (str) + 1));
3037
3038 return c_readstr (str + offset, mode);
3039 }
3040
3041 /* LEN specifies the length of the block for a memcpy/memset operation.
3042 Figure out its range and store it in MIN_SIZE/MAX_SIZE. In some cases
3043 we can make a very likely guess about the maximum size, which we then
3044 store in PROBABLE_MAX_SIZE. */
3045
3046 static void
3047 determine_block_size (tree len, rtx len_rtx,
3048 unsigned HOST_WIDE_INT *min_size,
3049 unsigned HOST_WIDE_INT *max_size,
3050 unsigned HOST_WIDE_INT *probable_max_size)
3051 {
3052 if (CONST_INT_P (len_rtx))
3053 {
3054 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3055 return;
3056 }
3057 else
3058 {
3059 wide_int min, max;
3060 enum value_range_type range_type = VR_UNDEFINED;
3061
3062 /* Determine bounds from the type. */
3063 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3064 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3065 else
3066 *min_size = 0;
3067 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3068 *probable_max_size = *max_size
3069 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3070 else
3071 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3072
3073 if (TREE_CODE (len) == SSA_NAME)
3074 range_type = get_range_info (len, &min, &max);
3075 if (range_type == VR_RANGE)
3076 {
3077 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3078 *min_size = min.to_uhwi ();
3079 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3080 *probable_max_size = *max_size = max.to_uhwi ();
3081 }
3082 else if (range_type == VR_ANTI_RANGE)
3083 {
3084 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3085 if (min == 0)
3086 {
3087 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3088 *min_size = max.to_uhwi () + 1;
3089 }
3090 /* Code like
3091
3092 int n;
3093 if (n < 100)
3094 memcpy (a, b, n)
3095
3096 produces an anti-range allowing negative values of N. We can still
3097 use that information and guess that N is not negative.
3098 */
3099 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3100 *probable_max_size = min.to_uhwi () - 1;
3101 }
3102 }
3103 gcc_checking_assert (*max_size <=
3104 (unsigned HOST_WIDE_INT)
3105 GET_MODE_MASK (GET_MODE (len_rtx)));
3106 }
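
/* For example (illustrative), if LEN is an SSA_NAME whose recorded value
   range is [16, 128], the code above sets *MIN_SIZE to 16 and both
   *MAX_SIZE and *PROBABLE_MAX_SIZE to 128.  */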
3107
3108 /* Helper function to do the actual work for expand_builtin_memcpy. */
3109
3110 static rtx
3111 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3112 {
3113 const char *src_str;
3114 unsigned int src_align = get_pointer_alignment (src);
3115 unsigned int dest_align = get_pointer_alignment (dest);
3116 rtx dest_mem, src_mem, dest_addr, len_rtx;
3117 HOST_WIDE_INT expected_size = -1;
3118 unsigned int expected_align = 0;
3119 unsigned HOST_WIDE_INT min_size;
3120 unsigned HOST_WIDE_INT max_size;
3121 unsigned HOST_WIDE_INT probable_max_size;
3122
3123 /* If DEST is not a pointer type, call the normal function. */
3124 if (dest_align == 0)
3125 return NULL_RTX;
3126
3127 /* If SRC is not a pointer type, don't do this
3128 operation in-line. */
3129 if (src_align == 0)
3130 return NULL_RTX;
3131
3132 if (currently_expanding_gimple_stmt)
3133 stringop_block_profile (currently_expanding_gimple_stmt,
3134 &expected_align, &expected_size);
3135
3136 if (expected_align < dest_align)
3137 expected_align = dest_align;
3138 dest_mem = get_memory_rtx (dest, len);
3139 set_mem_align (dest_mem, dest_align);
3140 len_rtx = expand_normal (len);
3141 determine_block_size (len, len_rtx, &min_size, &max_size,
3142 &probable_max_size);
3143 src_str = c_getstr (src);
3144
3145 /* If SRC is a string constant and block move would be done
3146 by pieces, we can avoid loading the string from memory
3147 and only store the computed constants. */
3148 if (src_str
3149 && CONST_INT_P (len_rtx)
3150 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3151 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3152 CONST_CAST (char *, src_str),
3153 dest_align, false))
3154 {
3155 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3156 builtin_memcpy_read_str,
3157 CONST_CAST (char *, src_str),
3158 dest_align, false, 0);
3159 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3160 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3161 return dest_mem;
3162 }
3163
3164 src_mem = get_memory_rtx (src, len);
3165 set_mem_align (src_mem, src_align);
3166
3167 /* Copy word part most expediently. */
3168 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3169 CALL_EXPR_TAILCALL (exp)
3170 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3171 expected_align, expected_size,
3172 min_size, max_size, probable_max_size);
3173
3174 if (dest_addr == 0)
3175 {
3176 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3177 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3178 }
3179
3180 return dest_addr;
3181 }
3182
3183 /* Expand a call EXP to the memcpy builtin.
3184 Return NULL_RTX if we failed; the caller should emit a normal call;
3185 otherwise try to get the result in TARGET, if convenient (and in
3186 mode MODE if that's convenient). */
3187
3188 static rtx
3189 expand_builtin_memcpy (tree exp, rtx target)
3190 {
3191 if (!validate_arglist (exp,
3192 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3193 return NULL_RTX;
3194 else
3195 {
3196 tree dest = CALL_EXPR_ARG (exp, 0);
3197 tree src = CALL_EXPR_ARG (exp, 1);
3198 tree len = CALL_EXPR_ARG (exp, 2);
3199 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3200 }
3201 }
3202
3203 /* Expand an instrumented call EXP to the memcpy builtin.
3204 Return NULL_RTX if we failed; the caller should emit a normal call;
3205 otherwise try to get the result in TARGET, if convenient (and in
3206 mode MODE if that's convenient). */
3207
3208 static rtx
3209 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3210 {
3211 if (!validate_arglist (exp,
3212 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3213 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3214 INTEGER_TYPE, VOID_TYPE))
3215 return NULL_RTX;
3216 else
3217 {
3218 tree dest = CALL_EXPR_ARG (exp, 0);
3219 tree src = CALL_EXPR_ARG (exp, 2);
3220 tree len = CALL_EXPR_ARG (exp, 4);
3221 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3222
3223 /* Return src bounds with the result. */
3224 if (res)
3225 {
3226 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3227 expand_normal (CALL_EXPR_ARG (exp, 1)));
3228 res = chkp_join_splitted_slot (res, bnd);
3229 }
3230 return res;
3231 }
3232 }
3233
3234 /* Expand a call EXP to the mempcpy builtin.
3235 Return NULL_RTX if we failed; the caller should emit a normal call,
3236 otherwise try to get the result in TARGET, if convenient (and in
3237 mode MODE if that's convenient). If ENDP is 0 return the
3238 destination pointer, if ENDP is 1 return the end pointer ala
3239 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3240 stpcpy. */
3241
3242 static rtx
3243 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3244 {
3245 if (!validate_arglist (exp,
3246 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3247 return NULL_RTX;
3248 else
3249 {
3250 tree dest = CALL_EXPR_ARG (exp, 0);
3251 tree src = CALL_EXPR_ARG (exp, 1);
3252 tree len = CALL_EXPR_ARG (exp, 2);
3253 return expand_builtin_mempcpy_args (dest, src, len,
3254 target, mode, /*endp=*/ 1,
3255 exp);
3256 }
3257 }
3258
3259 /* Expand an instrumented call EXP to the mempcpy builtin.
3260 Return NULL_RTX if we failed; the caller should emit a normal call;
3261 otherwise try to get the result in TARGET, if convenient (and in
3262 mode MODE if that's convenient). */
3263
3264 static rtx
3265 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3266 {
3267 if (!validate_arglist (exp,
3268 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3269 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3270 INTEGER_TYPE, VOID_TYPE))
3271 return NULL_RTX;
3272 else
3273 {
3274 tree dest = CALL_EXPR_ARG (exp, 0);
3275 tree src = CALL_EXPR_ARG (exp, 2);
3276 tree len = CALL_EXPR_ARG (exp, 4);
3277 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3278 mode, 1, exp);
3279
3280 /* Return src bounds with the result. */
3281 if (res)
3282 {
3283 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3284 expand_normal (CALL_EXPR_ARG (exp, 1)));
3285 res = chkp_join_splitted_slot (res, bnd);
3286 }
3287 return res;
3288 }
3289 }
3290
3291 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3292 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3293 so that this can also be called without constructing an actual CALL_EXPR.
3294 The other arguments and return value are the same as for
3295 expand_builtin_mempcpy. */
3296
3297 static rtx
3298 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3299 rtx target, machine_mode mode, int endp,
3300 tree orig_exp)
3301 {
3302 tree fndecl = get_callee_fndecl (orig_exp);
3303
3304 /* If return value is ignored, transform mempcpy into memcpy. */
3305 if (target == const0_rtx
3306 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3307 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3308 {
3309 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3310 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3311 dest, src, len);
3312 return expand_expr (result, target, mode, EXPAND_NORMAL);
3313 }
3314 else if (target == const0_rtx
3315 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3316 {
3317 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3318 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3319 dest, src, len);
3320 return expand_expr (result, target, mode, EXPAND_NORMAL);
3321 }
3322 else
3323 {
3324 const char *src_str;
3325 unsigned int src_align = get_pointer_alignment (src);
3326 unsigned int dest_align = get_pointer_alignment (dest);
3327 rtx dest_mem, src_mem, len_rtx;
3328
3329 /* If either SRC or DEST is not a pointer type, don't do this
3330 operation in-line. */
3331 if (dest_align == 0 || src_align == 0)
3332 return NULL_RTX;
3333
3334 /* If LEN is not constant, call the normal function. */
3335 if (! tree_fits_uhwi_p (len))
3336 return NULL_RTX;
3337
3338 len_rtx = expand_normal (len);
3339 src_str = c_getstr (src);
3340
3341 /* If SRC is a string constant and block move would be done
3342 by pieces, we can avoid loading the string from memory
3343 and only store the computed constants. */
3344 if (src_str
3345 && CONST_INT_P (len_rtx)
3346 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3347 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3348 CONST_CAST (char *, src_str),
3349 dest_align, false))
3350 {
3351 dest_mem = get_memory_rtx (dest, len);
3352 set_mem_align (dest_mem, dest_align);
3353 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3354 builtin_memcpy_read_str,
3355 CONST_CAST (char *, src_str),
3356 dest_align, false, endp);
3357 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3358 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3359 return dest_mem;
3360 }
3361
3362 if (CONST_INT_P (len_rtx)
3363 && can_move_by_pieces (INTVAL (len_rtx),
3364 MIN (dest_align, src_align)))
3365 {
3366 dest_mem = get_memory_rtx (dest, len);
3367 set_mem_align (dest_mem, dest_align);
3368 src_mem = get_memory_rtx (src, len);
3369 set_mem_align (src_mem, src_align);
3370 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3371 MIN (dest_align, src_align), endp);
3372 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3373 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3374 return dest_mem;
3375 }
3376
3377 return NULL_RTX;
3378 }
3379 }
3380
3381 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3382 we failed, the caller should emit a normal call, otherwise try to
3383 get the result in TARGET, if convenient. If ENDP is 0 return the
3384 destination pointer, if ENDP is 1 return the end pointer ala
3385 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3386 stpcpy. */
3387
3388 static rtx
3389 expand_movstr (tree dest, tree src, rtx target, int endp)
3390 {
3391 struct expand_operand ops[3];
3392 rtx dest_mem;
3393 rtx src_mem;
3394
3395 if (!targetm.have_movstr ())
3396 return NULL_RTX;
3397
3398 dest_mem = get_memory_rtx (dest, NULL);
3399 src_mem = get_memory_rtx (src, NULL);
3400 if (!endp)
3401 {
3402 target = force_reg (Pmode, XEXP (dest_mem, 0));
3403 dest_mem = replace_equiv_address (dest_mem, target);
3404 }
3405
3406 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3407 create_fixed_operand (&ops[1], dest_mem);
3408 create_fixed_operand (&ops[2], src_mem);
3409 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3410 return NULL_RTX;
3411
3412 if (endp && target != const0_rtx)
3413 {
3414 target = ops[0].value;
3415 /* movstr is supposed to set end to the address of the NUL
3416 terminator. If the caller requested a mempcpy-like return value,
3417 adjust it. */
3418 if (endp == 1)
3419 {
3420 rtx tem = plus_constant (GET_MODE (target),
3421 gen_lowpart (GET_MODE (target), target), 1);
3422 emit_move_insn (target, force_operand (tem, NULL_RTX));
3423 }
3424 }
3425 return target;
3426 }
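
/* Editorial illustration, not part of GCC: movstr leaves the address of the
   copied NUL terminator in its output operand, so the ENDP == 1 case above
   only has to add one to obtain the mempcpy-style value.  A host-level
   sketch of that relationship (illustrative helper, unused by GCC):  */

static ATTRIBUTE_UNUSED char *
example_movstr_result (char *dest, const char *src, int endp)
{
  size_t len = strlen (src);
  memcpy (dest, src, len + 1);          /* copy including the NUL.  */
  char *nul = dest + len;               /* what movstr's output points at.  */
  return endp == 1 ? nul + 1 : nul;     /* mempcpy-like vs stpcpy-like.  */
}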
3427
3428 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3429 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3430 try to get the result in TARGET, if convenient (and in mode MODE if that's
3431 convenient). */
3432
3433 static rtx
3434 expand_builtin_strcpy (tree exp, rtx target)
3435 {
3436 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3437 {
3438 tree dest = CALL_EXPR_ARG (exp, 0);
3439 tree src = CALL_EXPR_ARG (exp, 1);
3440 return expand_builtin_strcpy_args (dest, src, target);
3441 }
3442 return NULL_RTX;
3443 }
3444
3445 /* Helper function to do the actual work for expand_builtin_strcpy. The
3446 arguments to the builtin_strcpy call DEST and SRC are broken out
3447 so that this can also be called without constructing an actual CALL_EXPR.
3448 The other arguments and return value are the same as for
3449 expand_builtin_strcpy. */
3450
3451 static rtx
3452 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3453 {
3454 return expand_movstr (dest, src, target, /*endp=*/0);
3455 }
3456
3457 /* Expand a call EXP to the stpcpy builtin.
3458 Return NULL_RTX if we failed; the caller should emit a normal call,
3459 otherwise try to get the result in TARGET, if convenient (and in
3460 mode MODE if that's convenient). */
3461
3462 static rtx
3463 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3464 {
3465 tree dst, src;
3466 location_t loc = EXPR_LOCATION (exp);
3467
3468 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3469 return NULL_RTX;
3470
3471 dst = CALL_EXPR_ARG (exp, 0);
3472 src = CALL_EXPR_ARG (exp, 1);
3473
3474 /* If return value is ignored, transform stpcpy into strcpy. */
3475 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3476 {
3477 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3478 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3479 return expand_expr (result, target, mode, EXPAND_NORMAL);
3480 }
3481 else
3482 {
3483 tree len, lenp1;
3484 rtx ret;
3485
3486 /* Ensure we get an actual string whose length can be evaluated at
3487 compile-time, not an expression containing a string. This is
3488 because the latter will potentially produce pessimized code
3489 when used to produce the return value. */
3490 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3491 return expand_movstr (dst, src, target, /*endp=*/2);
3492
3493 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3494 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3495 target, mode, /*endp=*/2,
3496 exp);
3497
3498 if (ret)
3499 return ret;
3500
3501 if (TREE_CODE (len) == INTEGER_CST)
3502 {
3503 rtx len_rtx = expand_normal (len);
3504
3505 if (CONST_INT_P (len_rtx))
3506 {
3507 ret = expand_builtin_strcpy_args (dst, src, target);
3508
3509 if (ret)
3510 {
3511 if (! target)
3512 {
3513 if (mode != VOIDmode)
3514 target = gen_reg_rtx (mode);
3515 else
3516 target = gen_reg_rtx (GET_MODE (ret));
3517 }
3518 if (GET_MODE (target) != GET_MODE (ret))
3519 ret = gen_lowpart (GET_MODE (target), ret);
3520
3521 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3522 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3523 gcc_assert (ret);
3524
3525 return target;
3526 }
3527 }
3528 }
3529
3530 return expand_movstr (dst, src, target, /*endp=*/2);
3531 }
3532 }
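
/* Editorial illustration, not part of GCC: when the source string and hence
   its length are known, the stpcpy result is just DST plus that length,
   which is why the code above can fall back to a plain copy followed by
   pointer arithmetic.  Sketch (illustrative helper, unused by GCC):  */

static ATTRIBUTE_UNUSED char *
example_stpcpy_constant_src (char *dest, const char *src)
{
  size_t len = strlen (src);
  memcpy (dest, src, len + 1);   /* copy LEN characters plus the NUL.  */
  return dest + len;             /* stpcpy's value: address of the copied NUL.  */
}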
3533
3534 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3535 bytes from constant string DATA + OFFSET and return it as target
3536 constant. */
3537
3538 rtx
3539 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3540 machine_mode mode)
3541 {
3542 const char *str = (const char *) data;
3543
3544 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3545 return const0_rtx;
3546
3547 return c_readstr (str + offset, mode);
3548 }
3549
3550 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3551 NULL_RTX if we failed; the caller should emit a normal call. */
3552
3553 static rtx
3554 expand_builtin_strncpy (tree exp, rtx target)
3555 {
3556 location_t loc = EXPR_LOCATION (exp);
3557
3558 if (validate_arglist (exp,
3559 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3560 {
3561 tree dest = CALL_EXPR_ARG (exp, 0);
3562 tree src = CALL_EXPR_ARG (exp, 1);
3563 tree len = CALL_EXPR_ARG (exp, 2);
3564 tree slen = c_strlen (src, 1);
3565
3566 /* We must be passed a constant len and src parameter. */
3567 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3568 return NULL_RTX;
3569
3570 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3571
3572 /* We're required to pad with trailing zeros if the requested
3573 len is greater than strlen(s2)+1. In that case try to
3574 use store_by_pieces; if it fails, punt. */
3575 if (tree_int_cst_lt (slen, len))
3576 {
3577 unsigned int dest_align = get_pointer_alignment (dest);
3578 const char *p = c_getstr (src);
3579 rtx dest_mem;
3580
3581 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3582 || !can_store_by_pieces (tree_to_uhwi (len),
3583 builtin_strncpy_read_str,
3584 CONST_CAST (char *, p),
3585 dest_align, false))
3586 return NULL_RTX;
3587
3588 dest_mem = get_memory_rtx (dest, len);
3589 store_by_pieces (dest_mem, tree_to_uhwi (len),
3590 builtin_strncpy_read_str,
3591 CONST_CAST (char *, p), dest_align, false, 0);
3592 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3593 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3594 return dest_mem;
3595 }
3596 }
3597 return NULL_RTX;
3598 }
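
/* Editorial illustration, not part of GCC: the store_by_pieces path above
   implements the ISO C requirement that strncpy zero-fill the destination
   when LEN exceeds the source length.  For example (illustrative only):  */

static ATTRIBUTE_UNUSED void
example_strncpy_padding (char *buf5)
{
  /* Assuming BUF5 points to at least five writable bytes, this stores
     'a', 'b', '\0', '\0', '\0' -- two characters and three bytes of
     zero padding.  */
  strncpy (buf5, "ab", 5);
}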
3599
3600 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3601 bytes from constant string DATA + OFFSET and return it as target
3602 constant. */
3603
3604 rtx
3605 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3606 machine_mode mode)
3607 {
3608 const char *c = (const char *) data;
3609 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3610
3611 memset (p, *c, GET_MODE_SIZE (mode));
3612
3613 return c_readstr (p, mode);
3614 }
3615
3616 /* Callback routine for store_by_pieces. Return the RTL of a register
3617 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3618 char value given in the RTL register data. For example, if mode is
3619 4 bytes wide, return the RTL for 0x01010101*data. */
3620
3621 static rtx
3622 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3623 machine_mode mode)
3624 {
3625 rtx target, coeff;
3626 size_t size;
3627 char *p;
3628
3629 size = GET_MODE_SIZE (mode);
3630 if (size == 1)
3631 return (rtx) data;
3632
3633 p = XALLOCAVEC (char, size);
3634 memset (p, 1, size);
3635 coeff = c_readstr (p, mode);
3636
3637 target = convert_to_mode (mode, (rtx) data, 1);
3638 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3639 return force_reg (mode, target);
3640 }
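
/* Editorial illustration, not part of GCC: the multiplication by a repeated
   0x01 coefficient above replicates the fill byte across the word, e.g. a
   4-byte mode and fill byte 0x5A yield 0x5A * 0x01010101 == 0x5A5A5A5A.
   Host-side sketch of the same arithmetic, assuming a 32-bit unsigned int
   (illustrative helper, unused by GCC):  */

static ATTRIBUTE_UNUSED unsigned int
example_replicate_byte (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;   /* C copied into all four bytes.  */
}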
3641
3642 /* Expand expression EXP, which is a call to the memset builtin. Return
3643 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3644 try to get the result in TARGET, if convenient (and in mode MODE if that's
3645 convenient). */
3646
3647 static rtx
3648 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3649 {
3650 if (!validate_arglist (exp,
3651 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3652 return NULL_RTX;
3653 else
3654 {
3655 tree dest = CALL_EXPR_ARG (exp, 0);
3656 tree val = CALL_EXPR_ARG (exp, 1);
3657 tree len = CALL_EXPR_ARG (exp, 2);
3658 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3659 }
3660 }
3661
3662 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3663 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3664 try to get the result in TARGET, if convenient (and in mode MODE if that's
3665 convenient). */
3666
3667 static rtx
3668 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3669 {
3670 if (!validate_arglist (exp,
3671 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3672 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3673 return NULL_RTX;
3674 else
3675 {
3676 tree dest = CALL_EXPR_ARG (exp, 0);
3677 tree val = CALL_EXPR_ARG (exp, 2);
3678 tree len = CALL_EXPR_ARG (exp, 3);
3679 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3680
3681 /* Return src bounds with the result. */
3682 if (res)
3683 {
3684 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3685 expand_normal (CALL_EXPR_ARG (exp, 1)));
3686 res = chkp_join_splitted_slot (res, bnd);
3687 }
3688 return res;
3689 }
3690 }
3691
3692 /* Helper function to do the actual work for expand_builtin_memset. The
3693 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3694 so that this can also be called without constructing an actual CALL_EXPR.
3695 The other arguments and return value are the same as for
3696 expand_builtin_memset. */
3697
3698 static rtx
3699 expand_builtin_memset_args (tree dest, tree val, tree len,
3700 rtx target, machine_mode mode, tree orig_exp)
3701 {
3702 tree fndecl, fn;
3703 enum built_in_function fcode;
3704 machine_mode val_mode;
3705 char c;
3706 unsigned int dest_align;
3707 rtx dest_mem, dest_addr, len_rtx;
3708 HOST_WIDE_INT expected_size = -1;
3709 unsigned int expected_align = 0;
3710 unsigned HOST_WIDE_INT min_size;
3711 unsigned HOST_WIDE_INT max_size;
3712 unsigned HOST_WIDE_INT probable_max_size;
3713
3714 dest_align = get_pointer_alignment (dest);
3715
3716 /* If DEST is not a pointer type, don't do this operation in-line. */
3717 if (dest_align == 0)
3718 return NULL_RTX;
3719
3720 if (currently_expanding_gimple_stmt)
3721 stringop_block_profile (currently_expanding_gimple_stmt,
3722 &expected_align, &expected_size);
3723
3724 if (expected_align < dest_align)
3725 expected_align = dest_align;
3726
3727 /* If the LEN parameter is zero, return DEST. */
3728 if (integer_zerop (len))
3729 {
3730 /* Evaluate and ignore VAL in case it has side-effects. */
3731 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3732 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3733 }
3734
3735 /* Stabilize the arguments in case we fail. */
3736 dest = builtin_save_expr (dest);
3737 val = builtin_save_expr (val);
3738 len = builtin_save_expr (len);
3739
3740 len_rtx = expand_normal (len);
3741 determine_block_size (len, len_rtx, &min_size, &max_size,
3742 &probable_max_size);
3743 dest_mem = get_memory_rtx (dest, len);
3744 val_mode = TYPE_MODE (unsigned_char_type_node);
3745
3746 if (TREE_CODE (val) != INTEGER_CST)
3747 {
3748 rtx val_rtx;
3749
3750 val_rtx = expand_normal (val);
3751 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3752
3753 /* Assume that we can memset by pieces if we can store
3754 the coefficients by pieces (in the required modes).
3755 We can't pass builtin_memset_gen_str as that emits RTL. */
3756 c = 1;
3757 if (tree_fits_uhwi_p (len)
3758 && can_store_by_pieces (tree_to_uhwi (len),
3759 builtin_memset_read_str, &c, dest_align,
3760 true))
3761 {
3762 val_rtx = force_reg (val_mode, val_rtx);
3763 store_by_pieces (dest_mem, tree_to_uhwi (len),
3764 builtin_memset_gen_str, val_rtx, dest_align,
3765 true, 0);
3766 }
3767 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3768 dest_align, expected_align,
3769 expected_size, min_size, max_size,
3770 probable_max_size))
3771 goto do_libcall;
3772
3773 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3774 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3775 return dest_mem;
3776 }
3777
3778 if (target_char_cast (val, &c))
3779 goto do_libcall;
3780
3781 if (c)
3782 {
3783 if (tree_fits_uhwi_p (len)
3784 && can_store_by_pieces (tree_to_uhwi (len),
3785 builtin_memset_read_str, &c, dest_align,
3786 true))
3787 store_by_pieces (dest_mem, tree_to_uhwi (len),
3788 builtin_memset_read_str, &c, dest_align, true, 0);
3789 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3790 gen_int_mode (c, val_mode),
3791 dest_align, expected_align,
3792 expected_size, min_size, max_size,
3793 probable_max_size))
3794 goto do_libcall;
3795
3796 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3797 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3798 return dest_mem;
3799 }
3800
3801 set_mem_align (dest_mem, dest_align);
3802 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3803 CALL_EXPR_TAILCALL (orig_exp)
3804 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3805 expected_align, expected_size,
3806 min_size, max_size,
3807 probable_max_size);
3808
3809 if (dest_addr == 0)
3810 {
3811 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3812 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3813 }
3814
3815 return dest_addr;
3816
3817 do_libcall:
3818 fndecl = get_callee_fndecl (orig_exp);
3819 fcode = DECL_FUNCTION_CODE (fndecl);
3820 if (fcode == BUILT_IN_MEMSET
3821 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3822 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3823 dest, val, len);
3824 else if (fcode == BUILT_IN_BZERO)
3825 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3826 dest, len);
3827 else
3828 gcc_unreachable ();
3829 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3830 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3831 return expand_call (fn, target, target == const0_rtx);
3832 }
3833
3834 /* Expand expression EXP, which is a call to the bzero builtin. Return
3835 NULL_RTX if we failed; the caller should emit a normal call. */
3836
3837 static rtx
3838 expand_builtin_bzero (tree exp)
3839 {
3840 tree dest, size;
3841 location_t loc = EXPR_LOCATION (exp);
3842
3843 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3844 return NULL_RTX;
3845
3846 dest = CALL_EXPR_ARG (exp, 0);
3847 size = CALL_EXPR_ARG (exp, 1);
3848
3849 /* New argument list transforming bzero(ptr x, int y) to
3850 memset(ptr x, int 0, size_t y). This is done this way
3851 so that if it isn't expanded inline, we fall back to
3852 calling bzero instead of memset. */
3853
3854 return expand_builtin_memset_args (dest, integer_zero_node,
3855 fold_convert_loc (loc,
3856 size_type_node, size),
3857 const0_rtx, VOIDmode, exp);
3858 }
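
/* Editorial illustration, not part of GCC: the argument rewriting above
   treats a user-level bzero (p, n) exactly like the memset call below, so
   the rest of the expander only has to understand memset (illustrative
   helper, unused by GCC):  */

static ATTRIBUTE_UNUSED void
example_bzero_as_memset (void *p, int n)
{
  memset (p, 0, (size_t) n);   /* what bzero (p, n) is transformed into.  */
}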
3859
3860 /* Try to expand cmpstr operation ICODE with the given operands.
3861 Return the result rtx on success, otherwise return null. */
3862
3863 static rtx
3864 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3865 HOST_WIDE_INT align)
3866 {
3867 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3868
3869 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3870 target = NULL_RTX;
3871
3872 struct expand_operand ops[4];
3873 create_output_operand (&ops[0], target, insn_mode);
3874 create_fixed_operand (&ops[1], arg1_rtx);
3875 create_fixed_operand (&ops[2], arg2_rtx);
3876 create_integer_operand (&ops[3], align);
3877 if (maybe_expand_insn (icode, 4, ops))
3878 return ops[0].value;
3879 return NULL_RTX;
3880 }
3881
3882 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3883 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3884 otherwise return null. */
3885
3886 static rtx
3887 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3888 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3889 HOST_WIDE_INT align)
3890 {
3891 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3892
3893 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3894 target = NULL_RTX;
3895
3896 struct expand_operand ops[5];
3897 create_output_operand (&ops[0], target, insn_mode);
3898 create_fixed_operand (&ops[1], arg1_rtx);
3899 create_fixed_operand (&ops[2], arg2_rtx);
3900 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3901 TYPE_UNSIGNED (arg3_type));
3902 create_integer_operand (&ops[4], align);
3903 if (maybe_expand_insn (icode, 5, ops))
3904 return ops[0].value;
3905 return NULL_RTX;
3906 }
3907
3908 /* Expand expression EXP, which is a call to the memcmp built-in function.
3909 Return NULL_RTX if we failed and the caller should emit a normal call,
3910 otherwise try to get the result in TARGET, if convenient. */
3911
3912 static rtx
3913 expand_builtin_memcmp (tree exp, rtx target)
3914 {
3915 if (!validate_arglist (exp,
3916 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3917 return NULL_RTX;
3918
3919 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3920 implementing memcmp because it will stop if it encounters two
3921 zero bytes. */
3922 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3923 if (icode == CODE_FOR_nothing)
3924 return NULL_RTX;
3925
3926 tree arg1 = CALL_EXPR_ARG (exp, 0);
3927 tree arg2 = CALL_EXPR_ARG (exp, 1);
3928 tree len = CALL_EXPR_ARG (exp, 2);
3929
3930 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3931 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3932
3933 /* If we don't have POINTER_TYPE, call the function. */
3934 if (arg1_align == 0 || arg2_align == 0)
3935 return NULL_RTX;
3936
3937 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3938 location_t loc = EXPR_LOCATION (exp);
3939 rtx arg1_rtx = get_memory_rtx (arg1, len);
3940 rtx arg2_rtx = get_memory_rtx (arg2, len);
3941 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3942
3943 /* Set MEM_SIZE as appropriate. */
3944 if (CONST_INT_P (arg3_rtx))
3945 {
3946 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3947 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3948 }
3949
3950 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3951 TREE_TYPE (len), arg3_rtx,
3952 MIN (arg1_align, arg2_align));
3953 if (result)
3954 {
3955 /* Return the value in the proper mode for this function. */
3956 if (GET_MODE (result) == mode)
3957 return result;
3958
3959 if (target != 0)
3960 {
3961 convert_move (target, result, 0);
3962 return target;
3963 }
3964
3965 return convert_to_mode (mode, result, 0);
3966 }
3967
3968 result = target;
3969 if (! (result != 0
3970 && REG_P (result) && GET_MODE (result) == mode
3971 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3972 result = gen_reg_rtx (mode);
3973
3974 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3975 TYPE_MODE (integer_type_node), 3,
3976 XEXP (arg1_rtx, 0), Pmode,
3977 XEXP (arg2_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3979 TYPE_UNSIGNED (sizetype)),
3980 TYPE_MODE (sizetype));
3981 return result;
3982 }
3983
3984 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3985 if we failed; the caller should emit a normal call, otherwise try to get
3986 the result in TARGET, if convenient. */
3987
3988 static rtx
3989 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3990 {
3991 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3992 return NULL_RTX;
3993
3994 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3995 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3996 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3997 {
3998 rtx arg1_rtx, arg2_rtx;
3999 tree fndecl, fn;
4000 tree arg1 = CALL_EXPR_ARG (exp, 0);
4001 tree arg2 = CALL_EXPR_ARG (exp, 1);
4002 rtx result = NULL_RTX;
4003
4004 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4005 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4006
4007 /* If we don't have POINTER_TYPE, call the function. */
4008 if (arg1_align == 0 || arg2_align == 0)
4009 return NULL_RTX;
4010
4011 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4012 arg1 = builtin_save_expr (arg1);
4013 arg2 = builtin_save_expr (arg2);
4014
4015 arg1_rtx = get_memory_rtx (arg1, NULL);
4016 arg2_rtx = get_memory_rtx (arg2, NULL);
4017
4018 /* Try to call cmpstrsi. */
4019 if (cmpstr_icode != CODE_FOR_nothing)
4020 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4021 MIN (arg1_align, arg2_align));
4022
4023 /* Try to determine at least one length and call cmpstrnsi. */
4024 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4025 {
4026 tree len;
4027 rtx arg3_rtx;
4028
4029 tree len1 = c_strlen (arg1, 1);
4030 tree len2 = c_strlen (arg2, 1);
4031
4032 if (len1)
4033 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4034 if (len2)
4035 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4036
4037 /* If we don't have a constant length for the first, use the length
4038 of the second, if we know it. We don't require a constant for
4039 this case; some cost analysis could be done if both are available
4040 but neither is constant. For now, assume they're equally cheap,
4041 unless one has side effects. If both strings have constant lengths,
4042 use the smaller. */
4043
4044 if (!len1)
4045 len = len2;
4046 else if (!len2)
4047 len = len1;
4048 else if (TREE_SIDE_EFFECTS (len1))
4049 len = len2;
4050 else if (TREE_SIDE_EFFECTS (len2))
4051 len = len1;
4052 else if (TREE_CODE (len1) != INTEGER_CST)
4053 len = len2;
4054 else if (TREE_CODE (len2) != INTEGER_CST)
4055 len = len1;
4056 else if (tree_int_cst_lt (len1, len2))
4057 len = len1;
4058 else
4059 len = len2;
4060
4061 /* If both arguments have side effects, we cannot optimize. */
4062 if (len && !TREE_SIDE_EFFECTS (len))
4063 {
4064 arg3_rtx = expand_normal (len);
4065 result = expand_cmpstrn_or_cmpmem
4066 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4067 arg3_rtx, MIN (arg1_align, arg2_align));
4068 }
4069 }
4070
4071 if (result)
4072 {
4073 /* Return the value in the proper mode for this function. */
4074 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4075 if (GET_MODE (result) == mode)
4076 return result;
4077 if (target == 0)
4078 return convert_to_mode (mode, result, 0);
4079 convert_move (target, result, 0);
4080 return target;
4081 }
4082
4083 /* Expand the library call ourselves using a stabilized argument
4084 list to avoid re-evaluating the function's arguments twice. */
4085 fndecl = get_callee_fndecl (exp);
4086 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4087 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4088 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4089 return expand_call (fn, target, target == const0_rtx);
4090 }
4091 return NULL_RTX;
4092 }
4093
4094 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4095 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4096 the result in TARGET, if convenient. */
4097
4098 static rtx
4099 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4100 ATTRIBUTE_UNUSED machine_mode mode)
4101 {
4102 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4103
4104 if (!validate_arglist (exp,
4105 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4106 return NULL_RTX;
4107
4108 /* If c_strlen can determine an expression for one of the string
4109 lengths, and it doesn't have side effects, then emit cmpstrnsi
4110 using length MIN(strlen(string)+1, arg3). */
4111 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4112 if (cmpstrn_icode != CODE_FOR_nothing)
4113 {
4114 tree len, len1, len2;
4115 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4116 rtx result;
4117 tree fndecl, fn;
4118 tree arg1 = CALL_EXPR_ARG (exp, 0);
4119 tree arg2 = CALL_EXPR_ARG (exp, 1);
4120 tree arg3 = CALL_EXPR_ARG (exp, 2);
4121
4122 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4123 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4124
4125 len1 = c_strlen (arg1, 1);
4126 len2 = c_strlen (arg2, 1);
4127
4128 if (len1)
4129 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4130 if (len2)
4131 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4132
4133 /* If we don't have a constant length for the first, use the length
4134 of the second, if we know it. We don't require a constant for
4135 this case; some cost analysis could be done if both are available
4136 but neither is constant. For now, assume they're equally cheap,
4137 unless one has side effects. If both strings have constant lengths,
4138 use the smaller. */
4139
4140 if (!len1)
4141 len = len2;
4142 else if (!len2)
4143 len = len1;
4144 else if (TREE_SIDE_EFFECTS (len1))
4145 len = len2;
4146 else if (TREE_SIDE_EFFECTS (len2))
4147 len = len1;
4148 else if (TREE_CODE (len1) != INTEGER_CST)
4149 len = len2;
4150 else if (TREE_CODE (len2) != INTEGER_CST)
4151 len = len1;
4152 else if (tree_int_cst_lt (len1, len2))
4153 len = len1;
4154 else
4155 len = len2;
4156
4157 /* If both arguments have side effects, we cannot optimize. */
4158 if (!len || TREE_SIDE_EFFECTS (len))
4159 return NULL_RTX;
4160
4161 /* The actual new length parameter is MIN(len,arg3). */
4162 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4163 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4164
4165 /* If we don't have POINTER_TYPE, call the function. */
4166 if (arg1_align == 0 || arg2_align == 0)
4167 return NULL_RTX;
4168
4169 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4170 arg1 = builtin_save_expr (arg1);
4171 arg2 = builtin_save_expr (arg2);
4172 len = builtin_save_expr (len);
4173
4174 arg1_rtx = get_memory_rtx (arg1, len);
4175 arg2_rtx = get_memory_rtx (arg2, len);
4176 arg3_rtx = expand_normal (len);
4177 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4178 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4179 MIN (arg1_align, arg2_align));
4180 if (result)
4181 {
4182 /* Return the value in the proper mode for this function. */
4183 mode = TYPE_MODE (TREE_TYPE (exp));
4184 if (GET_MODE (result) == mode)
4185 return result;
4186 if (target == 0)
4187 return convert_to_mode (mode, result, 0);
4188 convert_move (target, result, 0);
4189 return target;
4190 }
4191
4192 /* Expand the library call ourselves using a stabilized argument
4193 list to avoid re-evaluating the function's arguments twice. */
4194 fndecl = get_callee_fndecl (exp);
4195 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4196 arg1, arg2, len);
4197 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4198 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4199 return expand_call (fn, target, target == const0_rtx);
4200 }
4201 return NULL_RTX;
4202 }
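
/* Editorial illustration, not part of GCC: clamping the comparison length
   to MIN (strlen (s1) + 1, n), as done above, cannot change the strncmp
   result, because neither string is examined past the first NUL of S1.
   Sketch of the equivalence (illustrative helper, unused by GCC):  */

static ATTRIBUTE_UNUSED int
example_strncmp_clamped (const char *s1, const char *s2, size_t n)
{
  size_t limit = strlen (s1) + 1;   /* S1's length including its NUL.  */
  if (limit > n)
    limit = n;                      /* MIN (strlen (s1) + 1, N).  */
  return strncmp (s1, s2, limit);   /* same value as strncmp (s1, s2, n).  */
}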
4203
4204 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4205 if that's convenient. */
4206
4207 rtx
4208 expand_builtin_saveregs (void)
4209 {
4210 rtx val;
4211 rtx_insn *seq;
4212
4213 /* Don't do __builtin_saveregs more than once in a function.
4214 Save the result of the first call and reuse it. */
4215 if (saveregs_value != 0)
4216 return saveregs_value;
4217
4218 /* When this function is called, it means that registers must be
4219 saved on entry to this function. So we migrate the call to the
4220 first insn of this function. */
4221
4222 start_sequence ();
4223
4224 /* Do whatever the machine needs done in this case. */
4225 val = targetm.calls.expand_builtin_saveregs ();
4226
4227 seq = get_insns ();
4228 end_sequence ();
4229
4230 saveregs_value = val;
4231
4232 /* Put the insns after the NOTE that starts the function. If this
4233 is inside a start_sequence, make the outer-level insn chain current, so
4234 the code is placed at the start of the function. */
4235 push_topmost_sequence ();
4236 emit_insn_after (seq, entry_of_function ());
4237 pop_topmost_sequence ();
4238
4239 return val;
4240 }
4241
4242 /* Expand a call to __builtin_next_arg. */
4243
4244 static rtx
4245 expand_builtin_next_arg (void)
4246 {
4247 /* Checking arguments is already done in fold_builtin_next_arg,
4248 which must be called before this function. */
4249 return expand_binop (ptr_mode, add_optab,
4250 crtl->args.internal_arg_pointer,
4251 crtl->args.arg_offset_rtx,
4252 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4253 }
4254
4255 /* Make it easier for the backends by protecting the valist argument
4256 from multiple evaluations. */
4257
4258 static tree
4259 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4260 {
4261 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4262
4263 /* The current way of determining the type of valist is completely
4264 bogus. We should have the information on the va builtin instead. */
4265 if (!vatype)
4266 vatype = targetm.fn_abi_va_list (cfun->decl);
4267
4268 if (TREE_CODE (vatype) == ARRAY_TYPE)
4269 {
4270 if (TREE_SIDE_EFFECTS (valist))
4271 valist = save_expr (valist);
4272
4273 /* For this case, the backends will be expecting a pointer to
4274 vatype, but it's possible we've actually been given an array
4275 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4276 So fix it. */
4277 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4278 {
4279 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4280 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4281 }
4282 }
4283 else
4284 {
4285 tree pt = build_pointer_type (vatype);
4286
4287 if (! needs_lvalue)
4288 {
4289 if (! TREE_SIDE_EFFECTS (valist))
4290 return valist;
4291
4292 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4293 TREE_SIDE_EFFECTS (valist) = 1;
4294 }
4295
4296 if (TREE_SIDE_EFFECTS (valist))
4297 valist = save_expr (valist);
4298 valist = fold_build2_loc (loc, MEM_REF,
4299 vatype, valist, build_int_cst (pt, 0));
4300 }
4301
4302 return valist;
4303 }
4304
4305 /* The "standard" definition of va_list is void*. */
4306
4307 tree
4308 std_build_builtin_va_list (void)
4309 {
4310 return ptr_type_node;
4311 }
4312
4313 /* The "standard" abi va_list is va_list_type_node. */
4314
4315 tree
4316 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4317 {
4318 return va_list_type_node;
4319 }
4320
4321 /* The "standard" type of va_list is va_list_type_node. */
4322
4323 tree
4324 std_canonical_va_list_type (tree type)
4325 {
4326 tree wtype, htype;
4327
4328 if (INDIRECT_REF_P (type))
4329 type = TREE_TYPE (type);
4330 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4331 type = TREE_TYPE (type);
4332 wtype = va_list_type_node;
4333 htype = type;
4334 /* Treat structure va_list types. */
4335 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4336 htype = TREE_TYPE (htype);
4337 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4338 {
4339 /* If va_list is an array type, the argument may have decayed
4340 to a pointer type, e.g. by being passed to another function.
4341 In that case, unwrap both types so that we can compare the
4342 underlying records. */
4343 if (TREE_CODE (htype) == ARRAY_TYPE
4344 || POINTER_TYPE_P (htype))
4345 {
4346 wtype = TREE_TYPE (wtype);
4347 htype = TREE_TYPE (htype);
4348 }
4349 }
4350 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4351 return va_list_type_node;
4352
4353 return NULL_TREE;
4354 }
4355
4356 /* The "standard" implementation of va_start: just assign `nextarg' to
4357 the variable. */
4358
4359 void
4360 std_expand_builtin_va_start (tree valist, rtx nextarg)
4361 {
4362 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4363 convert_move (va_r, nextarg, 0);
4364
4365 /* We do not have any valid bounds for the pointer, so
4366 just store zero bounds for it. */
4367 if (chkp_function_instrumented_p (current_function_decl))
4368 chkp_expand_bounds_reset_for_mem (valist,
4369 make_tree (TREE_TYPE (valist),
4370 nextarg));
4371 }
4372
4373 /* Expand EXP, a call to __builtin_va_start. */
4374
4375 static rtx
4376 expand_builtin_va_start (tree exp)
4377 {
4378 rtx nextarg;
4379 tree valist;
4380 location_t loc = EXPR_LOCATION (exp);
4381
4382 if (call_expr_nargs (exp) < 2)
4383 {
4384 error_at (loc, "too few arguments to function %<va_start%>");
4385 return const0_rtx;
4386 }
4387
4388 if (fold_builtin_next_arg (exp, true))
4389 return const0_rtx;
4390
4391 nextarg = expand_builtin_next_arg ();
4392 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4393
4394 if (targetm.expand_builtin_va_start)
4395 targetm.expand_builtin_va_start (valist, nextarg);
4396 else
4397 std_expand_builtin_va_start (valist, nextarg);
4398
4399 return const0_rtx;
4400 }
4401
4402 /* Expand EXP, a call to __builtin_va_end. */
4403
4404 static rtx
4405 expand_builtin_va_end (tree exp)
4406 {
4407 tree valist = CALL_EXPR_ARG (exp, 0);
4408
4409 /* Evaluate for side effects, if needed. I hate macros that don't
4410 do that. */
4411 if (TREE_SIDE_EFFECTS (valist))
4412 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4413
4414 return const0_rtx;
4415 }
4416
4417 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4418 builtin rather than just as an assignment in stdarg.h because of the
4419 nastiness of array-type va_list types. */
4420
4421 static rtx
4422 expand_builtin_va_copy (tree exp)
4423 {
4424 tree dst, src, t;
4425 location_t loc = EXPR_LOCATION (exp);
4426
4427 dst = CALL_EXPR_ARG (exp, 0);
4428 src = CALL_EXPR_ARG (exp, 1);
4429
4430 dst = stabilize_va_list_loc (loc, dst, 1);
4431 src = stabilize_va_list_loc (loc, src, 0);
4432
4433 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4434
4435 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4436 {
4437 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4438 TREE_SIDE_EFFECTS (t) = 1;
4439 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4440 }
4441 else
4442 {
4443 rtx dstb, srcb, size;
4444
4445 /* Evaluate to pointers. */
4446 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4447 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4448 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4449 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4450
4451 dstb = convert_memory_address (Pmode, dstb);
4452 srcb = convert_memory_address (Pmode, srcb);
4453
4454 /* "Dereference" to BLKmode memories. */
4455 dstb = gen_rtx_MEM (BLKmode, dstb);
4456 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4457 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4458 srcb = gen_rtx_MEM (BLKmode, srcb);
4459 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4460 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4461
4462 /* Copy. */
4463 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4464 }
4465
4466 return const0_rtx;
4467 }
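
/* Editorial illustration, not part of GCC: on targets whose va_list is an
   array of structures (the x86-64 psABI, for instance), "ap2 = ap1" is not
   a valid way to copy a va_list, which is why the branch above falls back
   to a block copy of the underlying storage.  A user-level sketch of what
   is being expanded:

       va_list ap1, ap2;
       va_start (ap1, last_named_arg);
       va_copy (ap2, ap1);    // becomes the block copy emitted above
       ...
       va_end (ap2);
       va_end (ap1);
*/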
4468
4469 /* Expand a call to one of the builtin functions __builtin_frame_address or
4470 __builtin_return_address. */
4471
4472 static rtx
4473 expand_builtin_frame_address (tree fndecl, tree exp)
4474 {
4475 /* The argument must be a nonnegative integer constant.
4476 It counts the number of frames to scan up the stack.
4477 The value is either the frame pointer value or the return
4478 address saved in that frame. */
4479 if (call_expr_nargs (exp) == 0)
4480 /* Warning about missing arg was already issued. */
4481 return const0_rtx;
4482 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4483 {
4484 error ("invalid argument to %qD", fndecl);
4485 return const0_rtx;
4486 }
4487 else
4488 {
4489 /* Number of frames to scan up the stack. */
4490 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4491
4492 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4493
4494 /* Some ports cannot access arbitrary stack frames. */
4495 if (tem == NULL)
4496 {
4497 warning (0, "unsupported argument to %qD", fndecl);
4498 return const0_rtx;
4499 }
4500
4501 if (count)
4502 {
4503 /* Warn since no effort is made to ensure that any frame
4504 beyond the current one exists or can be safely reached. */
4505 warning (OPT_Wframe_address, "calling %qD with "
4506 "a nonzero argument is unsafe", fndecl);
4507 }
4508
4509 /* For __builtin_frame_address, return what we've got. */
4510 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4511 return tem;
4512
4513 if (!REG_P (tem)
4514 && ! CONSTANT_P (tem))
4515 tem = copy_addr_to_reg (tem);
4516 return tem;
4517 }
4518 }
4519
4520 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4521 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4522 is the same as for allocate_dynamic_stack_space. */
4523
4524 static rtx
4525 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4526 {
4527 rtx op0;
4528 rtx result;
4529 bool valid_arglist;
4530 unsigned int align;
4531 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4532 == BUILT_IN_ALLOCA_WITH_ALIGN);
4533
4534 valid_arglist
4535 = (alloca_with_align
4536 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4537 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4538
4539 if (!valid_arglist)
4540 return NULL_RTX;
4541
4542 /* Compute the argument. */
4543 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4544
4545 /* Compute the alignment. */
4546 align = (alloca_with_align
4547 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4548 : BIGGEST_ALIGNMENT);
4549
4550 /* Allocate the desired space. */
4551 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4552 result = convert_memory_address (ptr_mode, result);
4553
4554 return result;
4555 }
4556
4557 /* Expand a call to bswap builtin in EXP.
4558 Return NULL_RTX if a normal call should be emitted rather than expanding the
4559 function in-line. If convenient, the result should be placed in TARGET.
4560 SUBTARGET may be used as the target for computing one of EXP's operands. */
4561
4562 static rtx
4563 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4564 rtx subtarget)
4565 {
4566 tree arg;
4567 rtx op0;
4568
4569 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4570 return NULL_RTX;
4571
4572 arg = CALL_EXPR_ARG (exp, 0);
4573 op0 = expand_expr (arg,
4574 subtarget && GET_MODE (subtarget) == target_mode
4575 ? subtarget : NULL_RTX,
4576 target_mode, EXPAND_NORMAL);
4577 if (GET_MODE (op0) != target_mode)
4578 op0 = convert_to_mode (target_mode, op0, 1);
4579
4580 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4581
4582 gcc_assert (target);
4583
4584 return convert_to_mode (target_mode, target, 1);
4585 }
4586
4587 /* Expand a call to a unary builtin in EXP.
4588 Return NULL_RTX if a normal call should be emitted rather than expanding the
4589 function in-line. If convenient, the result should be placed in TARGET.
4590 SUBTARGET may be used as the target for computing one of EXP's operands. */
4591
4592 static rtx
4593 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4594 rtx subtarget, optab op_optab)
4595 {
4596 rtx op0;
4597
4598 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4599 return NULL_RTX;
4600
4601 /* Compute the argument. */
4602 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4603 (subtarget
4604 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4605 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4606 VOIDmode, EXPAND_NORMAL);
4607 /* Compute op, into TARGET if possible.
4608 Set TARGET to wherever the result comes back. */
4609 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4610 op_optab, op0, target, op_optab != clrsb_optab);
4611 gcc_assert (target);
4612
4613 return convert_to_mode (target_mode, target, 0);
4614 }
4615
4616 /* Expand a call to __builtin_expect. We just return our argument
4617 as the builtin_expect semantics should already have been handled by
4618 the tree branch prediction pass. */
4619
4620 static rtx
4621 expand_builtin_expect (tree exp, rtx target)
4622 {
4623 tree arg;
4624
4625 if (call_expr_nargs (exp) < 2)
4626 return const0_rtx;
4627 arg = CALL_EXPR_ARG (exp, 0);
4628
4629 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4630 /* When guessing was done, the hints should be already stripped away. */
4631 gcc_assert (!flag_guess_branch_prob
4632 || optimize == 0 || seen_error ());
4633 return target;
4634 }
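
/* Editorial illustration, not part of GCC: by the time RTL expansion runs,
   the prediction hint has already been consumed by the tree branch
   prediction pass, so only the first argument matters here.  Typical
   user-level usage, for reference (handle_rare_error is hypothetical):

       if (__builtin_expect (p == NULL, 0))   // "unlikely" branch
         handle_rare_error ();

   at this point the test behaves exactly like a plain "if (p == NULL)".  */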
4635
4636 /* Expand a call to __builtin_assume_aligned. We just return our first
4637 argument as the builtin_assume_aligned semantics should already have been
4638 handled by CCP. */
4639
4640 static rtx
4641 expand_builtin_assume_aligned (tree exp, rtx target)
4642 {
4643 if (call_expr_nargs (exp) < 2)
4644 return const0_rtx;
4645 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4646 EXPAND_NORMAL);
4647 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4648 && (call_expr_nargs (exp) < 3
4649 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4650 return target;
4651 }
4652
4653 void
4654 expand_builtin_trap (void)
4655 {
4656 if (targetm.have_trap ())
4657 {
4658 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4659 /* For trap insns when not accumulating outgoing args force
4660 REG_ARGS_SIZE note to prevent crossjumping of calls with
4661 different args sizes. */
4662 if (!ACCUMULATE_OUTGOING_ARGS)
4663 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4664 }
4665 else
4666 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4667 emit_barrier ();
4668 }
4669
4670 /* Expand a call to __builtin_unreachable. We do nothing except emit
4671 a barrier saying that control flow will not pass here.
4672
4673 It is the responsibility of the program being compiled to ensure
4674 that control flow never reaches __builtin_unreachable. */
4675 static void
4676 expand_builtin_unreachable (void)
4677 {
4678 emit_barrier ();
4679 }
4680
4681 /* Expand EXP, a call to fabs, fabsf or fabsl.
4682 Return NULL_RTX if a normal call should be emitted rather than expanding
4683 the function inline. If convenient, the result should be placed
4684 in TARGET. SUBTARGET may be used as the target for computing
4685 the operand. */
4686
4687 static rtx
4688 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4689 {
4690 machine_mode mode;
4691 tree arg;
4692 rtx op0;
4693
4694 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4695 return NULL_RTX;
4696
4697 arg = CALL_EXPR_ARG (exp, 0);
4698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4699 mode = TYPE_MODE (TREE_TYPE (arg));
4700 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4701 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4702 }
4703
4704 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4705 Return NULL_RTX if a normal call should be emitted rather than expanding the
4706 function inline. If convenient, the result should be placed in TARGET.
4707 SUBTARGET may be used as the target for computing the operand. */
4708
4709 static rtx
4710 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4711 {
4712 rtx op0, op1;
4713 tree arg;
4714
4715 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4716 return NULL_RTX;
4717
4718 arg = CALL_EXPR_ARG (exp, 0);
4719 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4720
4721 arg = CALL_EXPR_ARG (exp, 1);
4722 op1 = expand_normal (arg);
4723
4724 return expand_copysign (op0, op1, target);
4725 }
4726
4727 /* Expand a call to __builtin___clear_cache. */
4728
4729 static rtx
4730 expand_builtin___clear_cache (tree exp)
4731 {
4732 if (!targetm.code_for_clear_cache)
4733 {
4734 #ifdef CLEAR_INSN_CACHE
4735 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4736 does something. Just do the default expansion to a call to
4737 __clear_cache(). */
4738 return NULL_RTX;
4739 #else
4740 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4741 does nothing. There is no need to call it. Do nothing. */
4742 return const0_rtx;
4743 #endif /* CLEAR_INSN_CACHE */
4744 }
4745
4746 /* We have a "clear_cache" insn, and it will handle everything. */
4747 tree begin, end;
4748 rtx begin_rtx, end_rtx;
4749
4750 /* We must not expand to a library call. If we did, any
4751 fallback library function in libgcc that might contain a call to
4752 __builtin___clear_cache() would recurse infinitely. */
4753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4754 {
4755 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4756 return const0_rtx;
4757 }
4758
4759 if (targetm.have_clear_cache ())
4760 {
4761 struct expand_operand ops[2];
4762
4763 begin = CALL_EXPR_ARG (exp, 0);
4764 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4765
4766 end = CALL_EXPR_ARG (exp, 1);
4767 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4768
4769 create_address_operand (&ops[0], begin_rtx);
4770 create_address_operand (&ops[1], end_rtx);
4771 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4772 return const0_rtx;
4773 }
4774 return const0_rtx;
4775 }
4776
4777 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4778
4779 static rtx
4780 round_trampoline_addr (rtx tramp)
4781 {
4782 rtx temp, addend, mask;
4783
4784 /* If we don't need too much alignment, we'll have been guaranteed
4785 proper alignment by get_trampoline_type. */
4786 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4787 return tramp;
4788
4789 /* Round address up to desired boundary. */
4790 temp = gen_reg_rtx (Pmode);
4791 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4792 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4793
4794 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4795 temp, 0, OPTAB_LIB_WIDEN);
4796 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4797 temp, 0, OPTAB_LIB_WIDEN);
4798
4799 return tramp;
4800 }
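
/* Editorial illustration, not part of GCC: the PLUS/AND pair emitted above
   is the usual round-up-to-alignment idiom
       rounded = (addr + align - 1) & -align
   for a power-of-two ALIGN in bytes; e.g. with ALIGN == 16, addresses
   0x1001 through 0x1010 all round up to 0x1010.  Host-side sketch of the
   same arithmetic (illustrative helper, unused by GCC):  */

static ATTRIBUTE_UNUSED unsigned HOST_WIDE_INT
example_round_up (unsigned HOST_WIDE_INT addr, unsigned HOST_WIDE_INT align)
{
  return (addr + align - 1) & -align;   /* ALIGN must be a power of two.  */
}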
4801
4802 static rtx
4803 expand_builtin_init_trampoline (tree exp, bool onstack)
4804 {
4805 tree t_tramp, t_func, t_chain;
4806 rtx m_tramp, r_tramp, r_chain, tmp;
4807
4808 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4809 POINTER_TYPE, VOID_TYPE))
4810 return NULL_RTX;
4811
4812 t_tramp = CALL_EXPR_ARG (exp, 0);
4813 t_func = CALL_EXPR_ARG (exp, 1);
4814 t_chain = CALL_EXPR_ARG (exp, 2);
4815
4816 r_tramp = expand_normal (t_tramp);
4817 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4818 MEM_NOTRAP_P (m_tramp) = 1;
4819
4820 /* If ONSTACK, the TRAMP argument should be the address of a field
4821 within the local function's FRAME decl. Either way, let's see if
4822 we can fill in the MEM_ATTRs for this memory. */
4823 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4824 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4825
4826 /* Creator of a heap trampoline is responsible for making sure the
4827 address is aligned to at least STACK_BOUNDARY. Normally malloc
4828 will ensure this anyhow. */
4829 tmp = round_trampoline_addr (r_tramp);
4830 if (tmp != r_tramp)
4831 {
4832 m_tramp = change_address (m_tramp, BLKmode, tmp);
4833 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4834 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4835 }
4836
4837 /* The FUNC argument should be the address of the nested function.
4838 Extract the actual function decl to pass to the hook. */
4839 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4840 t_func = TREE_OPERAND (t_func, 0);
4841 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4842
4843 r_chain = expand_normal (t_chain);
4844
4845 /* Generate insns to initialize the trampoline. */
4846 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4847
4848 if (onstack)
4849 {
4850 trampolines_created = 1;
4851
4852 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4853 "trampoline generated for nested function %qD", t_func);
4854 }
4855
4856 return const0_rtx;
4857 }
4858
4859 static rtx
4860 expand_builtin_adjust_trampoline (tree exp)
4861 {
4862 rtx tramp;
4863
4864 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4865 return NULL_RTX;
4866
4867 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4868 tramp = round_trampoline_addr (tramp);
4869 if (targetm.calls.trampoline_adjust_address)
4870 tramp = targetm.calls.trampoline_adjust_address (tramp);
4871
4872 return tramp;
4873 }
4874
4875 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4876 function. The function first checks whether the back end provides
4877 an insn to implement signbit for the respective mode. If not, it
4878 checks whether the floating point format of the value is such that
4879 the sign bit can be extracted. If that is not the case, error out.
4880 EXP is the expression that is a call to the builtin function; if
4881 convenient, the result should be placed in TARGET. */
4882 static rtx
4883 expand_builtin_signbit (tree exp, rtx target)
4884 {
4885 const struct real_format *fmt;
4886 machine_mode fmode, imode, rmode;
4887 tree arg;
4888 int word, bitpos;
4889 enum insn_code icode;
4890 rtx temp;
4891 location_t loc = EXPR_LOCATION (exp);
4892
4893 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4894 return NULL_RTX;
4895
4896 arg = CALL_EXPR_ARG (exp, 0);
4897 fmode = TYPE_MODE (TREE_TYPE (arg));
4898 rmode = TYPE_MODE (TREE_TYPE (exp));
4899 fmt = REAL_MODE_FORMAT (fmode);
4900
4901 arg = builtin_save_expr (arg);
4902
4903 /* Expand the argument yielding a RTX expression. */
4904 temp = expand_normal (arg);
4905
4906 /* Check if the back end provides an insn that handles signbit for the
4907 argument's mode. */
4908 icode = optab_handler (signbit_optab, fmode);
4909 if (icode != CODE_FOR_nothing)
4910 {
4911 rtx_insn *last = get_last_insn ();
4912 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4913 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4914 return target;
4915 delete_insns_since (last);
4916 }
4917
4918 /* For floating point formats without a sign bit, implement signbit
4919 as "ARG < 0.0". */
4920 bitpos = fmt->signbit_ro;
4921 if (bitpos < 0)
4922 {
4923 /* But we can't do this if the format supports signed zero. */
4924 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4925
4926 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4927 build_real (TREE_TYPE (arg), dconst0));
4928 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4929 }
4930
4931 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4932 {
4933 imode = int_mode_for_mode (fmode);
4934 gcc_assert (imode != BLKmode);
4935 temp = gen_lowpart (imode, temp);
4936 }
4937 else
4938 {
4939 imode = word_mode;
4940 /* Handle targets with different FP word orders. */
4941 if (FLOAT_WORDS_BIG_ENDIAN)
4942 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4943 else
4944 word = bitpos / BITS_PER_WORD;
4945 temp = operand_subword_force (temp, word, fmode);
4946 bitpos = bitpos % BITS_PER_WORD;
4947 }
4948
4949 /* Force the intermediate word_mode (or narrower) result into a
4950 register. This avoids attempting to create paradoxical SUBREGs
4951 of floating point modes below. */
4952 temp = force_reg (imode, temp);
4953
4954 /* If the bitpos is within the "result mode" lowpart, the operation
4955 can be implemented with a single bitwise AND. Otherwise, we need
4956 a right shift and an AND. */
4957
4958 if (bitpos < GET_MODE_BITSIZE (rmode))
4959 {
4960 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4961
4962 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4963 temp = gen_lowpart (rmode, temp);
4964 temp = expand_binop (rmode, and_optab, temp,
4965 immed_wide_int_const (mask, rmode),
4966 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4967 }
4968 else
4969 {
4970 /* Perform a logical right shift to place the signbit in the least
4971 significant bit, then truncate the result to the desired mode
4972 and mask just this bit. */
4973 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4974 temp = gen_lowpart (rmode, temp);
4975 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4976 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4977 }
4978
4979 return temp;
4980 }
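
/* Editorial illustration, not part of GCC: for a format whose sign bit is
   the most significant bit of a 32-bit word (IEEE single precision, where
   signbit_ro == 31), the generic fallback above boils down to isolating
   that bit; signbit only needs to return zero or nonzero.  Host-side
   sketch, assuming a 32-bit unsigned int (illustrative helper, unused by
   GCC):  */

static ATTRIBUTE_UNUSED int
example_signbit_single (unsigned int float_bits)
{
  return (float_bits >> 31) & 1;   /* 1 for negative values, including -0.0f.  */
}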
4981
4982 /* Expand fork or exec calls. TARGET is the desired target of the
4983 call. EXP is the call. FN is the
4984 identifier of the actual function. IGNORE is nonzero if the
4985 value is to be ignored. */
4986
4987 static rtx
4988 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4989 {
4990 tree id, decl;
4991 tree call;
4992
4993 /* If we are not profiling, just call the function. */
4994 if (!profile_arc_flag)
4995 return NULL_RTX;
4996
4997 /* Otherwise call the wrapper. This should be equivalent for the rest of
4998 the compiler, so the code does not diverge, and the wrapper may run the
4999 code necessary for keeping the profiling sane. */
5000
5001 switch (DECL_FUNCTION_CODE (fn))
5002 {
5003 case BUILT_IN_FORK:
5004 id = get_identifier ("__gcov_fork");
5005 break;
5006
5007 case BUILT_IN_EXECL:
5008 id = get_identifier ("__gcov_execl");
5009 break;
5010
5011 case BUILT_IN_EXECV:
5012 id = get_identifier ("__gcov_execv");
5013 break;
5014
5015 case BUILT_IN_EXECLP:
5016 id = get_identifier ("__gcov_execlp");
5017 break;
5018
5019 case BUILT_IN_EXECLE:
5020 id = get_identifier ("__gcov_execle");
5021 break;
5022
5023 case BUILT_IN_EXECVP:
5024 id = get_identifier ("__gcov_execvp");
5025 break;
5026
5027 case BUILT_IN_EXECVE:
5028 id = get_identifier ("__gcov_execve");
5029 break;
5030
5031 default:
5032 gcc_unreachable ();
5033 }
5034
5035 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5036 FUNCTION_DECL, id, TREE_TYPE (fn));
5037 DECL_EXTERNAL (decl) = 1;
5038 TREE_PUBLIC (decl) = 1;
5039 DECL_ARTIFICIAL (decl) = 1;
5040 TREE_NOTHROW (decl) = 1;
5041 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5042 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5043 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5044 return expand_call (call, target, ignore);
5045 }
5046
5047
5048 \f
5049 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5050 the pointer in these functions is void*, the tree optimizers may remove
5051 casts. The mode computed in expand_builtin isn't reliable either, due
5052 to __sync_bool_compare_and_swap.
5053
5054 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5055 group of builtins. This gives us log2 of the mode size. */
5056
5057 static inline machine_mode
5058 get_builtin_sync_mode (int fcode_diff)
5059 {
5060 /* The size is not negotiable, so ask not to get BLKmode in return
5061 if the target indicates that a smaller size would be better. */
5062 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5063 }
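
/* For example (illustrative only): __sync_fetch_and_add_4 reaches this
   function with FCODE_DIFF == 2, so we request a mode of
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */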
5064
5065 /* Expand the memory expression LOC and return the appropriate memory operand
5066 for the builtin_sync operations. */
5067
5068 static rtx
5069 get_builtin_sync_mem (tree loc, machine_mode mode)
5070 {
5071 rtx addr, mem;
5072
5073 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5074 addr = convert_memory_address (Pmode, addr);
5075
5076 /* Note that we explicitly do not want any alias information for this
5077 memory, so that we kill all other live memories. Otherwise we don't
5078 satisfy the full barrier semantics of the intrinsic. */
5079 mem = validize_mem (gen_rtx_MEM (mode, addr));
5080
5081   /* The alignment needs to be at least that of the mode.  */
5082 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5083 get_pointer_alignment (loc)));
5084 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5085 MEM_VOLATILE_P (mem) = 1;
5086
5087 return mem;
5088 }
5089
5090 /* Make sure an argument is in the right mode.
5091 EXP is the tree argument.
5092 MODE is the mode it should be in. */
5093
5094 static rtx
5095 expand_expr_force_mode (tree exp, machine_mode mode)
5096 {
5097 rtx val;
5098 machine_mode old_mode;
5099
5100 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5101 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5102 of CONST_INTs, where we know the old_mode only from the call argument. */
5103
5104 old_mode = GET_MODE (val);
5105 if (old_mode == VOIDmode)
5106 old_mode = TYPE_MODE (TREE_TYPE (exp));
5107 val = convert_modes (mode, old_mode, val, 1);
5108 return val;
5109 }
5110
5111
5112 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5113 EXP is the CALL_EXPR. CODE is the rtx code
5114 that corresponds to the arithmetic or logical operation from the name;
5115 an exception here is that NOT actually means NAND. TARGET is an optional
5116 place for us to store the results; AFTER is true if this is the
5117    xxx_and_fetch form, i.e. return the value after the operation.  */
5118
5119 static rtx
5120 expand_builtin_sync_operation (machine_mode mode, tree exp,
5121 enum rtx_code code, bool after,
5122 rtx target)
5123 {
5124 rtx val, mem;
5125 location_t loc = EXPR_LOCATION (exp);
5126
5127 if (code == NOT && warn_sync_nand)
5128 {
5129 tree fndecl = get_callee_fndecl (exp);
5130 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5131
5132 static bool warned_f_a_n, warned_n_a_f;
5133
5134 switch (fcode)
5135 {
5136 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5137 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5138 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5139 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5140 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5141 if (warned_f_a_n)
5142 break;
5143
5144 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5145 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5146 warned_f_a_n = true;
5147 break;
5148
5149 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5150 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5151 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5152 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5153 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5154 if (warned_n_a_f)
5155 break;
5156
5157 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5158 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5159 warned_n_a_f = true;
5160 break;
5161
5162 default:
5163 gcc_unreachable ();
5164 }
5165 }
5166
5167 /* Expand the operands. */
5168 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5169 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5170
5171 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5172 after);
5173 }
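
/* Illustrative source-level forms handled by the expander above, assuming a
   plain int COUNTER (not part of GCC itself):

     int oldval = __sync_fetch_and_add (&counter, 1);     AFTER == false
     int newval = __sync_add_and_fetch (&counter, 1);     AFTER == true

   Both go through expand_atomic_fetch_op with a full SEQ_CST barrier; AFTER
   only selects whether the pre- or post-operation value is returned.  */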
5174
5175 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5176 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5177 true if this is the boolean form. TARGET is a place for us to store the
5178 results; this is NOT optional if IS_BOOL is true. */
5179
5180 static rtx
5181 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5182 bool is_bool, rtx target)
5183 {
5184 rtx old_val, new_val, mem;
5185 rtx *pbool, *poval;
5186
5187 /* Expand the operands. */
5188 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5189 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5190 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5191
5192 pbool = poval = NULL;
5193 if (target != const0_rtx)
5194 {
5195 if (is_bool)
5196 pbool = &target;
5197 else
5198 poval = &target;
5199 }
5200 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5201 false, MEMMODEL_SYNC_SEQ_CST,
5202 MEMMODEL_SYNC_SEQ_CST))
5203 return NULL_RTX;
5204
5205 return target;
5206 }
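
/* Illustrative source-level forms (not part of GCC itself):

     int ok  = __sync_bool_compare_and_swap (&x, expected, desired);
     int old = __sync_val_compare_and_swap (&x, expected, desired);

   IS_BOOL selects which of the two results expand_atomic_compare_and_swap is
   asked to produce.  */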
5207
5208 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5209 general form is actually an atomic exchange, and some targets only
5210 support a reduced form with the second argument being a constant 1.
5211 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5212 the results. */
5213
5214 static rtx
5215 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5216 rtx target)
5217 {
5218 rtx val, mem;
5219
5220 /* Expand the operands. */
5221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5222 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5223
5224 return expand_sync_lock_test_and_set (target, mem, val);
5225 }
5226
5227 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5228
5229 static void
5230 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5231 {
5232 rtx mem;
5233
5234 /* Expand the operands. */
5235 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5236
5237 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5238 }
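
/* Illustrative spin lock built from the two builtins above, assuming an int
   lock word (not part of GCC itself):

     while (__sync_lock_test_and_set (&lock, 1))
       ;                                  acquire: spin while previously held
     ...critical section...
     __sync_lock_release (&lock);         release: stores 0

   which is why the release expander above simply emits an atomic store of
   const0_rtx with MEMMODEL_SYNC_RELEASE.  */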
5239
5240 /* Given an integer representing an ``enum memmodel'', verify its
5241 correctness and return the memory model enum. */
5242
5243 static enum memmodel
5244 get_memmodel (tree exp)
5245 {
5246 rtx op;
5247 unsigned HOST_WIDE_INT val;
5248
5249 /* If the parameter is not a constant, it's a run time value so we'll just
5250 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5251 if (TREE_CODE (exp) != INTEGER_CST)
5252 return MEMMODEL_SEQ_CST;
5253
5254 op = expand_normal (exp);
5255
5256 val = INTVAL (op);
5257 if (targetm.memmodel_check)
5258 val = targetm.memmodel_check (val);
5259 else if (val & ~MEMMODEL_MASK)
5260 {
5261 warning (OPT_Winvalid_memory_model,
5262 	       "unknown architecture specifier in memory model to builtin");
5263 return MEMMODEL_SEQ_CST;
5264 }
5265
5266   /* Should never see an explicit user SYNC memory model, so >= LAST works.  */
5267 if (memmodel_base (val) >= MEMMODEL_LAST)
5268 {
5269 warning (OPT_Winvalid_memory_model,
5270 "invalid memory model argument to builtin");
5271 return MEMMODEL_SEQ_CST;
5272 }
5273
5274 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5275 be conservative and promote consume to acquire. */
5276 if (val == MEMMODEL_CONSUME)
5277 val = MEMMODEL_ACQUIRE;
5278
5279 return (enum memmodel) val;
5280 }
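
/* For example (illustrative): __atomic_load_n (&x, __ATOMIC_ACQUIRE) reaches
   us with an INTEGER_CST and yields MEMMODEL_ACQUIRE, whereas a model held
   in a runtime variable is simply treated as MEMMODEL_SEQ_CST by the
   early-out above.  */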
5281
5282 /* Expand the __atomic_exchange intrinsic:
5283 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5284 EXP is the CALL_EXPR.
5285 TARGET is an optional place for us to store the results. */
5286
5287 static rtx
5288 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5289 {
5290 rtx val, mem;
5291 enum memmodel model;
5292
5293 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5294
5295 if (!flag_inline_atomics)
5296 return NULL_RTX;
5297
5298 /* Expand the operands. */
5299 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5300 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5301
5302 return expand_atomic_exchange (target, mem, val, model);
5303 }
5304
5305 /* Expand the __atomic_compare_exchange intrinsic:
5306 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5307 TYPE desired, BOOL weak,
5308 enum memmodel success,
5309 enum memmodel failure)
5310 EXP is the CALL_EXPR.
5311 TARGET is an optional place for us to store the results. */
5312
5313 static rtx
5314 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5315 rtx target)
5316 {
5317 rtx expect, desired, mem, oldval;
5318 rtx_code_label *label;
5319 enum memmodel success, failure;
5320 tree weak;
5321 bool is_weak;
5322
5323 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5324 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5325
5326 if (failure > success)
5327 {
5328 warning (OPT_Winvalid_memory_model,
5329 "failure memory model cannot be stronger than success memory "
5330 "model for %<__atomic_compare_exchange%>");
5331 success = MEMMODEL_SEQ_CST;
5332 }
5333
5334 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5335 {
5336 warning (OPT_Winvalid_memory_model,
5337 "invalid failure memory model for "
5338 "%<__atomic_compare_exchange%>");
5339 failure = MEMMODEL_SEQ_CST;
5340 success = MEMMODEL_SEQ_CST;
5341 }
5342
5343
5344 if (!flag_inline_atomics)
5345 return NULL_RTX;
5346
5347 /* Expand the operands. */
5348 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5349
5350 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5351 expect = convert_memory_address (Pmode, expect);
5352 expect = gen_rtx_MEM (mode, expect);
5353 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5354
5355 weak = CALL_EXPR_ARG (exp, 3);
5356 is_weak = false;
5357 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5358 is_weak = true;
5359
5360 if (target == const0_rtx)
5361 target = NULL;
5362
5363   /* Lest the rtl backend create a race condition with an improper store
5364 to memory, always create a new pseudo for OLDVAL. */
5365 oldval = NULL;
5366
5367 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5368 is_weak, success, failure))
5369 return NULL_RTX;
5370
5371 /* Conditionally store back to EXPECT, lest we create a race condition
5372 with an improper store to memory. */
5373 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5374 the normal case where EXPECT is totally private, i.e. a register. At
5375 which point the store can be unconditional. */
5376 label = gen_label_rtx ();
5377 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5378 GET_MODE (target), 1, label);
5379 emit_move_insn (expect, oldval);
5380 emit_label (label);
5381
5382 return target;
5383 }
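
/* Illustrative source-level form (not part of GCC itself):

     int expected = 0;
     if (!__atomic_compare_exchange_n (&x, &expected, 1, 0,
                                       __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE))
       {
         ... on failure, EXPECTED now holds the value observed in X ...
       }

   The conditional store of OLDVAL back into EXPECT above implements exactly
   that failure-path update.  */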
5384
5385 /* Expand the __atomic_load intrinsic:
5386 TYPE __atomic_load (TYPE *object, enum memmodel)
5387 EXP is the CALL_EXPR.
5388 TARGET is an optional place for us to store the results. */
5389
5390 static rtx
5391 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5392 {
5393 rtx mem;
5394 enum memmodel model;
5395
5396 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5397 if (is_mm_release (model) || is_mm_acq_rel (model))
5398 {
5399 warning (OPT_Winvalid_memory_model,
5400 "invalid memory model for %<__atomic_load%>");
5401 model = MEMMODEL_SEQ_CST;
5402 }
5403
5404 if (!flag_inline_atomics)
5405 return NULL_RTX;
5406
5407 /* Expand the operand. */
5408 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5409
5410 return expand_atomic_load (target, mem, model);
5411 }
5412
5413
5414 /* Expand the __atomic_store intrinsic:
5415 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5416 EXP is the CALL_EXPR.
5417 TARGET is an optional place for us to store the results. */
5418
5419 static rtx
5420 expand_builtin_atomic_store (machine_mode mode, tree exp)
5421 {
5422 rtx mem, val;
5423 enum memmodel model;
5424
5425 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5426 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5427 || is_mm_release (model)))
5428 {
5429 warning (OPT_Winvalid_memory_model,
5430 "invalid memory model for %<__atomic_store%>");
5431 model = MEMMODEL_SEQ_CST;
5432 }
5433
5434 if (!flag_inline_atomics)
5435 return NULL_RTX;
5436
5437 /* Expand the operands. */
5438 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5439 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5440
5441 return expand_atomic_store (mem, val, model, false);
5442 }
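
/* Illustrative uses of the two builtins above (not part of GCC itself):

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);
     __atomic_store_n (&x, v + 1, __ATOMIC_RELEASE);

   RELEASE/ACQ_REL are rejected for the load and CONSUME/ACQUIRE/ACQ_REL for
   the store, matching the checks above.  */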
5443
5444 /* Expand the __atomic_fetch_XXX intrinsic:
5445 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results.
5448    CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5449 FETCH_AFTER is true if returning the result of the operation.
5450 FETCH_AFTER is false if returning the value before the operation.
5451 IGNORE is true if the result is not used.
5452 EXT_CALL is the correct builtin for an external call if this cannot be
5453 resolved to an instruction sequence. */
5454
5455 static rtx
5456 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5457 enum rtx_code code, bool fetch_after,
5458 bool ignore, enum built_in_function ext_call)
5459 {
5460 rtx val, mem, ret;
5461 enum memmodel model;
5462 tree fndecl;
5463 tree addr;
5464
5465 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5466
5467 /* Expand the operands. */
5468 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5469 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5470
5471 /* Only try generating instructions if inlining is turned on. */
5472 if (flag_inline_atomics)
5473 {
5474 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5475 if (ret)
5476 return ret;
5477 }
5478
5479 /* Return if a different routine isn't needed for the library call. */
5480 if (ext_call == BUILT_IN_NONE)
5481 return NULL_RTX;
5482
5483 /* Change the call to the specified function. */
5484 fndecl = get_callee_fndecl (exp);
5485 addr = CALL_EXPR_FN (exp);
5486 STRIP_NOPS (addr);
5487
5488 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5489 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5490
5491 /* Expand the call here so we can emit trailing code. */
5492 ret = expand_call (exp, target, ignore);
5493
5494 /* Replace the original function just in case it matters. */
5495 TREE_OPERAND (addr, 0) = fndecl;
5496
5497 /* Then issue the arithmetic correction to return the right result. */
5498 if (!ignore)
5499 {
5500 if (code == NOT)
5501 {
5502 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5503 OPTAB_LIB_WIDEN);
5504 ret = expand_simple_unop (mode, NOT, ret, target, true);
5505 }
5506 else
5507 ret = expand_simple_binop (mode, code, ret, val, target, true,
5508 OPTAB_LIB_WIDEN);
5509 }
5510 return ret;
5511 }
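
/* Illustrative source-level forms (not part of GCC itself):

     int before = __atomic_fetch_add (&x, 1, __ATOMIC_RELAXED);   FETCH_AFTER false
     int after  = __atomic_add_fetch (&x, 1, __ATOMIC_RELAXED);   FETCH_AFTER true

   When an xxx_fetch form has to fall back to the external fetch_xxx library
   routine (EXT_CALL), the trailing correction above recomputes the
   post-operation value from the returned pre-operation value.  */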
5512
5513 /* Expand an atomic clear operation.
5514    void __atomic_clear (BOOL *obj, enum memmodel)
5515 EXP is the call expression. */
5516
5517 static rtx
5518 expand_builtin_atomic_clear (tree exp)
5519 {
5520 machine_mode mode;
5521 rtx mem, ret;
5522 enum memmodel model;
5523
5524 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5525 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5526 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5527
5528 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5529 {
5530 warning (OPT_Winvalid_memory_model,
5531 	       "invalid memory model for %<__atomic_clear%>");
5532 model = MEMMODEL_SEQ_CST;
5533 }
5534
5535   /* Try issuing an atomic store, allowing fallback to a __sync_lock_release
5536      pattern.  The only way this can fail is if the bool type is larger than
5537      a word size.  Unlikely, but handle it anyway for completeness: fall back
5538      to a plain store, and assume a single threaded model since there is no
5539      atomic support in this case and no barriers are required.  */
5540 ret = expand_atomic_store (mem, const0_rtx, model, true);
5541 if (!ret)
5542 emit_move_insn (mem, const0_rtx);
5543 return const0_rtx;
5544 }
5545
5546 /* Expand an atomic test_and_set operation.
5547    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5548 EXP is the call expression. */
5549
5550 static rtx
5551 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5552 {
5553 rtx mem;
5554 enum memmodel model;
5555 machine_mode mode;
5556
5557 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5558 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5559 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5560
5561 return expand_atomic_test_and_set (target, mem, model);
5562 }
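
/* Illustrative flag-based lock built from __atomic_test_and_set and
   __atomic_clear (not part of GCC itself):

     static char guard;
     while (__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE))
       ;
     ...critical section...
     __atomic_clear (&guard, __ATOMIC_RELEASE);  */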
5563
5564
5565 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5566 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5567
5568 static tree
5569 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5570 {
5571 int size;
5572 machine_mode mode;
5573 unsigned int mode_align, type_align;
5574
5575 if (TREE_CODE (arg0) != INTEGER_CST)
5576 return NULL_TREE;
5577
5578 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5579 mode = mode_for_size (size, MODE_INT, 0);
5580 mode_align = GET_MODE_ALIGNMENT (mode);
5581
5582 if (TREE_CODE (arg1) == INTEGER_CST)
5583 {
5584 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5585
5586 /* Either this argument is null, or it's a fake pointer encoding
5587 the alignment of the object. */
5588 val = val & -val;
5589 val *= BITS_PER_UNIT;
5590
5591 if (val == 0 || mode_align < val)
5592 type_align = mode_align;
5593 else
5594 type_align = val;
5595 }
5596 else
5597 {
5598 tree ttype = TREE_TYPE (arg1);
5599
5600 /* This function is usually invoked and folded immediately by the front
5601 end before anything else has a chance to look at it. The pointer
5602 parameter at this point is usually cast to a void *, so check for that
5603 and look past the cast. */
5604 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5605 && VOID_TYPE_P (TREE_TYPE (ttype)))
5606 arg1 = TREE_OPERAND (arg1, 0);
5607
5608 ttype = TREE_TYPE (arg1);
5609 gcc_assert (POINTER_TYPE_P (ttype));
5610
5611 /* Get the underlying type of the object. */
5612 ttype = TREE_TYPE (ttype);
5613 type_align = TYPE_ALIGN (ttype);
5614 }
5615
5616 /* If the object has smaller alignment, the lock free routines cannot
5617 be used. */
5618 if (type_align < mode_align)
5619 return boolean_false_node;
5620
5621 /* Check if a compare_and_swap pattern exists for the mode which represents
5622 the required size. The pattern is not allowed to fail, so the existence
5623 of the pattern indicates support is present. */
5624 if (can_compare_and_swap_p (mode, true))
5625 return boolean_true_node;
5626 else
5627 return boolean_false_node;
5628 }
5629
5630 /* Return true if the parameters to call EXP represent an object which will
5631 always generate lock free instructions. The first argument represents the
5632 size of the object, and the second parameter is a pointer to the object
5633 itself. If NULL is passed for the object, then the result is based on
5634 typical alignment for an object of the specified size. Otherwise return
5635 false. */
5636
5637 static rtx
5638 expand_builtin_atomic_always_lock_free (tree exp)
5639 {
5640 tree size;
5641 tree arg0 = CALL_EXPR_ARG (exp, 0);
5642 tree arg1 = CALL_EXPR_ARG (exp, 1);
5643
5644 if (TREE_CODE (arg0) != INTEGER_CST)
5645 {
5646 error ("non-constant argument 1 to __atomic_always_lock_free");
5647 return const0_rtx;
5648 }
5649
5650 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5651 if (size == boolean_true_node)
5652 return const1_rtx;
5653 return const0_rtx;
5654 }
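
/* For example (illustrative, on a typical target with 32-bit int):

     int x;
     ... __atomic_always_lock_free (sizeof (x), &x) ...

   folds to 1 at compile time when a never-failing compare-and-swap pattern
   exists for the 32-bit integer mode; passing a null pointer instead of &x
   makes the answer depend only on the typical alignment for that size.  */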
5655
5656 /* Return boolean_true_node if it can be determined that object ARG1 of size
5657    ARG0 is lock free on this architecture, otherwise NULL_TREE.  */
5658
5659 static tree
5660 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5661 {
5662 if (!flag_inline_atomics)
5663 return NULL_TREE;
5664
5665 /* If it isn't always lock free, don't generate a result. */
5666 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5667 return boolean_true_node;
5668
5669 return NULL_TREE;
5670 }
5671
5672 /* Return true if the parameters to call EXP represent an object which will
5673 always generate lock free instructions. The first argument represents the
5674 size of the object, and the second parameter is a pointer to the object
5675 itself. If NULL is passed for the object, then the result is based on
5676 typical alignment for an object of the specified size. Otherwise return
5677    NULL.  */
5678
5679 static rtx
5680 expand_builtin_atomic_is_lock_free (tree exp)
5681 {
5682 tree size;
5683 tree arg0 = CALL_EXPR_ARG (exp, 0);
5684 tree arg1 = CALL_EXPR_ARG (exp, 1);
5685
5686 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5687 {
5688 error ("non-integer argument 1 to __atomic_is_lock_free");
5689 return NULL_RTX;
5690 }
5691
5692 if (!flag_inline_atomics)
5693 return NULL_RTX;
5694
5695 /* If the value is known at compile time, return the RTX for it. */
5696 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5697 if (size == boolean_true_node)
5698 return const1_rtx;
5699
5700 return NULL_RTX;
5701 }
5702
5703 /* Expand the __atomic_thread_fence intrinsic:
5704 void __atomic_thread_fence (enum memmodel)
5705 EXP is the CALL_EXPR. */
5706
5707 static void
5708 expand_builtin_atomic_thread_fence (tree exp)
5709 {
5710 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5711 expand_mem_thread_fence (model);
5712 }
5713
5714 /* Expand the __atomic_signal_fence intrinsic:
5715 void __atomic_signal_fence (enum memmodel)
5716 EXP is the CALL_EXPR. */
5717
5718 static void
5719 expand_builtin_atomic_signal_fence (tree exp)
5720 {
5721 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5722 expand_mem_signal_fence (model);
5723 }
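
/* Illustrative uses (not part of GCC itself):

     __atomic_thread_fence (__ATOMIC_SEQ_CST);    inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);    compiler-only barrier wrt a
                                                  signal handler in the same
                                                  thread  */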
5724
5725 /* Expand the __sync_synchronize intrinsic. */
5726
5727 static void
5728 expand_builtin_sync_synchronize (void)
5729 {
5730 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5731 }
5732
5733 static rtx
5734 expand_builtin_thread_pointer (tree exp, rtx target)
5735 {
5736 enum insn_code icode;
5737 if (!validate_arglist (exp, VOID_TYPE))
5738 return const0_rtx;
5739 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5740 if (icode != CODE_FOR_nothing)
5741 {
5742 struct expand_operand op;
5743       /* If the target is not suitable then create a new target.  */
5744 if (target == NULL_RTX
5745 || !REG_P (target)
5746 || GET_MODE (target) != Pmode)
5747 target = gen_reg_rtx (Pmode);
5748 create_output_operand (&op, target, Pmode);
5749 expand_insn (icode, 1, &op);
5750 return target;
5751 }
5752 error ("__builtin_thread_pointer is not supported on this target");
5753 return const0_rtx;
5754 }
5755
5756 static void
5757 expand_builtin_set_thread_pointer (tree exp)
5758 {
5759 enum insn_code icode;
5760 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5761 return;
5762 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5763 if (icode != CODE_FOR_nothing)
5764 {
5765 struct expand_operand op;
5766 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5767 Pmode, EXPAND_NORMAL);
5768 create_input_operand (&op, val, Pmode);
5769 expand_insn (icode, 1, &op);
5770 return;
5771 }
5772 error ("__builtin_set_thread_pointer is not supported on this target");
5773 }
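
/* Illustrative uses (not part of GCC itself), on targets that provide the
   corresponding optabs:

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   each expands to a single access to the thread-local storage base
   register; otherwise an error is emitted as above.  */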
5774
5775 \f
5776 /* Emit code to restore the current value of the stack.  */
5777
5778 static void
5779 expand_stack_restore (tree var)
5780 {
5781 rtx_insn *prev;
5782 rtx sa = expand_normal (var);
5783
5784 sa = convert_memory_address (Pmode, sa);
5785
5786 prev = get_last_insn ();
5787 emit_stack_restore (SAVE_BLOCK, sa);
5788
5789 record_new_stack_level ();
5790
5791 fixup_args_size_notes (prev, get_last_insn (), 0);
5792 }
5793
5794 /* Emit code to save the current value of the stack.  */
5795
5796 static rtx
5797 expand_stack_save (void)
5798 {
5799 rtx ret = NULL_RTX;
5800
5801 emit_stack_save (SAVE_BLOCK, &ret);
5802 return ret;
5803 }
5804
5805
5806 /* Expand an expression EXP that calls a built-in function,
5807 with result going to TARGET if that's convenient
5808 (and in mode MODE if that's convenient).
5809 SUBTARGET may be used as the target for computing one of EXP's operands.
5810 IGNORE is nonzero if the value is to be ignored. */
5811
5812 rtx
5813 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5814 int ignore)
5815 {
5816 tree fndecl = get_callee_fndecl (exp);
5817 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5818 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5819 int flags;
5820
5821 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5822 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5823
5824 /* When ASan is enabled, we don't want to expand some memory/string
5825 builtins and rely on libsanitizer's hooks. This allows us to avoid
5826    redundant checks and be sure that a possible overflow will be detected
5827 by ASan. */
5828
5829 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5830 return expand_call (exp, target, ignore);
5831
5832 /* When not optimizing, generate calls to library functions for a certain
5833 set of builtins. */
5834 if (!optimize
5835 && !called_as_built_in (fndecl)
5836 && fcode != BUILT_IN_FORK
5837 && fcode != BUILT_IN_EXECL
5838 && fcode != BUILT_IN_EXECV
5839 && fcode != BUILT_IN_EXECLP
5840 && fcode != BUILT_IN_EXECLE
5841 && fcode != BUILT_IN_EXECVP
5842 && fcode != BUILT_IN_EXECVE
5843 && fcode != BUILT_IN_ALLOCA
5844 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5845 && fcode != BUILT_IN_FREE
5846 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5847 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5848 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5849 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5850 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5851 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5852 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5853 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5854 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5855 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5856 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5857 && fcode != BUILT_IN_CHKP_BNDRET)
5858 return expand_call (exp, target, ignore);
5859
5860 /* The built-in function expanders test for target == const0_rtx
5861 to determine whether the function's result will be ignored. */
5862 if (ignore)
5863 target = const0_rtx;
5864
5865 /* If the result of a pure or const built-in function is ignored, and
5866 none of its arguments are volatile, we can avoid expanding the
5867 built-in call and just evaluate the arguments for side-effects. */
5868 if (target == const0_rtx
5869 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5870 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5871 {
5872 bool volatilep = false;
5873 tree arg;
5874 call_expr_arg_iterator iter;
5875
5876 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5877 if (TREE_THIS_VOLATILE (arg))
5878 {
5879 volatilep = true;
5880 break;
5881 }
5882
5883 if (! volatilep)
5884 {
5885 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5886 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5887 return const0_rtx;
5888 }
5889 }
5890
5891 /* expand_builtin_with_bounds is supposed to be used for
5892 instrumented builtin calls. */
5893 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5894
5895 switch (fcode)
5896 {
5897 CASE_FLT_FN (BUILT_IN_FABS):
5898 case BUILT_IN_FABSD32:
5899 case BUILT_IN_FABSD64:
5900 case BUILT_IN_FABSD128:
5901 target = expand_builtin_fabs (exp, target, subtarget);
5902 if (target)
5903 return target;
5904 break;
5905
5906 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5907 target = expand_builtin_copysign (exp, target, subtarget);
5908 if (target)
5909 return target;
5910 break;
5911
5912 /* Just do a normal library call if we were unable to fold
5913 the values. */
5914 CASE_FLT_FN (BUILT_IN_CABS):
5915 break;
5916
5917 CASE_FLT_FN (BUILT_IN_EXP):
5918 CASE_FLT_FN (BUILT_IN_EXP10):
5919 CASE_FLT_FN (BUILT_IN_POW10):
5920 CASE_FLT_FN (BUILT_IN_EXP2):
5921 CASE_FLT_FN (BUILT_IN_EXPM1):
5922 CASE_FLT_FN (BUILT_IN_LOGB):
5923 CASE_FLT_FN (BUILT_IN_LOG):
5924 CASE_FLT_FN (BUILT_IN_LOG10):
5925 CASE_FLT_FN (BUILT_IN_LOG2):
5926 CASE_FLT_FN (BUILT_IN_LOG1P):
5927 CASE_FLT_FN (BUILT_IN_TAN):
5928 CASE_FLT_FN (BUILT_IN_ASIN):
5929 CASE_FLT_FN (BUILT_IN_ACOS):
5930 CASE_FLT_FN (BUILT_IN_ATAN):
5931 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5932 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5933 because of possible accuracy problems. */
5934 if (! flag_unsafe_math_optimizations)
5935 break;
5936 CASE_FLT_FN (BUILT_IN_SQRT):
5937 CASE_FLT_FN (BUILT_IN_FLOOR):
5938 CASE_FLT_FN (BUILT_IN_CEIL):
5939 CASE_FLT_FN (BUILT_IN_TRUNC):
5940 CASE_FLT_FN (BUILT_IN_ROUND):
5941 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5942 CASE_FLT_FN (BUILT_IN_RINT):
5943 target = expand_builtin_mathfn (exp, target, subtarget);
5944 if (target)
5945 return target;
5946 break;
5947
5948 CASE_FLT_FN (BUILT_IN_FMA):
5949 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5950 if (target)
5951 return target;
5952 break;
5953
5954 CASE_FLT_FN (BUILT_IN_ILOGB):
5955 if (! flag_unsafe_math_optimizations)
5956 break;
5957 CASE_FLT_FN (BUILT_IN_ISINF):
5958 CASE_FLT_FN (BUILT_IN_FINITE):
5959 case BUILT_IN_ISFINITE:
5960 case BUILT_IN_ISNORMAL:
5961 target = expand_builtin_interclass_mathfn (exp, target);
5962 if (target)
5963 return target;
5964 break;
5965
5966 CASE_FLT_FN (BUILT_IN_ICEIL):
5967 CASE_FLT_FN (BUILT_IN_LCEIL):
5968 CASE_FLT_FN (BUILT_IN_LLCEIL):
5969 CASE_FLT_FN (BUILT_IN_LFLOOR):
5970 CASE_FLT_FN (BUILT_IN_IFLOOR):
5971 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5972 target = expand_builtin_int_roundingfn (exp, target);
5973 if (target)
5974 return target;
5975 break;
5976
5977 CASE_FLT_FN (BUILT_IN_IRINT):
5978 CASE_FLT_FN (BUILT_IN_LRINT):
5979 CASE_FLT_FN (BUILT_IN_LLRINT):
5980 CASE_FLT_FN (BUILT_IN_IROUND):
5981 CASE_FLT_FN (BUILT_IN_LROUND):
5982 CASE_FLT_FN (BUILT_IN_LLROUND):
5983 target = expand_builtin_int_roundingfn_2 (exp, target);
5984 if (target)
5985 return target;
5986 break;
5987
5988 CASE_FLT_FN (BUILT_IN_POWI):
5989 target = expand_builtin_powi (exp, target);
5990 if (target)
5991 return target;
5992 break;
5993
5994 CASE_FLT_FN (BUILT_IN_ATAN2):
5995 CASE_FLT_FN (BUILT_IN_LDEXP):
5996 CASE_FLT_FN (BUILT_IN_SCALB):
5997 CASE_FLT_FN (BUILT_IN_SCALBN):
5998 CASE_FLT_FN (BUILT_IN_SCALBLN):
5999 if (! flag_unsafe_math_optimizations)
6000 break;
6001
6002 CASE_FLT_FN (BUILT_IN_FMOD):
6003 CASE_FLT_FN (BUILT_IN_REMAINDER):
6004 CASE_FLT_FN (BUILT_IN_DREM):
6005 CASE_FLT_FN (BUILT_IN_POW):
6006 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6007 if (target)
6008 return target;
6009 break;
6010
6011 CASE_FLT_FN (BUILT_IN_CEXPI):
6012 target = expand_builtin_cexpi (exp, target);
6013 gcc_assert (target);
6014 return target;
6015
6016 CASE_FLT_FN (BUILT_IN_SIN):
6017 CASE_FLT_FN (BUILT_IN_COS):
6018 if (! flag_unsafe_math_optimizations)
6019 break;
6020 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6021 if (target)
6022 return target;
6023 break;
6024
6025 CASE_FLT_FN (BUILT_IN_SINCOS):
6026 if (! flag_unsafe_math_optimizations)
6027 break;
6028 target = expand_builtin_sincos (exp);
6029 if (target)
6030 return target;
6031 break;
6032
6033 case BUILT_IN_APPLY_ARGS:
6034 return expand_builtin_apply_args ();
6035
6036 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6037 FUNCTION with a copy of the parameters described by
6038 ARGUMENTS, and ARGSIZE. It returns a block of memory
6039 allocated on the stack into which is stored all the registers
6040 that might possibly be used for returning the result of a
6041 function. ARGUMENTS is the value returned by
6042 __builtin_apply_args. ARGSIZE is the number of bytes of
6043 arguments that must be copied. ??? How should this value be
6044 computed? We'll also need a safe worst case value for varargs
6045 functions. */
6046 case BUILT_IN_APPLY:
6047 if (!validate_arglist (exp, POINTER_TYPE,
6048 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6049 && !validate_arglist (exp, REFERENCE_TYPE,
6050 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6051 return const0_rtx;
6052 else
6053 {
6054 rtx ops[3];
6055
6056 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6057 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6058 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6059
6060 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6061 }
6062
6063 /* __builtin_return (RESULT) causes the function to return the
6064    value described by RESULT.  RESULT is the address of the block of
6065 memory returned by __builtin_apply. */
6066 case BUILT_IN_RETURN:
6067 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6068 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6069 return const0_rtx;
6070
6071 case BUILT_IN_SAVEREGS:
6072 return expand_builtin_saveregs ();
6073
6074 case BUILT_IN_VA_ARG_PACK:
6075 /* All valid uses of __builtin_va_arg_pack () are removed during
6076 inlining. */
6077 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6078 return const0_rtx;
6079
6080 case BUILT_IN_VA_ARG_PACK_LEN:
6081 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6082 inlining. */
6083 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6084 return const0_rtx;
6085
6086 /* Return the address of the first anonymous stack arg. */
6087 case BUILT_IN_NEXT_ARG:
6088 if (fold_builtin_next_arg (exp, false))
6089 return const0_rtx;
6090 return expand_builtin_next_arg ();
6091
6092 case BUILT_IN_CLEAR_CACHE:
6093 target = expand_builtin___clear_cache (exp);
6094 if (target)
6095 return target;
6096 break;
6097
6098 case BUILT_IN_CLASSIFY_TYPE:
6099 return expand_builtin_classify_type (exp);
6100
6101 case BUILT_IN_CONSTANT_P:
6102 return const0_rtx;
6103
6104 case BUILT_IN_FRAME_ADDRESS:
6105 case BUILT_IN_RETURN_ADDRESS:
6106 return expand_builtin_frame_address (fndecl, exp);
6107
6108      /* Returns the address of the area where the structure is returned,
6109         or 0 otherwise.  */
6110 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6111 if (call_expr_nargs (exp) != 0
6112 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6113 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6114 return const0_rtx;
6115 else
6116 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6117
6118 case BUILT_IN_ALLOCA:
6119 case BUILT_IN_ALLOCA_WITH_ALIGN:
6120 /* If the allocation stems from the declaration of a variable-sized
6121 object, it cannot accumulate. */
6122 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6123 if (target)
6124 return target;
6125 break;
6126
6127 case BUILT_IN_STACK_SAVE:
6128 return expand_stack_save ();
6129
6130 case BUILT_IN_STACK_RESTORE:
6131 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6132 return const0_rtx;
6133
6134 case BUILT_IN_BSWAP16:
6135 case BUILT_IN_BSWAP32:
6136 case BUILT_IN_BSWAP64:
6137 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6138 if (target)
6139 return target;
6140 break;
6141
6142 CASE_INT_FN (BUILT_IN_FFS):
6143 target = expand_builtin_unop (target_mode, exp, target,
6144 subtarget, ffs_optab);
6145 if (target)
6146 return target;
6147 break;
6148
6149 CASE_INT_FN (BUILT_IN_CLZ):
6150 target = expand_builtin_unop (target_mode, exp, target,
6151 subtarget, clz_optab);
6152 if (target)
6153 return target;
6154 break;
6155
6156 CASE_INT_FN (BUILT_IN_CTZ):
6157 target = expand_builtin_unop (target_mode, exp, target,
6158 subtarget, ctz_optab);
6159 if (target)
6160 return target;
6161 break;
6162
6163 CASE_INT_FN (BUILT_IN_CLRSB):
6164 target = expand_builtin_unop (target_mode, exp, target,
6165 subtarget, clrsb_optab);
6166 if (target)
6167 return target;
6168 break;
6169
6170 CASE_INT_FN (BUILT_IN_POPCOUNT):
6171 target = expand_builtin_unop (target_mode, exp, target,
6172 subtarget, popcount_optab);
6173 if (target)
6174 return target;
6175 break;
6176
6177 CASE_INT_FN (BUILT_IN_PARITY):
6178 target = expand_builtin_unop (target_mode, exp, target,
6179 subtarget, parity_optab);
6180 if (target)
6181 return target;
6182 break;
6183
6184 case BUILT_IN_STRLEN:
6185 target = expand_builtin_strlen (exp, target, target_mode);
6186 if (target)
6187 return target;
6188 break;
6189
6190 case BUILT_IN_STRCPY:
6191 target = expand_builtin_strcpy (exp, target);
6192 if (target)
6193 return target;
6194 break;
6195
6196 case BUILT_IN_STRNCPY:
6197 target = expand_builtin_strncpy (exp, target);
6198 if (target)
6199 return target;
6200 break;
6201
6202 case BUILT_IN_STPCPY:
6203 target = expand_builtin_stpcpy (exp, target, mode);
6204 if (target)
6205 return target;
6206 break;
6207
6208 case BUILT_IN_MEMCPY:
6209 target = expand_builtin_memcpy (exp, target);
6210 if (target)
6211 return target;
6212 break;
6213
6214 case BUILT_IN_MEMPCPY:
6215 target = expand_builtin_mempcpy (exp, target, mode);
6216 if (target)
6217 return target;
6218 break;
6219
6220 case BUILT_IN_MEMSET:
6221 target = expand_builtin_memset (exp, target, mode);
6222 if (target)
6223 return target;
6224 break;
6225
6226 case BUILT_IN_BZERO:
6227 target = expand_builtin_bzero (exp);
6228 if (target)
6229 return target;
6230 break;
6231
6232 case BUILT_IN_STRCMP:
6233 target = expand_builtin_strcmp (exp, target);
6234 if (target)
6235 return target;
6236 break;
6237
6238 case BUILT_IN_STRNCMP:
6239 target = expand_builtin_strncmp (exp, target, mode);
6240 if (target)
6241 return target;
6242 break;
6243
6244 case BUILT_IN_BCMP:
6245 case BUILT_IN_MEMCMP:
6246 target = expand_builtin_memcmp (exp, target);
6247 if (target)
6248 return target;
6249 break;
6250
6251 case BUILT_IN_SETJMP:
6252 /* This should have been lowered to the builtins below. */
6253 gcc_unreachable ();
6254
6255 case BUILT_IN_SETJMP_SETUP:
6256 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6257 and the receiver label. */
6258 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6259 {
6260 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6261 VOIDmode, EXPAND_NORMAL);
6262 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6263 rtx_insn *label_r = label_rtx (label);
6264
6265 /* This is copied from the handling of non-local gotos. */
6266 expand_builtin_setjmp_setup (buf_addr, label_r);
6267 nonlocal_goto_handler_labels
6268 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6269 nonlocal_goto_handler_labels);
6270 /* ??? Do not let expand_label treat us as such since we would
6271 not want to be both on the list of non-local labels and on
6272 the list of forced labels. */
6273 FORCED_LABEL (label) = 0;
6274 return const0_rtx;
6275 }
6276 break;
6277
6278 case BUILT_IN_SETJMP_RECEIVER:
6279 /* __builtin_setjmp_receiver is passed the receiver label. */
6280 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6281 {
6282 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6283 rtx_insn *label_r = label_rtx (label);
6284
6285 expand_builtin_setjmp_receiver (label_r);
6286 return const0_rtx;
6287 }
6288 break;
6289
6290 /* __builtin_longjmp is passed a pointer to an array of five words.
6291 It's similar to the C library longjmp function but works with
6292 __builtin_setjmp above. */
6293 case BUILT_IN_LONGJMP:
6294 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6295 {
6296 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6297 VOIDmode, EXPAND_NORMAL);
6298 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6299
6300 if (value != const1_rtx)
6301 {
6302 error ("%<__builtin_longjmp%> second argument must be 1");
6303 return const0_rtx;
6304 }
6305
6306 expand_builtin_longjmp (buf_addr, value);
6307 return const0_rtx;
6308 }
6309 break;
6310
6311 case BUILT_IN_NONLOCAL_GOTO:
6312 target = expand_builtin_nonlocal_goto (exp);
6313 if (target)
6314 return target;
6315 break;
6316
6317 /* This updates the setjmp buffer that is its argument with the value
6318 of the current stack pointer. */
6319 case BUILT_IN_UPDATE_SETJMP_BUF:
6320 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6321 {
6322 rtx buf_addr
6323 = expand_normal (CALL_EXPR_ARG (exp, 0));
6324
6325 expand_builtin_update_setjmp_buf (buf_addr);
6326 return const0_rtx;
6327 }
6328 break;
6329
6330 case BUILT_IN_TRAP:
6331 expand_builtin_trap ();
6332 return const0_rtx;
6333
6334 case BUILT_IN_UNREACHABLE:
6335 expand_builtin_unreachable ();
6336 return const0_rtx;
6337
6338 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6339 case BUILT_IN_SIGNBITD32:
6340 case BUILT_IN_SIGNBITD64:
6341 case BUILT_IN_SIGNBITD128:
6342 target = expand_builtin_signbit (exp, target);
6343 if (target)
6344 return target;
6345 break;
6346
6347 /* Various hooks for the DWARF 2 __throw routine. */
6348 case BUILT_IN_UNWIND_INIT:
6349 expand_builtin_unwind_init ();
6350 return const0_rtx;
6351 case BUILT_IN_DWARF_CFA:
6352 return virtual_cfa_rtx;
6353 #ifdef DWARF2_UNWIND_INFO
6354 case BUILT_IN_DWARF_SP_COLUMN:
6355 return expand_builtin_dwarf_sp_column ();
6356 case BUILT_IN_INIT_DWARF_REG_SIZES:
6357 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6358 return const0_rtx;
6359 #endif
6360 case BUILT_IN_FROB_RETURN_ADDR:
6361 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6362 case BUILT_IN_EXTRACT_RETURN_ADDR:
6363 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6364 case BUILT_IN_EH_RETURN:
6365 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6366 CALL_EXPR_ARG (exp, 1));
6367 return const0_rtx;
6368 case BUILT_IN_EH_RETURN_DATA_REGNO:
6369 return expand_builtin_eh_return_data_regno (exp);
6370 case BUILT_IN_EXTEND_POINTER:
6371 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6372 case BUILT_IN_EH_POINTER:
6373 return expand_builtin_eh_pointer (exp);
6374 case BUILT_IN_EH_FILTER:
6375 return expand_builtin_eh_filter (exp);
6376 case BUILT_IN_EH_COPY_VALUES:
6377 return expand_builtin_eh_copy_values (exp);
6378
6379 case BUILT_IN_VA_START:
6380 return expand_builtin_va_start (exp);
6381 case BUILT_IN_VA_END:
6382 return expand_builtin_va_end (exp);
6383 case BUILT_IN_VA_COPY:
6384 return expand_builtin_va_copy (exp);
6385 case BUILT_IN_EXPECT:
6386 return expand_builtin_expect (exp, target);
6387 case BUILT_IN_ASSUME_ALIGNED:
6388 return expand_builtin_assume_aligned (exp, target);
6389 case BUILT_IN_PREFETCH:
6390 expand_builtin_prefetch (exp);
6391 return const0_rtx;
6392
6393 case BUILT_IN_INIT_TRAMPOLINE:
6394 return expand_builtin_init_trampoline (exp, true);
6395 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6396 return expand_builtin_init_trampoline (exp, false);
6397 case BUILT_IN_ADJUST_TRAMPOLINE:
6398 return expand_builtin_adjust_trampoline (exp);
6399
6400 case BUILT_IN_FORK:
6401 case BUILT_IN_EXECL:
6402 case BUILT_IN_EXECV:
6403 case BUILT_IN_EXECLP:
6404 case BUILT_IN_EXECLE:
6405 case BUILT_IN_EXECVP:
6406 case BUILT_IN_EXECVE:
6407 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6408 if (target)
6409 return target;
6410 break;
6411
6412 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6413 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6414 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6415 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6416 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6418 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6419 if (target)
6420 return target;
6421 break;
6422
6423 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6424 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6425 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6426 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6427 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6429 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6430 if (target)
6431 return target;
6432 break;
6433
6434 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6435 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6436 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6437 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6438 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6440 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6441 if (target)
6442 return target;
6443 break;
6444
6445 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6446 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6447 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6448 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6449 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6451 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6452 if (target)
6453 return target;
6454 break;
6455
6456 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6457 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6458 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6459 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6460 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6462 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6463 if (target)
6464 return target;
6465 break;
6466
6467 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6468 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6469 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6470 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6471 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6473 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6474 if (target)
6475 return target;
6476 break;
6477
6478 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6479 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6480 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6481 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6482 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6483 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6484 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6485 if (target)
6486 return target;
6487 break;
6488
6489 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6490 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6491 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6492 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6493 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6494 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6495 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6496 if (target)
6497 return target;
6498 break;
6499
6500 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6501 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6502 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6503 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6504 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6505 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6506 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6507 if (target)
6508 return target;
6509 break;
6510
6511 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6512 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6513 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6514 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6515 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6517 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6518 if (target)
6519 return target;
6520 break;
6521
6522 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6523 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6524 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6525 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6526 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6528 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6529 if (target)
6530 return target;
6531 break;
6532
6533 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6534 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6535 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6536 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6537 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6538 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6539 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6540 if (target)
6541 return target;
6542 break;
6543
6544 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6545 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6546 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6547 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6548 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6549 if (mode == VOIDmode)
6550 mode = TYPE_MODE (boolean_type_node);
6551 if (!target || !register_operand (target, mode))
6552 target = gen_reg_rtx (mode);
6553
6554 mode = get_builtin_sync_mode
6555 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6556 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6557 if (target)
6558 return target;
6559 break;
6560
6561 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6562 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6563 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6564 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6565 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6566 mode = get_builtin_sync_mode
6567 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6568 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6569 if (target)
6570 return target;
6571 break;
6572
6573 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6574 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6575 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6576 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6577 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6579 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6580 if (target)
6581 return target;
6582 break;
6583
6584 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6585 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6586 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6587 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6588 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6589 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6590 expand_builtin_sync_lock_release (mode, exp);
6591 return const0_rtx;
6592
6593 case BUILT_IN_SYNC_SYNCHRONIZE:
6594 expand_builtin_sync_synchronize ();
6595 return const0_rtx;
6596
6597 case BUILT_IN_ATOMIC_EXCHANGE_1:
6598 case BUILT_IN_ATOMIC_EXCHANGE_2:
6599 case BUILT_IN_ATOMIC_EXCHANGE_4:
6600 case BUILT_IN_ATOMIC_EXCHANGE_8:
6601 case BUILT_IN_ATOMIC_EXCHANGE_16:
6602 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6603 target = expand_builtin_atomic_exchange (mode, exp, target);
6604 if (target)
6605 return target;
6606 break;
6607
6608 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6609 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6610 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6611 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6612 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6613 {
6614 unsigned int nargs, z;
6615 vec<tree, va_gc> *vec;
6616
6617 mode =
6618 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6619 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6620 if (target)
6621 return target;
6622
6623 /* If this is turned into an external library call, the weak parameter
6624 must be dropped to match the expected parameter list. */
6625 nargs = call_expr_nargs (exp);
6626 vec_alloc (vec, nargs - 1);
6627 for (z = 0; z < 3; z++)
6628 vec->quick_push (CALL_EXPR_ARG (exp, z));
6629 /* Skip the boolean weak parameter. */
6630 for (z = 4; z < 6; z++)
6631 vec->quick_push (CALL_EXPR_ARG (exp, z));
6632 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6633 break;
6634 }
6635
6636 case BUILT_IN_ATOMIC_LOAD_1:
6637 case BUILT_IN_ATOMIC_LOAD_2:
6638 case BUILT_IN_ATOMIC_LOAD_4:
6639 case BUILT_IN_ATOMIC_LOAD_8:
6640 case BUILT_IN_ATOMIC_LOAD_16:
6641 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6642 target = expand_builtin_atomic_load (mode, exp, target);
6643 if (target)
6644 return target;
6645 break;
6646
6647 case BUILT_IN_ATOMIC_STORE_1:
6648 case BUILT_IN_ATOMIC_STORE_2:
6649 case BUILT_IN_ATOMIC_STORE_4:
6650 case BUILT_IN_ATOMIC_STORE_8:
6651 case BUILT_IN_ATOMIC_STORE_16:
6652 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6653 target = expand_builtin_atomic_store (mode, exp);
6654 if (target)
6655 return const0_rtx;
6656 break;
6657
6658 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6659 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6660 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6661 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6662 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6663 {
6664 enum built_in_function lib;
6665 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6666 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6667 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6668 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6669 ignore, lib);
6670 if (target)
6671 return target;
6672 break;
6673 }
6674 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6675 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6676 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6677 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6678 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6679 {
6680 enum built_in_function lib;
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6682 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6683 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6684 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6685 ignore, lib);
6686 if (target)
6687 return target;
6688 break;
6689 }
6690 case BUILT_IN_ATOMIC_AND_FETCH_1:
6691 case BUILT_IN_ATOMIC_AND_FETCH_2:
6692 case BUILT_IN_ATOMIC_AND_FETCH_4:
6693 case BUILT_IN_ATOMIC_AND_FETCH_8:
6694 case BUILT_IN_ATOMIC_AND_FETCH_16:
6695 {
6696 enum built_in_function lib;
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6698 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6699 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6700 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6701 ignore, lib);
6702 if (target)
6703 return target;
6704 break;
6705 }
6706 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6707 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6708 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6709 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6710 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6711 {
6712 enum built_in_function lib;
6713 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6714 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6715 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6716 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6717 ignore, lib);
6718 if (target)
6719 return target;
6720 break;
6721 }
6722 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6723 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6724 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6725 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6726 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6727 {
6728 enum built_in_function lib;
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6730 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6731 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6732 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6733 ignore, lib);
6734 if (target)
6735 return target;
6736 break;
6737 }
6738 case BUILT_IN_ATOMIC_OR_FETCH_1:
6739 case BUILT_IN_ATOMIC_OR_FETCH_2:
6740 case BUILT_IN_ATOMIC_OR_FETCH_4:
6741 case BUILT_IN_ATOMIC_OR_FETCH_8:
6742 case BUILT_IN_ATOMIC_OR_FETCH_16:
6743 {
6744 enum built_in_function lib;
6745 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6746 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6747 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6748 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6749 ignore, lib);
6750 if (target)
6751 return target;
6752 break;
6753 }
6754 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6755 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6756 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6757 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6758 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6759 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6760 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6761 ignore, BUILT_IN_NONE);
6762 if (target)
6763 return target;
6764 break;
6765
6766 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6767 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6768 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6769 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6770 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6771 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6772 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6773 ignore, BUILT_IN_NONE);
6774 if (target)
6775 return target;
6776 break;
6777
6778 case BUILT_IN_ATOMIC_FETCH_AND_1:
6779 case BUILT_IN_ATOMIC_FETCH_AND_2:
6780 case BUILT_IN_ATOMIC_FETCH_AND_4:
6781 case BUILT_IN_ATOMIC_FETCH_AND_8:
6782 case BUILT_IN_ATOMIC_FETCH_AND_16:
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6784 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6785 ignore, BUILT_IN_NONE);
6786 if (target)
6787 return target;
6788 break;
6789
6790 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6791 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6792 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6793 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6794 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6795 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6796 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6797 ignore, BUILT_IN_NONE);
6798 if (target)
6799 return target;
6800 break;
6801
6802 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6803 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6804 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6805 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6806 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6807 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6808 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6809 ignore, BUILT_IN_NONE);
6810 if (target)
6811 return target;
6812 break;
6813
6814 case BUILT_IN_ATOMIC_FETCH_OR_1:
6815 case BUILT_IN_ATOMIC_FETCH_OR_2:
6816 case BUILT_IN_ATOMIC_FETCH_OR_4:
6817 case BUILT_IN_ATOMIC_FETCH_OR_8:
6818 case BUILT_IN_ATOMIC_FETCH_OR_16:
6819 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6820 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6821 ignore, BUILT_IN_NONE);
6822 if (target)
6823 return target;
6824 break;
6825
6826 case BUILT_IN_ATOMIC_TEST_AND_SET:
6827 return expand_builtin_atomic_test_and_set (exp, target);
6828
6829 case BUILT_IN_ATOMIC_CLEAR:
6830 return expand_builtin_atomic_clear (exp);
6831
6832 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6833 return expand_builtin_atomic_always_lock_free (exp);
6834
6835 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6836 target = expand_builtin_atomic_is_lock_free (exp);
6837 if (target)
6838 return target;
6839 break;
6840
6841 case BUILT_IN_ATOMIC_THREAD_FENCE:
6842 expand_builtin_atomic_thread_fence (exp);
6843 return const0_rtx;
6844
6845 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6846 expand_builtin_atomic_signal_fence (exp);
6847 return const0_rtx;
6848
6849 case BUILT_IN_OBJECT_SIZE:
6850 return expand_builtin_object_size (exp);
6851
6852 case BUILT_IN_MEMCPY_CHK:
6853 case BUILT_IN_MEMPCPY_CHK:
6854 case BUILT_IN_MEMMOVE_CHK:
6855 case BUILT_IN_MEMSET_CHK:
6856 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6857 if (target)
6858 return target;
6859 break;
6860
6861 case BUILT_IN_STRCPY_CHK:
6862 case BUILT_IN_STPCPY_CHK:
6863 case BUILT_IN_STRNCPY_CHK:
6864 case BUILT_IN_STPNCPY_CHK:
6865 case BUILT_IN_STRCAT_CHK:
6866 case BUILT_IN_STRNCAT_CHK:
6867 case BUILT_IN_SNPRINTF_CHK:
6868 case BUILT_IN_VSNPRINTF_CHK:
6869 maybe_emit_chk_warning (exp, fcode);
6870 break;
6871
6872 case BUILT_IN_SPRINTF_CHK:
6873 case BUILT_IN_VSPRINTF_CHK:
6874 maybe_emit_sprintf_chk_warning (exp, fcode);
6875 break;
6876
6877 case BUILT_IN_FREE:
6878 if (warn_free_nonheap_object)
6879 maybe_emit_free_warning (exp);
6880 break;
6881
6882 case BUILT_IN_THREAD_POINTER:
6883 return expand_builtin_thread_pointer (exp, target);
6884
6885 case BUILT_IN_SET_THREAD_POINTER:
6886 expand_builtin_set_thread_pointer (exp);
6887 return const0_rtx;
6888
6889 case BUILT_IN_CILK_DETACH:
6890 expand_builtin_cilk_detach (exp);
6891 return const0_rtx;
6892
6893 case BUILT_IN_CILK_POP_FRAME:
6894 expand_builtin_cilk_pop_frame (exp);
6895 return const0_rtx;
6896
6897 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6898 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6899 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6900 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6901 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6902 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6903 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6904 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6905 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6906 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6907 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6908 /* We allow user CHKP builtins if the Pointer Bounds
6909 Checker is off. */
6910 if (!chkp_function_instrumented_p (current_function_decl))
6911 {
6912 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6913 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6914 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6915 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6916 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6917 return expand_normal (CALL_EXPR_ARG (exp, 0));
6918 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6919 return expand_normal (size_zero_node);
6920 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6921 return expand_normal (size_int (-1));
6922 else
6923 return const0_rtx;
6924 }
6925 /* FALLTHROUGH */
6926
6927 case BUILT_IN_CHKP_BNDMK:
6928 case BUILT_IN_CHKP_BNDSTX:
6929 case BUILT_IN_CHKP_BNDCL:
6930 case BUILT_IN_CHKP_BNDCU:
6931 case BUILT_IN_CHKP_BNDLDX:
6932 case BUILT_IN_CHKP_BNDRET:
6933 case BUILT_IN_CHKP_INTERSECT:
6934 case BUILT_IN_CHKP_NARROW:
6935 case BUILT_IN_CHKP_EXTRACT_LOWER:
6936 case BUILT_IN_CHKP_EXTRACT_UPPER:
6937 /* Software implementation of Pointer Bounds Checker is NYI.
6938 Target support is required. */
6939 error ("Your target platform does not support -fcheck-pointer-bounds");
6940 break;
6941
6942 case BUILT_IN_ACC_ON_DEVICE:
6943 /* Do the library call if we failed to expand the builtin when
6944 folding. */
6945 break;
6946
6947 default: /* just do library call, if unknown builtin */
6948 break;
6949 }
6950
6951 /* The switch statement above can drop through to cause the function
6952 to be called normally. */
6953 return expand_call (exp, target, ignore);
6954 }
6955
6956 /* Similar to expand_builtin but is used for instrumented calls. */
6957
6958 rtx
6959 expand_builtin_with_bounds (tree exp, rtx target,
6960 rtx subtarget ATTRIBUTE_UNUSED,
6961 machine_mode mode, int ignore)
6962 {
6963 tree fndecl = get_callee_fndecl (exp);
6964 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6965
6966 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6967
6968 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6969 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6970
6971 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6972 && fcode < END_CHKP_BUILTINS);
6973
6974 switch (fcode)
6975 {
6976 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6977 target = expand_builtin_memcpy_with_bounds (exp, target);
6978 if (target)
6979 return target;
6980 break;
6981
6982 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6983 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6984 if (target)
6985 return target;
6986 break;
6987
6988 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6989 target = expand_builtin_memset_with_bounds (exp, target, mode);
6990 if (target)
6991 return target;
6992 break;
6993
6994 default:
6995 break;
6996 }
6997
6998 /* The switch statement above can drop through to cause the function
6999 to be called normally. */
7000 return expand_call (exp, target, ignore);
7001 }
7002
7003 /* Determine whether a tree node represents a call to a built-in
7004 function. If the tree T is a call to a built-in function with
7005 the right number of arguments of the appropriate types, return
7006 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7007 Otherwise the return value is END_BUILTINS. */
7008
7009 enum built_in_function
7010 builtin_mathfn_code (const_tree t)
7011 {
7012 const_tree fndecl, arg, parmlist;
7013 const_tree argtype, parmtype;
7014 const_call_expr_arg_iterator iter;
7015
7016 if (TREE_CODE (t) != CALL_EXPR
7017 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7018 return END_BUILTINS;
7019
7020 fndecl = get_callee_fndecl (t);
7021 if (fndecl == NULL_TREE
7022 || TREE_CODE (fndecl) != FUNCTION_DECL
7023 || ! DECL_BUILT_IN (fndecl)
7024 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7025 return END_BUILTINS;
7026
7027 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7028 init_const_call_expr_arg_iterator (t, &iter);
7029 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7030 {
7031 /* If a function doesn't take a variable number of arguments,
7032 the last element in the list will have type `void'. */
7033 parmtype = TREE_VALUE (parmlist);
7034 if (VOID_TYPE_P (parmtype))
7035 {
7036 if (more_const_call_expr_args_p (&iter))
7037 return END_BUILTINS;
7038 return DECL_FUNCTION_CODE (fndecl);
7039 }
7040
7041 if (! more_const_call_expr_args_p (&iter))
7042 return END_BUILTINS;
7043
7044 arg = next_const_call_expr_arg (&iter);
7045 argtype = TREE_TYPE (arg);
7046
7047 if (SCALAR_FLOAT_TYPE_P (parmtype))
7048 {
7049 if (! SCALAR_FLOAT_TYPE_P (argtype))
7050 return END_BUILTINS;
7051 }
7052 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7053 {
7054 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7055 return END_BUILTINS;
7056 }
7057 else if (POINTER_TYPE_P (parmtype))
7058 {
7059 if (! POINTER_TYPE_P (argtype))
7060 return END_BUILTINS;
7061 }
7062 else if (INTEGRAL_TYPE_P (parmtype))
7063 {
7064 if (! INTEGRAL_TYPE_P (argtype))
7065 return END_BUILTINS;
7066 }
7067 else
7068 return END_BUILTINS;
7069 }
7070
7071 /* Variable-length argument list. */
7072 return DECL_FUNCTION_CODE (fndecl);
7073 }
7074
7075 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7076 evaluate to a constant. */
7077
7078 static tree
7079 fold_builtin_constant_p (tree arg)
7080 {
7081 /* We return 1 for a numeric type that's known to be a constant
7082 value at compile-time or for an aggregate type that's a
7083 literal constant. */
7084 STRIP_NOPS (arg);
7085
7086 /* If we know this is a constant, return the constant one. */
7087 if (CONSTANT_CLASS_P (arg)
7088 || (TREE_CODE (arg) == CONSTRUCTOR
7089 && TREE_CONSTANT (arg)))
7090 return integer_one_node;
7091 if (TREE_CODE (arg) == ADDR_EXPR)
7092 {
7093 tree op = TREE_OPERAND (arg, 0);
7094 if (TREE_CODE (op) == STRING_CST
7095 || (TREE_CODE (op) == ARRAY_REF
7096 && integer_zerop (TREE_OPERAND (op, 1))
7097 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7098 return integer_one_node;
7099 }
7100
7101 /* If this expression has side effects, show we don't know it to be a
7102 constant. Likewise if it's a pointer or aggregate type, since in
7103 those cases we only want literals, as those are only optimized
7104 when generating RTL, not later.
7105 And finally, if we are compiling an initializer, not code, we
7106 need to return a definite result now; there's not going to be any
7107 more optimization done. */
7108 if (TREE_SIDE_EFFECTS (arg)
7109 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7110 || POINTER_TYPE_P (TREE_TYPE (arg))
7111 || cfun == 0
7112 || folding_initializer
7113 || force_folding_builtin_constant_p)
7114 return integer_zero_node;
7115
7116 return NULL_TREE;
7117 }
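
/* For illustration, the cases above mean that

     __builtin_constant_p (3 * 7)        and
     __builtin_constant_p (&"abc"[0])

   both fold to 1, while an argument with side effects such as
   __builtin_constant_p (i++) folds to 0; anything else is deferred
   here (the caller forces it to 0 when not optimizing).  */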
7118
7119 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7120 return it as a truthvalue. */
7121
7122 static tree
7123 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7124 tree predictor)
7125 {
7126 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7127
7128 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7129 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7130 ret_type = TREE_TYPE (TREE_TYPE (fn));
7131 pred_type = TREE_VALUE (arg_types);
7132 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7133
7134 pred = fold_convert_loc (loc, pred_type, pred);
7135 expected = fold_convert_loc (loc, expected_type, expected);
7136 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7137 predictor);
7138
7139 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7140 build_int_cst (ret_type, 0));
7141 }
7142
7143 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7144 NULL_TREE if no simplification is possible. */
7145
7146 tree
7147 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7148 {
7149 tree inner, fndecl, inner_arg0;
7150 enum tree_code code;
7151
7152 /* Distribute the expected value over short-circuiting operators.
7153 See through the cast from truthvalue_type_node to long. */
7154 inner_arg0 = arg0;
7155 while (CONVERT_EXPR_P (inner_arg0)
7156 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7157 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7158 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7159
7160 /* If this is a builtin_expect within a builtin_expect keep the
7161 inner one. See through a comparison against a constant. It
7162 might have been added to create a truthvalue. */
7163 inner = inner_arg0;
7164
7165 if (COMPARISON_CLASS_P (inner)
7166 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7167 inner = TREE_OPERAND (inner, 0);
7168
7169 if (TREE_CODE (inner) == CALL_EXPR
7170 && (fndecl = get_callee_fndecl (inner))
7171 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7172 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7173 return arg0;
7174
7175 inner = inner_arg0;
7176 code = TREE_CODE (inner);
7177 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7178 {
7179 tree op0 = TREE_OPERAND (inner, 0);
7180 tree op1 = TREE_OPERAND (inner, 1);
7181
7182 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7183 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7184 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7185
7186 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7187 }
7188
7189 /* If the argument isn't invariant then there's nothing else we can do. */
7190 if (!TREE_CONSTANT (inner_arg0))
7191 return NULL_TREE;
7192
7193 /* If we expect that a comparison against the argument will fold to
7194 a constant, return the constant. In practice, this means a true
7195 constant or the address of a non-weak symbol. */
7196 inner = inner_arg0;
7197 STRIP_NOPS (inner);
7198 if (TREE_CODE (inner) == ADDR_EXPR)
7199 {
7200 do
7201 {
7202 inner = TREE_OPERAND (inner, 0);
7203 }
7204 while (TREE_CODE (inner) == COMPONENT_REF
7205 || TREE_CODE (inner) == ARRAY_REF);
7206 if ((TREE_CODE (inner) == VAR_DECL
7207 || TREE_CODE (inner) == FUNCTION_DECL)
7208 && DECL_WEAK (inner))
7209 return NULL_TREE;
7210 }
7211
7212 /* Otherwise, ARG0 already has the proper type for the return value. */
7213 return arg0;
7214 }
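
/* As an example of the short-circuit case above, a guard written as

     if (__builtin_expect (a && b, 1))

   is treated roughly as

     if (__builtin_expect (a, 1) && __builtin_expect (b, 1))

   so the hint is distributed over both operands before they are
   lowered separately.  */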
7215
7216 /* Fold a call to __builtin_classify_type with argument ARG. */
7217
7218 static tree
7219 fold_builtin_classify_type (tree arg)
7220 {
7221 if (arg == 0)
7222 return build_int_cst (integer_type_node, no_type_class);
7223
7224 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7225 }
7226
7227 /* Fold a call to __builtin_strlen with argument ARG. */
7228
7229 static tree
7230 fold_builtin_strlen (location_t loc, tree type, tree arg)
7231 {
7232 if (!validate_arg (arg, POINTER_TYPE))
7233 return NULL_TREE;
7234 else
7235 {
7236 tree len = c_strlen (arg, 0);
7237
7238 if (len)
7239 return fold_convert_loc (loc, type, len);
7240
7241 return NULL_TREE;
7242 }
7243 }
7244
7245 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7246
7247 static tree
7248 fold_builtin_inf (location_t loc, tree type, int warn)
7249 {
7250 REAL_VALUE_TYPE real;
7251
7252 /* __builtin_inff is intended to be usable to define INFINITY on all
7253 targets. If an infinity is not available, INFINITY expands "to a
7254 positive constant of type float that overflows at translation
7255 time", footnote "In this case, using INFINITY will violate the
7256 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7257 Thus we pedwarn to ensure this constraint violation is
7258 diagnosed. */
7259 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7260 pedwarn (loc, 0, "target format does not support infinity");
7261
7262 real_inf (&real);
7263 return build_real (type, real);
7264 }
7265
7266 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7267 NULL_TREE if no simplification can be made. */
7268
7269 static tree
7270 fold_builtin_sincos (location_t loc,
7271 tree arg0, tree arg1, tree arg2)
7272 {
7273 tree type;
7274 tree fndecl, call = NULL_TREE;
7275
7276 if (!validate_arg (arg0, REAL_TYPE)
7277 || !validate_arg (arg1, POINTER_TYPE)
7278 || !validate_arg (arg2, POINTER_TYPE))
7279 return NULL_TREE;
7280
7281 type = TREE_TYPE (arg0);
7282
7283 /* Canonicalize sincos to cexpi. */
7284 built_in_function fn = mathfn_built_in_2 (type, BUILT_IN_CEXPI);
7285 if (fn == END_BUILTINS)
7286 return NULL_TREE;
7287
7288 /* Calculate the result when the argument is a constant. */
7289 if (TREE_CODE (arg0) == REAL_CST)
7290 {
7291 tree complex_type = build_complex_type (type);
7292 call = fold_const_call (fn, complex_type, arg0);
7293 }
7294 if (!call)
7295 {
7296 if (!targetm.libc_has_function (function_c99_math_complex)
7297 || !builtin_decl_implicit_p (fn))
7298 return NULL_TREE;
7299 fndecl = builtin_decl_explicit (fn);
7300 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7301 call = builtin_save_expr (call);
7302 }
7303
7304 return build2 (COMPOUND_EXPR, void_type_node,
7305 build2 (MODIFY_EXPR, void_type_node,
7306 build_fold_indirect_ref_loc (loc, arg1),
7307 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7308 build2 (MODIFY_EXPR, void_type_node,
7309 build_fold_indirect_ref_loc (loc, arg2),
7310 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7311 }
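
/* Assuming the target libc provides the C99 complex math functions,
   the canonicalization above turns

     sincos (x, &s, &c);

   roughly into

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   while a constant X is evaluated directly through fold_const_call.  */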
7312
7313 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7314 arguments to the call, and TYPE is its return type.
7315 Return NULL_TREE if no simplification can be made. */
7316
7317 static tree
7318 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7319 {
7320 if (!validate_arg (arg1, POINTER_TYPE)
7321 || !validate_arg (arg2, INTEGER_TYPE)
7322 || !validate_arg (len, INTEGER_TYPE))
7323 return NULL_TREE;
7324 else
7325 {
7326 const char *p1;
7327
7328 if (TREE_CODE (arg2) != INTEGER_CST
7329 || !tree_fits_uhwi_p (len))
7330 return NULL_TREE;
7331
7332 p1 = c_getstr (arg1);
7333 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7334 {
7335 char c;
7336 const char *r;
7337 tree tem;
7338
7339 if (target_char_cast (arg2, &c))
7340 return NULL_TREE;
7341
7342 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7343
7344 if (r == NULL)
7345 return build_int_cst (TREE_TYPE (arg1), 0);
7346
7347 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7348 return fold_convert_loc (loc, type, tem);
7349 }
7350 return NULL_TREE;
7351 }
7352 }
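
/* For instance, with a constant haystack the fold above turns

     memchr ("hello", 'l', 5)

   into the pointer expression "hello" + 2, and into a null pointer
   constant when the character does not occur within the given
   length.  */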
7353
7354 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7355 Return NULL_TREE if no simplification can be made. */
7356
7357 static tree
7358 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7359 {
7360 if (!validate_arg (arg1, POINTER_TYPE)
7361 || !validate_arg (arg2, POINTER_TYPE)
7362 || !validate_arg (len, INTEGER_TYPE))
7363 return NULL_TREE;
7364
7365 /* If the LEN parameter is zero, return zero. */
7366 if (integer_zerop (len))
7367 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7368 arg1, arg2);
7369
7370 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7371 if (operand_equal_p (arg1, arg2, 0))
7372 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7373
7374 /* If the LEN parameter is one, return an expression corresponding to
7375 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7376 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7377 {
7378 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7379 tree cst_uchar_ptr_node
7380 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7381
7382 tree ind1
7383 = fold_convert_loc (loc, integer_type_node,
7384 build1 (INDIRECT_REF, cst_uchar_node,
7385 fold_convert_loc (loc,
7386 cst_uchar_ptr_node,
7387 arg1)));
7388 tree ind2
7389 = fold_convert_loc (loc, integer_type_node,
7390 build1 (INDIRECT_REF, cst_uchar_node,
7391 fold_convert_loc (loc,
7392 cst_uchar_ptr_node,
7393 arg2)));
7394 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7395 }
7396
7397 return NULL_TREE;
7398 }
7399
7400 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7401 Return NULL_TREE if no simplification can be made. */
7402
7403 static tree
7404 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7405 {
7406 if (!validate_arg (arg1, POINTER_TYPE)
7407 || !validate_arg (arg2, POINTER_TYPE))
7408 return NULL_TREE;
7409
7410 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7411 if (operand_equal_p (arg1, arg2, 0))
7412 return integer_zero_node;
7413
7414 /* If the second arg is "", return *(const unsigned char*)arg1. */
7415 const char *p2 = c_getstr (arg2);
7416 if (p2 && *p2 == '\0')
7417 {
7418 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7419 tree cst_uchar_ptr_node
7420 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7421
7422 return fold_convert_loc (loc, integer_type_node,
7423 build1 (INDIRECT_REF, cst_uchar_node,
7424 fold_convert_loc (loc,
7425 cst_uchar_ptr_node,
7426 arg1)));
7427 }
7428
7429 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7430 const char *p1 = c_getstr (arg1);
7431 if (p1 && *p1 == '\0')
7432 {
7433 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7434 tree cst_uchar_ptr_node
7435 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7436
7437 tree temp
7438 = fold_convert_loc (loc, integer_type_node,
7439 build1 (INDIRECT_REF, cst_uchar_node,
7440 fold_convert_loc (loc,
7441 cst_uchar_ptr_node,
7442 arg2)));
7443 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7444 }
7445
7446 return NULL_TREE;
7447 }
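
/* Examples of the folds above:

     strcmp (s, s)    becomes  0
     strcmp (s, "")   becomes  *(const unsigned char *) s
     strcmp ("", s)   becomes  -*(const unsigned char *) s

   everything else is left for the library or for later passes.  */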
7448
7449 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7450 Return NULL_TREE if no simplification can be made. */
7451
7452 static tree
7453 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7454 {
7455 if (!validate_arg (arg1, POINTER_TYPE)
7456 || !validate_arg (arg2, POINTER_TYPE)
7457 || !validate_arg (len, INTEGER_TYPE))
7458 return NULL_TREE;
7459
7460 /* If the LEN parameter is zero, return zero. */
7461 if (integer_zerop (len))
7462 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7463 arg1, arg2);
7464
7465 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7466 if (operand_equal_p (arg1, arg2, 0))
7467 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7468
7469 /* If the second arg is "", and the length is greater than zero,
7470 return *(const unsigned char*)arg1. */
7471 const char *p2 = c_getstr (arg2);
7472 if (p2 && *p2 == '\0'
7473 && TREE_CODE (len) == INTEGER_CST
7474 && tree_int_cst_sgn (len) == 1)
7475 {
7476 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7477 tree cst_uchar_ptr_node
7478 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7479
7480 return fold_convert_loc (loc, integer_type_node,
7481 build1 (INDIRECT_REF, cst_uchar_node,
7482 fold_convert_loc (loc,
7483 cst_uchar_ptr_node,
7484 arg1)));
7485 }
7486
7487 /* If the first arg is "", and the length is greater than zero,
7488 return -*(const unsigned char*)arg2. */
7489 const char *p1 = c_getstr (arg1);
7490 if (p1 && *p1 == '\0'
7491 && TREE_CODE (len) == INTEGER_CST
7492 && tree_int_cst_sgn (len) == 1)
7493 {
7494 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7495 tree cst_uchar_ptr_node
7496 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7497
7498 tree temp = fold_convert_loc (loc, integer_type_node,
7499 build1 (INDIRECT_REF, cst_uchar_node,
7500 fold_convert_loc (loc,
7501 cst_uchar_ptr_node,
7502 arg2)));
7503 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7504 }
7505
7506 /* If the LEN parameter is one, return an expression corresponding to
7507 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7508 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7509 {
7510 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7511 tree cst_uchar_ptr_node
7512 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7513
7514 tree ind1 = fold_convert_loc (loc, integer_type_node,
7515 build1 (INDIRECT_REF, cst_uchar_node,
7516 fold_convert_loc (loc,
7517 cst_uchar_ptr_node,
7518 arg1)));
7519 tree ind2 = fold_convert_loc (loc, integer_type_node,
7520 build1 (INDIRECT_REF, cst_uchar_node,
7521 fold_convert_loc (loc,
7522 cst_uchar_ptr_node,
7523 arg2)));
7524 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7525 }
7526
7527 return NULL_TREE;
7528 }
7529
7530 /* Fold a call to builtin isascii with argument ARG. */
7531
7532 static tree
7533 fold_builtin_isascii (location_t loc, tree arg)
7534 {
7535 if (!validate_arg (arg, INTEGER_TYPE))
7536 return NULL_TREE;
7537 else
7538 {
7539 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7540 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7541 build_int_cst (integer_type_node,
7542 ~ (unsigned HOST_WIDE_INT) 0x7f));
7543 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7544 arg, integer_zero_node);
7545 }
7546 }
7547
7548 /* Fold a call to builtin toascii with argument ARG. */
7549
7550 static tree
7551 fold_builtin_toascii (location_t loc, tree arg)
7552 {
7553 if (!validate_arg (arg, INTEGER_TYPE))
7554 return NULL_TREE;
7555
7556 /* Transform toascii(c) -> (c & 0x7f). */
7557 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7558 build_int_cst (integer_type_node, 0x7f));
7559 }
7560
7561 /* Fold a call to builtin isdigit with argument ARG. */
7562
7563 static tree
7564 fold_builtin_isdigit (location_t loc, tree arg)
7565 {
7566 if (!validate_arg (arg, INTEGER_TYPE))
7567 return NULL_TREE;
7568 else
7569 {
7570 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7571 /* According to the C standard, isdigit is unaffected by locale.
7572 However, it definitely is affected by the target character set. */
7573 unsigned HOST_WIDE_INT target_digit0
7574 = lang_hooks.to_target_charset ('0');
7575
7576 if (target_digit0 == 0)
7577 return NULL_TREE;
7578
7579 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7580 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7581 build_int_cst (unsigned_type_node, target_digit0));
7582 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7583 build_int_cst (unsigned_type_node, 9));
7584 }
7585 }
7586
7587 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7588
7589 static tree
7590 fold_builtin_fabs (location_t loc, tree arg, tree type)
7591 {
7592 if (!validate_arg (arg, REAL_TYPE))
7593 return NULL_TREE;
7594
7595 arg = fold_convert_loc (loc, type, arg);
7596 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7597 }
7598
7599 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7600
7601 static tree
7602 fold_builtin_abs (location_t loc, tree arg, tree type)
7603 {
7604 if (!validate_arg (arg, INTEGER_TYPE))
7605 return NULL_TREE;
7606
7607 arg = fold_convert_loc (loc, type, arg);
7608 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7609 }
7610
7611 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7612
7613 static tree
7614 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7615 {
7616 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7617 if (validate_arg (arg0, REAL_TYPE)
7618 && validate_arg (arg1, REAL_TYPE)
7619 && validate_arg (arg2, REAL_TYPE)
7620 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7621 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7622
7623 return NULL_TREE;
7624 }
7625
7626 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7627
7628 static tree
7629 fold_builtin_carg (location_t loc, tree arg, tree type)
7630 {
7631 if (validate_arg (arg, COMPLEX_TYPE)
7632 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7633 {
7634 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7635
7636 if (atan2_fn)
7637 {
7638 tree new_arg = builtin_save_expr (arg);
7639 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7640 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7641 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7642 }
7643 }
7644
7645 return NULL_TREE;
7646 }
7647
7648 /* Fold a call to builtin frexp; we can assume the base is 2. */
7649
7650 static tree
7651 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7652 {
7653 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7654 return NULL_TREE;
7655
7656 STRIP_NOPS (arg0);
7657
7658 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7659 return NULL_TREE;
7660
7661 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7662
7663 /* Proceed if a valid pointer type was passed in. */
7664 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7665 {
7666 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7667 tree frac, exp;
7668
7669 switch (value->cl)
7670 {
7671 case rvc_zero:
7672 /* For +-0, return (*exp = 0, +-0). */
7673 exp = integer_zero_node;
7674 frac = arg0;
7675 break;
7676 case rvc_nan:
7677 case rvc_inf:
7678 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7679 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7680 case rvc_normal:
7681 {
7682 /* Since the frexp function always expects base 2, and in
7683 GCC normalized significands are already in the range
7684 [0.5, 1.0), we have exactly what frexp wants. */
7685 REAL_VALUE_TYPE frac_rvt = *value;
7686 SET_REAL_EXP (&frac_rvt, 0);
7687 frac = build_real (rettype, frac_rvt);
7688 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7689 }
7690 break;
7691 default:
7692 gcc_unreachable ();
7693 }
7694
7695 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7696 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7697 TREE_SIDE_EFFECTS (arg1) = 1;
7698 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7699 }
7700
7701 return NULL_TREE;
7702 }
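
/* Worked constant example: 8.0 is 0.5 * 2**4 in the normalized form
   used above, so

     frexp (8.0, &e)

   folds to the compound expression (*e = 4, 0.5), i.e. the exponent is
   stored and the fraction 0.5 is the value of the call.  */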
7703
7704 /* Fold a call to builtin modf. */
7705
7706 static tree
7707 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7708 {
7709 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7710 return NULL_TREE;
7711
7712 STRIP_NOPS (arg0);
7713
7714 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7715 return NULL_TREE;
7716
7717 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7718
7719 /* Proceed if a valid pointer type was passed in. */
7720 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7721 {
7722 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7723 REAL_VALUE_TYPE trunc, frac;
7724
7725 switch (value->cl)
7726 {
7727 case rvc_nan:
7728 case rvc_zero:
7729 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7730 trunc = frac = *value;
7731 break;
7732 case rvc_inf:
7733 /* For +-Inf, return (*arg1 = arg0, +-0). */
7734 frac = dconst0;
7735 frac.sign = value->sign;
7736 trunc = *value;
7737 break;
7738 case rvc_normal:
7739 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7740 real_trunc (&trunc, VOIDmode, value);
7741 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7742 /* If the original number was negative and already
7743 integral, then the fractional part is -0.0. */
7744 if (value->sign && frac.cl == rvc_zero)
7745 frac.sign = value->sign;
7746 break;
7747 }
7748
7749 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7750 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7751 build_real (rettype, trunc));
7752 TREE_SIDE_EFFECTS (arg1) = 1;
7753 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7754 build_real (rettype, frac));
7755 }
7756
7757 return NULL_TREE;
7758 }
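
/* Likewise for a constant argument:

     modf (2.5, &ip)    folds to  (*ip = 2.0, 0.5)
     modf (-2.0, &ip)   folds to  (*ip = -2.0, -0.0)

   the negative-and-integral case keeping the -0.0 fractional part
   required of modf.  */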
7759
7760 /* Given a location LOC, an interclass builtin function decl FNDECL
7761 and its single argument ARG, return a folded expression computing
7762 the same, or NULL_TREE if we either couldn't or didn't want to fold
7763 (the latter happens if there's an RTL instruction available). */
7764
7765 static tree
7766 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7767 {
7768 machine_mode mode;
7769
7770 if (!validate_arg (arg, REAL_TYPE))
7771 return NULL_TREE;
7772
7773 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7774 return NULL_TREE;
7775
7776 mode = TYPE_MODE (TREE_TYPE (arg));
7777
7778 /* If there is no optab, try generic code. */
7779 switch (DECL_FUNCTION_CODE (fndecl))
7780 {
7781 tree result;
7782
7783 CASE_FLT_FN (BUILT_IN_ISINF):
7784 {
7785 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7786 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7787 tree const type = TREE_TYPE (arg);
7788 REAL_VALUE_TYPE r;
7789 char buf[128];
7790
7791 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7792 real_from_string (&r, buf);
7793 result = build_call_expr (isgr_fn, 2,
7794 fold_build1_loc (loc, ABS_EXPR, type, arg),
7795 build_real (type, r));
7796 return result;
7797 }
7798 CASE_FLT_FN (BUILT_IN_FINITE):
7799 case BUILT_IN_ISFINITE:
7800 {
7801 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7802 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7803 tree const type = TREE_TYPE (arg);
7804 REAL_VALUE_TYPE r;
7805 char buf[128];
7806
7807 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7808 real_from_string (&r, buf);
7809 result = build_call_expr (isle_fn, 2,
7810 fold_build1_loc (loc, ABS_EXPR, type, arg),
7811 build_real (type, r));
7812 /*result = fold_build2_loc (loc, UNGT_EXPR,
7813 TREE_TYPE (TREE_TYPE (fndecl)),
7814 fold_build1_loc (loc, ABS_EXPR, type, arg),
7815 build_real (type, r));
7816 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7817 TREE_TYPE (TREE_TYPE (fndecl)),
7818 result);*/
7819 return result;
7820 }
7821 case BUILT_IN_ISNORMAL:
7822 {
7823 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7824 islessequal(fabs(x),DBL_MAX). */
7825 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7826 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7827 tree const type = TREE_TYPE (arg);
7828 REAL_VALUE_TYPE rmax, rmin;
7829 char buf[128];
7830
7831 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7832 real_from_string (&rmax, buf);
7833 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7834 real_from_string (&rmin, buf);
7835 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7836 result = build_call_expr (isle_fn, 2, arg,
7837 build_real (type, rmax));
7838 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
7839 build_call_expr (isge_fn, 2, arg,
7840 build_real (type, rmin)));
7841 return result;
7842 }
7843 default:
7844 break;
7845 }
7846
7847 return NULL_TREE;
7848 }
7849
7850 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7851 ARG is the argument for the call. */
7852
7853 static tree
7854 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7855 {
7856 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7857
7858 if (!validate_arg (arg, REAL_TYPE))
7859 return NULL_TREE;
7860
7861 switch (builtin_index)
7862 {
7863 case BUILT_IN_ISINF:
7864 if (!HONOR_INFINITIES (arg))
7865 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7866
7867 return NULL_TREE;
7868
7869 case BUILT_IN_ISINF_SIGN:
7870 {
7871 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7872 /* In a boolean context, GCC will fold the inner COND_EXPR to
7873 1. So e.g. "if (isinf_sign(x))" would be folded to just
7874 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7875 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
7876 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7877 tree tmp = NULL_TREE;
7878
7879 arg = builtin_save_expr (arg);
7880
7881 if (signbit_fn && isinf_fn)
7882 {
7883 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7884 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7885
7886 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7887 signbit_call, integer_zero_node);
7888 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7889 isinf_call, integer_zero_node);
7890
7891 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7892 integer_minus_one_node, integer_one_node);
7893 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7894 isinf_call, tmp,
7895 integer_zero_node);
7896 }
7897
7898 return tmp;
7899 }
7900
7901 case BUILT_IN_ISFINITE:
7902 if (!HONOR_NANS (arg)
7903 && !HONOR_INFINITIES (arg))
7904 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7905
7906 return NULL_TREE;
7907
7908 case BUILT_IN_ISNAN:
7909 if (!HONOR_NANS (arg))
7910 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7911
7912 arg = builtin_save_expr (arg);
7913 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7914
7915 default:
7916 gcc_unreachable ();
7917 }
7918 }
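
/* Concretely, when NaNs are honored the BUILT_IN_ISNAN case above
   rewrites

     isnan (x)

   as an unordered self-comparison of X with itself (the usual x != x
   idiom), and when NaNs cannot occur it simply becomes 0.  */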
7919
7920 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7921 This builtin will generate code to return the appropriate floating
7922 point classification depending on the value of the floating point
7923 number passed in. The possible return values must be supplied as
7924 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7925 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7926 one floating point argument, which is "type generic". */
7927
7928 static tree
7929 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7930 {
7931 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7932 arg, type, res, tmp;
7933 machine_mode mode;
7934 REAL_VALUE_TYPE r;
7935 char buf[128];
7936
7937 /* Verify the required arguments in the original call. */
7938 if (nargs != 6
7939 || !validate_arg (args[0], INTEGER_TYPE)
7940 || !validate_arg (args[1], INTEGER_TYPE)
7941 || !validate_arg (args[2], INTEGER_TYPE)
7942 || !validate_arg (args[3], INTEGER_TYPE)
7943 || !validate_arg (args[4], INTEGER_TYPE)
7944 || !validate_arg (args[5], REAL_TYPE))
7945 return NULL_TREE;
7946
7947 fp_nan = args[0];
7948 fp_infinite = args[1];
7949 fp_normal = args[2];
7950 fp_subnormal = args[3];
7951 fp_zero = args[4];
7952 arg = args[5];
7953 type = TREE_TYPE (arg);
7954 mode = TYPE_MODE (type);
7955 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7956
7957 /* fpclassify(x) ->
7958 isnan(x) ? FP_NAN :
7959 (fabs(x) == Inf ? FP_INFINITE :
7960 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7961 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7962
7963 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7964 build_real (type, dconst0));
7965 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7966 tmp, fp_zero, fp_subnormal);
7967
7968 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7969 real_from_string (&r, buf);
7970 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7971 arg, build_real (type, r));
7972 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7973
7974 if (HONOR_INFINITIES (mode))
7975 {
7976 real_inf (&r);
7977 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7978 build_real (type, r));
7979 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7980 fp_infinite, res);
7981 }
7982
7983 if (HONOR_NANS (mode))
7984 {
7985 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7986 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7987 }
7988
7989 return res;
7990 }
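
/* A typical call, matching the argument order required above, is

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   which is how <math.h> implementations commonly define
   fpclassify (x).  */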
7991
7992 /* Fold a call to an unordered comparison function such as
7993 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7994 being called and ARG0 and ARG1 are the arguments for the call.
7995 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7996 the opposite of the desired result. UNORDERED_CODE is used
7997 for modes that can hold NaNs and ORDERED_CODE is used for
7998 the rest. */
7999
8000 static tree
8001 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8002 enum tree_code unordered_code,
8003 enum tree_code ordered_code)
8004 {
8005 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8006 enum tree_code code;
8007 tree type0, type1;
8008 enum tree_code code0, code1;
8009 tree cmp_type = NULL_TREE;
8010
8011 type0 = TREE_TYPE (arg0);
8012 type1 = TREE_TYPE (arg1);
8013
8014 code0 = TREE_CODE (type0);
8015 code1 = TREE_CODE (type1);
8016
8017 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8018 /* Choose the wider of two real types. */
8019 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8020 ? type0 : type1;
8021 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8022 cmp_type = type0;
8023 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8024 cmp_type = type1;
8025
8026 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8027 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8028
8029 if (unordered_code == UNORDERED_EXPR)
8030 {
8031 if (!HONOR_NANS (arg0))
8032 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8033 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8034 }
8035
8036 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8037 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8038 fold_build2_loc (loc, code, type, arg0, arg1));
8039 }
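
/* For instance, isgreater is dispatched below with UNLE_EXPR/LE_EXPR,
   so

     isgreater (x, y)

   becomes !(x <= y) when NaNs cannot occur, and the quiet
   !(x unle y) form when they can, so no invalid-operand exception is
   raised.  */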
8040
8041 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8042 arithmetic if it can never overflow, or into internal functions that
8043 return both the result of the arithmetic and an overflow flag in
8044 a complex integer result, or into some other check for overflow. */
8045
8046 static tree
8047 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8048 tree arg0, tree arg1, tree arg2)
8049 {
8050 enum internal_fn ifn = IFN_LAST;
8051 tree type = TREE_TYPE (TREE_TYPE (arg2));
8052 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8053 switch (fcode)
8054 {
8055 case BUILT_IN_ADD_OVERFLOW:
8056 case BUILT_IN_SADD_OVERFLOW:
8057 case BUILT_IN_SADDL_OVERFLOW:
8058 case BUILT_IN_SADDLL_OVERFLOW:
8059 case BUILT_IN_UADD_OVERFLOW:
8060 case BUILT_IN_UADDL_OVERFLOW:
8061 case BUILT_IN_UADDLL_OVERFLOW:
8062 ifn = IFN_ADD_OVERFLOW;
8063 break;
8064 case BUILT_IN_SUB_OVERFLOW:
8065 case BUILT_IN_SSUB_OVERFLOW:
8066 case BUILT_IN_SSUBL_OVERFLOW:
8067 case BUILT_IN_SSUBLL_OVERFLOW:
8068 case BUILT_IN_USUB_OVERFLOW:
8069 case BUILT_IN_USUBL_OVERFLOW:
8070 case BUILT_IN_USUBLL_OVERFLOW:
8071 ifn = IFN_SUB_OVERFLOW;
8072 break;
8073 case BUILT_IN_MUL_OVERFLOW:
8074 case BUILT_IN_SMUL_OVERFLOW:
8075 case BUILT_IN_SMULL_OVERFLOW:
8076 case BUILT_IN_SMULLL_OVERFLOW:
8077 case BUILT_IN_UMUL_OVERFLOW:
8078 case BUILT_IN_UMULL_OVERFLOW:
8079 case BUILT_IN_UMULLL_OVERFLOW:
8080 ifn = IFN_MUL_OVERFLOW;
8081 break;
8082 default:
8083 gcc_unreachable ();
8084 }
8085 tree ctype = build_complex_type (type);
8086 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8087 2, arg0, arg1);
8088 tree tgt = save_expr (call);
8089 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8090 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8091 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8092 tree store
8093 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8094 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8095 }
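
/* Sketch of the tree built above for a call such as

     bool ovf = __builtin_add_overflow (a, b, &res);

   which becomes, roughly,

     complex_T t = .ADD_OVERFLOW (a, b);
     res = __real__ t;
     ovf = (bool) __imag__ t;

   where complex_T is the complex variant of the type RES points to
   and .ADD_OVERFLOW is the internal function chosen above.  */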
8096
8097 /* Fold a call to built-in function FNDECL with 0 arguments.
8098 This function returns NULL_TREE if no simplification was possible. */
8099
8100 static tree
8101 fold_builtin_0 (location_t loc, tree fndecl)
8102 {
8103 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8104 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8105 switch (fcode)
8106 {
8107 CASE_FLT_FN (BUILT_IN_INF):
8108 case BUILT_IN_INFD32:
8109 case BUILT_IN_INFD64:
8110 case BUILT_IN_INFD128:
8111 return fold_builtin_inf (loc, type, true);
8112
8113 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8114 return fold_builtin_inf (loc, type, false);
8115
8116 case BUILT_IN_CLASSIFY_TYPE:
8117 return fold_builtin_classify_type (NULL_TREE);
8118
8119 default:
8120 break;
8121 }
8122 return NULL_TREE;
8123 }
8124
8125 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8126 This function returns NULL_TREE if no simplification was possible. */
8127
8128 static tree
8129 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8130 {
8131 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8132 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8133
8134 if (TREE_CODE (arg0) == ERROR_MARK)
8135 return NULL_TREE;
8136
8137 if (tree ret = fold_const_call (fcode, type, arg0))
8138 return ret;
8139
8140 switch (fcode)
8141 {
8142 case BUILT_IN_CONSTANT_P:
8143 {
8144 tree val = fold_builtin_constant_p (arg0);
8145
8146 /* Gimplification will pull the CALL_EXPR for the builtin out of
8147 an if condition. When not optimizing, we'll not CSE it back.
8148 To avoid regressions such as link errors, return false now.
8149 if (!val && !optimize)
8150 val = integer_zero_node;
8151
8152 return val;
8153 }
8154
8155 case BUILT_IN_CLASSIFY_TYPE:
8156 return fold_builtin_classify_type (arg0);
8157
8158 case BUILT_IN_STRLEN:
8159 return fold_builtin_strlen (loc, type, arg0);
8160
8161 CASE_FLT_FN (BUILT_IN_FABS):
8162 case BUILT_IN_FABSD32:
8163 case BUILT_IN_FABSD64:
8164 case BUILT_IN_FABSD128:
8165 return fold_builtin_fabs (loc, arg0, type);
8166
8167 case BUILT_IN_ABS:
8168 case BUILT_IN_LABS:
8169 case BUILT_IN_LLABS:
8170 case BUILT_IN_IMAXABS:
8171 return fold_builtin_abs (loc, arg0, type);
8172
8173 CASE_FLT_FN (BUILT_IN_CONJ):
8174 if (validate_arg (arg0, COMPLEX_TYPE)
8175 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8176 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8177 break;
8178
8179 CASE_FLT_FN (BUILT_IN_CREAL):
8180 if (validate_arg (arg0, COMPLEX_TYPE)
8181 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8182 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8183 break;
8184
8185 CASE_FLT_FN (BUILT_IN_CIMAG):
8186 if (validate_arg (arg0, COMPLEX_TYPE)
8187 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8188 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8189 break;
8190
8191 CASE_FLT_FN (BUILT_IN_CARG):
8192 return fold_builtin_carg (loc, arg0, type);
8193
8194 case BUILT_IN_ISASCII:
8195 return fold_builtin_isascii (loc, arg0);
8196
8197 case BUILT_IN_TOASCII:
8198 return fold_builtin_toascii (loc, arg0);
8199
8200 case BUILT_IN_ISDIGIT:
8201 return fold_builtin_isdigit (loc, arg0);
8202
8203 CASE_FLT_FN (BUILT_IN_FINITE):
8204 case BUILT_IN_FINITED32:
8205 case BUILT_IN_FINITED64:
8206 case BUILT_IN_FINITED128:
8207 case BUILT_IN_ISFINITE:
8208 {
8209 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8210 if (ret)
8211 return ret;
8212 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8213 }
8214
8215 CASE_FLT_FN (BUILT_IN_ISINF):
8216 case BUILT_IN_ISINFD32:
8217 case BUILT_IN_ISINFD64:
8218 case BUILT_IN_ISINFD128:
8219 {
8220 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8221 if (ret)
8222 return ret;
8223 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8224 }
8225
8226 case BUILT_IN_ISNORMAL:
8227 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8228
8229 case BUILT_IN_ISINF_SIGN:
8230 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8231
8232 CASE_FLT_FN (BUILT_IN_ISNAN):
8233 case BUILT_IN_ISNAND32:
8234 case BUILT_IN_ISNAND64:
8235 case BUILT_IN_ISNAND128:
8236 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8237
8238 case BUILT_IN_FREE:
8239 if (integer_zerop (arg0))
8240 return build_empty_stmt (loc);
8241 break;
8242
8243 default:
8244 break;
8245 }
8246
8247 return NULL_TREE;
8248
8249 }
8250
8251 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8252 This function returns NULL_TREE if no simplification was possible. */
8253
8254 static tree
8255 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8256 {
8257 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8258 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8259
8260 if (TREE_CODE (arg0) == ERROR_MARK
8261 || TREE_CODE (arg1) == ERROR_MARK)
8262 return NULL_TREE;
8263
8264 if (tree ret = fold_const_call (fcode, type, arg0, arg1))
8265 return ret;
8266
8267 switch (fcode)
8268 {
8269 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8270 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8271 if (validate_arg (arg0, REAL_TYPE)
8272 && validate_arg (arg1, POINTER_TYPE))
8273 return do_mpfr_lgamma_r (arg0, arg1, type);
8274 break;
8275
8276 CASE_FLT_FN (BUILT_IN_FREXP):
8277 return fold_builtin_frexp (loc, arg0, arg1, type);
8278
8279 CASE_FLT_FN (BUILT_IN_MODF):
8280 return fold_builtin_modf (loc, arg0, arg1, type);
8281
8282 case BUILT_IN_STRSTR:
8283 return fold_builtin_strstr (loc, arg0, arg1, type);
8284
8285 case BUILT_IN_STRSPN:
8286 return fold_builtin_strspn (loc, arg0, arg1);
8287
8288 case BUILT_IN_STRCSPN:
8289 return fold_builtin_strcspn (loc, arg0, arg1);
8290
8291 case BUILT_IN_STRCHR:
8292 case BUILT_IN_INDEX:
8293 return fold_builtin_strchr (loc, arg0, arg1, type);
8294
8295 case BUILT_IN_STRRCHR:
8296 case BUILT_IN_RINDEX:
8297 return fold_builtin_strrchr (loc, arg0, arg1, type);
8298
8299 case BUILT_IN_STRCMP:
8300 return fold_builtin_strcmp (loc, arg0, arg1);
8301
8302 case BUILT_IN_STRPBRK:
8303 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8304
8305 case BUILT_IN_EXPECT:
8306 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8307
8308 case BUILT_IN_ISGREATER:
8309 return fold_builtin_unordered_cmp (loc, fndecl,
8310 arg0, arg1, UNLE_EXPR, LE_EXPR);
8311 case BUILT_IN_ISGREATEREQUAL:
8312 return fold_builtin_unordered_cmp (loc, fndecl,
8313 arg0, arg1, UNLT_EXPR, LT_EXPR);
8314 case BUILT_IN_ISLESS:
8315 return fold_builtin_unordered_cmp (loc, fndecl,
8316 arg0, arg1, UNGE_EXPR, GE_EXPR);
8317 case BUILT_IN_ISLESSEQUAL:
8318 return fold_builtin_unordered_cmp (loc, fndecl,
8319 arg0, arg1, UNGT_EXPR, GT_EXPR);
8320 case BUILT_IN_ISLESSGREATER:
8321 return fold_builtin_unordered_cmp (loc, fndecl,
8322 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8323 case BUILT_IN_ISUNORDERED:
8324 return fold_builtin_unordered_cmp (loc, fndecl,
8325 arg0, arg1, UNORDERED_EXPR,
8326 NOP_EXPR);
8327
8328 /* We do the folding for va_start in the expander. */
8329 case BUILT_IN_VA_START:
8330 break;
8331
8332 case BUILT_IN_OBJECT_SIZE:
8333 return fold_builtin_object_size (arg0, arg1);
8334
8335 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8336 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8337
8338 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8339 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8340
8341 default:
8342 break;
8343 }
8344 return NULL_TREE;
8345 }
8346
8347 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8348 and ARG2.
8349 This function returns NULL_TREE if no simplification was possible. */
8350
8351 static tree
8352 fold_builtin_3 (location_t loc, tree fndecl,
8353 tree arg0, tree arg1, tree arg2)
8354 {
8355 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8356 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8357
8358 if (TREE_CODE (arg0) == ERROR_MARK
8359 || TREE_CODE (arg1) == ERROR_MARK
8360 || TREE_CODE (arg2) == ERROR_MARK)
8361 return NULL_TREE;
8362
8363 if (tree ret = fold_const_call (fcode, type, arg0, arg1, arg2))
8364 return ret;
8365
8366 switch (fcode)
8367 {
8368
8369 CASE_FLT_FN (BUILT_IN_SINCOS):
8370 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8371
8372 CASE_FLT_FN (BUILT_IN_FMA):
8373 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8374
8375 CASE_FLT_FN (BUILT_IN_REMQUO):
8376 if (validate_arg (arg0, REAL_TYPE)
8377 && validate_arg (arg1, REAL_TYPE)
8378 && validate_arg (arg2, POINTER_TYPE))
8379 return do_mpfr_remquo (arg0, arg1, arg2);
8380 break;
8381
8382 case BUILT_IN_STRNCMP:
8383 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8384
8385 case BUILT_IN_MEMCHR:
8386 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8387
8388 case BUILT_IN_BCMP:
8389 case BUILT_IN_MEMCMP:
8390 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8391
8392 case BUILT_IN_EXPECT:
8393 return fold_builtin_expect (loc, arg0, arg1, arg2);
8394
8395 case BUILT_IN_ADD_OVERFLOW:
8396 case BUILT_IN_SUB_OVERFLOW:
8397 case BUILT_IN_MUL_OVERFLOW:
8398 case BUILT_IN_SADD_OVERFLOW:
8399 case BUILT_IN_SADDL_OVERFLOW:
8400 case BUILT_IN_SADDLL_OVERFLOW:
8401 case BUILT_IN_SSUB_OVERFLOW:
8402 case BUILT_IN_SSUBL_OVERFLOW:
8403 case BUILT_IN_SSUBLL_OVERFLOW:
8404 case BUILT_IN_SMUL_OVERFLOW:
8405 case BUILT_IN_SMULL_OVERFLOW:
8406 case BUILT_IN_SMULLL_OVERFLOW:
8407 case BUILT_IN_UADD_OVERFLOW:
8408 case BUILT_IN_UADDL_OVERFLOW:
8409 case BUILT_IN_UADDLL_OVERFLOW:
8410 case BUILT_IN_USUB_OVERFLOW:
8411 case BUILT_IN_USUBL_OVERFLOW:
8412 case BUILT_IN_USUBLL_OVERFLOW:
8413 case BUILT_IN_UMUL_OVERFLOW:
8414 case BUILT_IN_UMULL_OVERFLOW:
8415 case BUILT_IN_UMULLL_OVERFLOW:
8416 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8417
8418 default:
8419 break;
8420 }
8421 return NULL_TREE;
8422 }
8423
8424 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8425 arguments. IGNORE is true if the result of the
8426 function call is ignored. This function returns NULL_TREE if no
8427 simplification was possible. */
8428
8429 tree
8430 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8431 {
8432 tree ret = NULL_TREE;
8433
8434 switch (nargs)
8435 {
8436 case 0:
8437 ret = fold_builtin_0 (loc, fndecl);
8438 break;
8439 case 1:
8440 ret = fold_builtin_1 (loc, fndecl, args[0]);
8441 break;
8442 case 2:
8443 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8444 break;
8445 case 3:
8446 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8447 break;
8448 default:
8449 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8450 break;
8451 }
8452 if (ret)
8453 {
8454 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8455 SET_EXPR_LOCATION (ret, loc);
8456 TREE_NO_WARNING (ret) = 1;
8457 return ret;
8458 }
8459 return NULL_TREE;
8460 }
8461
8462 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8463 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8464 of arguments in ARGS to be omitted. OLDNARGS is the number of
8465 elements in ARGS. */
8466
8467 static tree
8468 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8469 int skip, tree fndecl, int n, va_list newargs)
8470 {
8471 int nargs = oldnargs - skip + n;
8472 tree *buffer;
8473
8474 if (n > 0)
8475 {
8476 int i, j;
8477
8478 buffer = XALLOCAVEC (tree, nargs);
8479 for (i = 0; i < n; i++)
8480 buffer[i] = va_arg (newargs, tree);
8481 for (j = skip; j < oldnargs; j++, i++)
8482 buffer[i] = args[j];
8483 }
8484 else
8485 buffer = args + skip;
8486
8487 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8488 }
8489
8490 /* Return true if FNDECL shouldn't be folded right now.
8491 If a built-in function has an inline attribute always_inline
8492 wrapper, defer folding it after always_inline functions have
8493 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8494 might not be performed. */
8495
8496 bool
8497 avoid_folding_inline_builtin (tree fndecl)
8498 {
8499 return (DECL_DECLARED_INLINE_P (fndecl)
8500 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8501 && cfun
8502 && !cfun->always_inline_functions_inlined
8503 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8504 }
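
/* The case this guards against is a fortified wrapper along the lines
   of the one glibc installs for _FORTIFY_SOURCE, roughly

     extern __inline __attribute__ ((always_inline, gnu_inline)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n,
                                      __builtin_object_size (d, 0));
     }

   folding calls to such a memcpy before it has been inlined would
   bypass the object-size check the wrapper exists to add.  */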
8505
8506 /* A wrapper function for builtin folding that prevents warnings for
8507 "statement without effect" and the like, caused by removing the
8508 call node earlier than the warning is generated. */
8509
8510 tree
8511 fold_call_expr (location_t loc, tree exp, bool ignore)
8512 {
8513 tree ret = NULL_TREE;
8514 tree fndecl = get_callee_fndecl (exp);
8515 if (fndecl
8516 && TREE_CODE (fndecl) == FUNCTION_DECL
8517 && DECL_BUILT_IN (fndecl)
8518 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8519 yet. Defer folding until we see all the arguments
8520 (after inlining). */
8521 && !CALL_EXPR_VA_ARG_PACK (exp))
8522 {
8523 int nargs = call_expr_nargs (exp);
8524
8525 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8526 instead last argument is __builtin_va_arg_pack (). Defer folding
8527 even in that case, until arguments are finalized. */
8528 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8529 {
8530 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8531 if (fndecl2
8532 && TREE_CODE (fndecl2) == FUNCTION_DECL
8533 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8534 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8535 return NULL_TREE;
8536 }
8537
8538 if (avoid_folding_inline_builtin (fndecl))
8539 return NULL_TREE;
8540
8541 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8542 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8543 CALL_EXPR_ARGP (exp), ignore);
8544 else
8545 {
8546 tree *args = CALL_EXPR_ARGP (exp);
8547 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8548 if (ret)
8549 return ret;
8550 }
8551 }
8552 return NULL_TREE;
8553 }
8554
8555 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8556 N arguments are passed in the array ARGARRAY. Return a folded
8557 expression or NULL_TREE if no simplification was possible. */
8558
8559 tree
8560 fold_builtin_call_array (location_t loc, tree,
8561 tree fn,
8562 int n,
8563 tree *argarray)
8564 {
8565 if (TREE_CODE (fn) != ADDR_EXPR)
8566 return NULL_TREE;
8567
8568 tree fndecl = TREE_OPERAND (fn, 0);
8569 if (TREE_CODE (fndecl) == FUNCTION_DECL
8570 && DECL_BUILT_IN (fndecl))
8571 {
8572 /* If the last argument is __builtin_va_arg_pack (), arguments to this
8573 function are not finalized yet. Defer folding until they are. */
8574 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8575 {
8576 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8577 if (fndecl2
8578 && TREE_CODE (fndecl2) == FUNCTION_DECL
8579 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8580 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8581 return NULL_TREE;
8582 }
8583 if (avoid_folding_inline_builtin (fndecl))
8584 return NULL_TREE;
8585 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8586 return targetm.fold_builtin (fndecl, n, argarray, false);
8587 else
8588 return fold_builtin_n (loc, fndecl, argarray, n, false);
8589 }
8590
8591 return NULL_TREE;
8592 }
8593
8594 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8595 along with N new arguments specified as the "..." parameters. SKIP
8596 is the number of arguments in EXP to be omitted. This function is used
8597 to do varargs-to-varargs transformations. */
8598
8599 static tree
8600 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8601 {
8602 va_list ap;
8603 tree t;
8604
8605 va_start (ap, n);
8606 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8607 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8608 va_end (ap);
8609
8610 return t;
8611 }
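
/* Hypothetical illustration of rewrite_call_expr: given
   EXP = sprintf (dst, "%s", str), a caller could build the equivalent
   strcpy call with

     rewrite_call_expr (loc, exp, 2, strcpy_fn, 1, dst);

   i.e. skip the first two arguments of EXP (dst and the format string),
   prepend the single new argument dst, and build strcpy_fn (dst, str).
   strcpy_fn here stands for builtin_decl_implicit (BUILT_IN_STRCPY).  */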
8612
8613 /* Validate a single argument ARG against a tree code CODE representing
8614 a type. */
8615
8616 static bool
8617 validate_arg (const_tree arg, enum tree_code code)
8618 {
8619 if (!arg)
8620 return false;
8621 else if (code == POINTER_TYPE)
8622 return POINTER_TYPE_P (TREE_TYPE (arg));
8623 else if (code == INTEGER_TYPE)
8624 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8625 return code == TREE_CODE (TREE_TYPE (arg));
8626 }
8627
8628 /* This function validates the types of a function call argument list
8629 against a specified list of tree_codes. If the last specifier is a 0,
8630 that represents an ellipsis; otherwise the last specifier must be a
8631 VOID_TYPE.
8632
8633 This is the GIMPLE version of validate_arglist. Eventually we want to
8634 completely convert builtins.c to work from GIMPLEs and the tree based
8635 validate_arglist will then be removed. */
8636
8637 bool
8638 validate_gimple_arglist (const gcall *call, ...)
8639 {
8640 enum tree_code code;
8641 bool res = false;
8642 va_list ap;
8643 const_tree arg;
8644 size_t i;
8645
8646 va_start (ap, call);
8647 i = 0;
8648
8649 do
8650 {
8651 code = (enum tree_code) va_arg (ap, int);
8652 switch (code)
8653 {
8654 case 0:
8655 /* This signifies an ellipsis; any further arguments are all OK. */
8656 res = true;
8657 goto end;
8658 case VOID_TYPE:
8659 /* This signifies an endlink; if no arguments remain, return
8660 true, otherwise return false. */
8661 res = (i == gimple_call_num_args (call));
8662 goto end;
8663 default:
8664 /* If no parameters remain or the parameter's code does not
8665 match the specified code, return false. Otherwise continue
8666 checking any remaining arguments. */
8667 arg = gimple_call_arg (call, i++);
8668 if (!validate_arg (arg, code))
8669 goto end;
8670 break;
8671 }
8672 }
8673 while (1);
8674
8675 /* We need gotos here since we can only have one VA_CLOSE in a
8676 function. */
8677 end: ;
8678 va_end (ap);
8679
8680 return res;
8681 }
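
/* Usage sketch: for a call that must take exactly a pointer followed by an
   integer,

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   returns true only for that shape, while a trailing 0 instead of
   VOID_TYPE, as in

     validate_gimple_arglist (call, POINTER_TYPE, 0)

   accepts any further arguments after the pointer.  */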
8682
8683 /* Default target-specific builtin expander that does nothing. */
8684
8685 rtx
8686 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8687 rtx target ATTRIBUTE_UNUSED,
8688 rtx subtarget ATTRIBUTE_UNUSED,
8689 machine_mode mode ATTRIBUTE_UNUSED,
8690 int ignore ATTRIBUTE_UNUSED)
8691 {
8692 return NULL_RTX;
8693 }
8694
8695 /* Returns true if EXP represents data that would potentially reside
8696 in a readonly section. */
8697
8698 bool
8699 readonly_data_expr (tree exp)
8700 {
8701 STRIP_NOPS (exp);
8702
8703 if (TREE_CODE (exp) != ADDR_EXPR)
8704 return false;
8705
8706 exp = get_base_address (TREE_OPERAND (exp, 0));
8707 if (!exp)
8708 return false;
8709
8710 /* Make sure we call decl_readonly_section only for trees it
8711 can handle (since it returns true for everything it doesn't
8712 understand). */
8713 if (TREE_CODE (exp) == STRING_CST
8714 || TREE_CODE (exp) == CONSTRUCTOR
8715 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8716 return decl_readonly_section (exp, 0);
8717 else
8718 return false;
8719 }
8720
8721 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8722 to the call, and TYPE is its return type.
8723
8724 Return NULL_TREE if no simplification was possible, otherwise return the
8725 simplified form of the call as a tree.
8726
8727 The simplified form may be a constant or other expression which
8728 computes the same value, but in a more efficient manner (including
8729 calls to other builtin functions).
8730
8731 The call may contain arguments which need to be evaluated, but
8732 which are not useful to determine the result of the call. In
8733 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8734 COMPOUND_EXPR will be an argument which must be evaluated.
8735 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8736 COMPOUND_EXPR in the chain will contain the tree for the simplified
8737 form of the builtin function call. */
8738
8739 static tree
8740 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8741 {
8742 if (!validate_arg (s1, POINTER_TYPE)
8743 || !validate_arg (s2, POINTER_TYPE))
8744 return NULL_TREE;
8745 else
8746 {
8747 tree fn;
8748 const char *p1, *p2;
8749
8750 p2 = c_getstr (s2);
8751 if (p2 == NULL)
8752 return NULL_TREE;
8753
8754 p1 = c_getstr (s1);
8755 if (p1 != NULL)
8756 {
8757 const char *r = strstr (p1, p2);
8758 tree tem;
8759
8760 if (r == NULL)
8761 return build_int_cst (TREE_TYPE (s1), 0);
8762
8763 /* Return an offset into the constant string argument. */
8764 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8765 return fold_convert_loc (loc, type, tem);
8766 }
8767
8768 /* The argument is const char *, and the result is char *, so we need
8769 a type conversion here to avoid a warning. */
8770 if (p2[0] == '\0')
8771 return fold_convert_loc (loc, type, s1);
8772
8773 if (p2[1] != '\0')
8774 return NULL_TREE;
8775
8776 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8777 if (!fn)
8778 return NULL_TREE;
8779
8780 /* New argument list transforming strstr(s1, s2) to
8781 strchr(s1, s2[0]). */
8782 return build_call_expr_loc (loc, fn, 2, s1,
8783 build_int_cst (integer_type_node, p2[0]));
8784 }
8785 }
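
/* Some of the foldings performed above, shown informally:

     strstr (s, "")          ->  (char *) s
     strstr (s, "a")         ->  strchr (s, 'a')
     strstr ("hello", "ll")  ->  (char *) "hello" + 2
     strstr ("hello", "z")   ->  (char *) 0

   The last two require both arguments to be string literals.  */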
8786
8787 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8788 the call, and TYPE is its return type.
8789
8790 Return NULL_TREE if no simplification was possible, otherwise return the
8791 simplified form of the call as a tree.
8792
8793 The simplified form may be a constant or other expression which
8794 computes the same value, but in a more efficient manner (including
8795 calls to other builtin functions).
8796
8797 The call may contain arguments which need to be evaluated, but
8798 which are not useful to determine the result of the call. In
8799 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8800 COMPOUND_EXPR will be an argument which must be evaluated.
8801 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8802 COMPOUND_EXPR in the chain will contain the tree for the simplified
8803 form of the builtin function call. */
8804
8805 static tree
8806 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8807 {
8808 if (!validate_arg (s1, POINTER_TYPE)
8809 || !validate_arg (s2, INTEGER_TYPE))
8810 return NULL_TREE;
8811 else
8812 {
8813 const char *p1;
8814
8815 if (TREE_CODE (s2) != INTEGER_CST)
8816 return NULL_TREE;
8817
8818 p1 = c_getstr (s1);
8819 if (p1 != NULL)
8820 {
8821 char c;
8822 const char *r;
8823 tree tem;
8824
8825 if (target_char_cast (s2, &c))
8826 return NULL_TREE;
8827
8828 r = strchr (p1, c);
8829
8830 if (r == NULL)
8831 return build_int_cst (TREE_TYPE (s1), 0);
8832
8833 /* Return an offset into the constant string argument. */
8834 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8835 return fold_convert_loc (loc, type, tem);
8836 }
8837 return NULL_TREE;
8838 }
8839 }
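
/* This only folds when the string is a literal and the character is a
   constant, e.g.

     strchr ("hello", 'l')  ->  (char *) "hello" + 2
     strchr ("hello", 'z')  ->  (char *) 0  */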
8840
8841 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8842 the call, and TYPE is its return type.
8843
8844 Return NULL_TREE if no simplification was possible, otherwise return the
8845 simplified form of the call as a tree.
8846
8847 The simplified form may be a constant or other expression which
8848 computes the same value, but in a more efficient manner (including
8849 calls to other builtin functions).
8850
8851 The call may contain arguments which need to be evaluated, but
8852 which are not useful to determine the result of the call. In
8853 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8854 COMPOUND_EXPR will be an argument which must be evaluated.
8855 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8856 COMPOUND_EXPR in the chain will contain the tree for the simplified
8857 form of the builtin function call. */
8858
8859 static tree
8860 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8861 {
8862 if (!validate_arg (s1, POINTER_TYPE)
8863 || !validate_arg (s2, INTEGER_TYPE))
8864 return NULL_TREE;
8865 else
8866 {
8867 tree fn;
8868 const char *p1;
8869
8870 if (TREE_CODE (s2) != INTEGER_CST)
8871 return NULL_TREE;
8872
8873 p1 = c_getstr (s1);
8874 if (p1 != NULL)
8875 {
8876 char c;
8877 const char *r;
8878 tree tem;
8879
8880 if (target_char_cast (s2, &c))
8881 return NULL_TREE;
8882
8883 r = strrchr (p1, c);
8884
8885 if (r == NULL)
8886 return build_int_cst (TREE_TYPE (s1), 0);
8887
8888 /* Return an offset into the constant string argument. */
8889 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8890 return fold_convert_loc (loc, type, tem);
8891 }
8892
8893 if (! integer_zerop (s2))
8894 return NULL_TREE;
8895
8896 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8897 if (!fn)
8898 return NULL_TREE;
8899
8900 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8901 return build_call_expr_loc (loc, fn, 2, s1, s2);
8902 }
8903 }
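
/* Informally:

     strrchr ("hello", 'l')  ->  (char *) "hello" + 3
     strrchr ("hello", 'z')  ->  (char *) 0
     strrchr (s, '\0')       ->  strchr (s, '\0')

   The last form is used when only the character is known to be zero.  */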
8904
8905 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8906 to the call, and TYPE is its return type.
8907
8908 Return NULL_TREE if no simplification was possible, otherwise return the
8909 simplified form of the call as a tree.
8910
8911 The simplified form may be a constant or other expression which
8912 computes the same value, but in a more efficient manner (including
8913 calls to other builtin functions).
8914
8915 The call may contain arguments which need to be evaluated, but
8916 which are not useful to determine the result of the call. In
8917 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8918 COMPOUND_EXPR will be an argument which must be evaluated.
8919 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8920 COMPOUND_EXPR in the chain will contain the tree for the simplified
8921 form of the builtin function call. */
8922
8923 static tree
8924 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8925 {
8926 if (!validate_arg (s1, POINTER_TYPE)
8927 || !validate_arg (s2, POINTER_TYPE))
8928 return NULL_TREE;
8929 else
8930 {
8931 tree fn;
8932 const char *p1, *p2;
8933
8934 p2 = c_getstr (s2);
8935 if (p2 == NULL)
8936 return NULL_TREE;
8937
8938 p1 = c_getstr (s1);
8939 if (p1 != NULL)
8940 {
8941 const char *r = strpbrk (p1, p2);
8942 tree tem;
8943
8944 if (r == NULL)
8945 return build_int_cst (TREE_TYPE (s1), 0);
8946
8947 /* Return an offset into the constant string argument. */
8948 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8949 return fold_convert_loc (loc, type, tem);
8950 }
8951
8952 if (p2[0] == '\0')
8953 /* strpbrk(x, "") == NULL.
8954 Evaluate and ignore s1 in case it had side-effects. */
8955 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8956
8957 if (p2[1] != '\0')
8958 return NULL_TREE; /* Really call strpbrk. */
8959
8960 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8961 if (!fn)
8962 return NULL_TREE;
8963
8964 /* New argument list transforming strpbrk(s1, s2) to
8965 strchr(s1, s2[0]). */
8966 return build_call_expr_loc (loc, fn, 2, s1,
8967 build_int_cst (integer_type_node, p2[0]));
8968 }
8969 }
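
/* Informally:

     strpbrk ("hello", "lo")  ->  (char *) "hello" + 2
     strpbrk (s, "")          ->  (char *) 0   (s is still evaluated)
     strpbrk (s, "a")         ->  strchr (s, 'a')  */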
8970
8971 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8972 to the call.
8973
8974 Return NULL_TREE if no simplification was possible, otherwise return the
8975 simplified form of the call as a tree.
8976
8977 The simplified form may be a constant or other expression which
8978 computes the same value, but in a more efficient manner (including
8979 calls to other builtin functions).
8980
8981 The call may contain arguments which need to be evaluated, but
8982 which are not useful to determine the result of the call. In
8983 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8984 COMPOUND_EXPR will be an argument which must be evaluated.
8985 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8986 COMPOUND_EXPR in the chain will contain the tree for the simplified
8987 form of the builtin function call. */
8988
8989 static tree
8990 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8991 {
8992 if (!validate_arg (s1, POINTER_TYPE)
8993 || !validate_arg (s2, POINTER_TYPE))
8994 return NULL_TREE;
8995 else
8996 {
8997 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8998
8999 /* If either argument is "", the result is zero. */
9000 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9001 /* Evaluate and ignore both arguments in case either one has
9002 side-effects. */
9003 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9004 s1, s2);
9005 return NULL_TREE;
9006 }
9007 }
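
/* E.g. strspn (s, "") and strspn ("", s) both fold to (size_t) 0; both
   arguments are still evaluated for their side effects.  */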
9008
9009 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9010 to the call.
9011
9012 Return NULL_TREE if no simplification was possible, otherwise return the
9013 simplified form of the call as a tree.
9014
9015 The simplified form may be a constant or other expression which
9016 computes the same value, but in a more efficient manner (including
9017 calls to other builtin functions).
9018
9019 The call may contain arguments which need to be evaluated, but
9020 which are not useful to determine the result of the call. In
9021 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9022 COMPOUND_EXPR will be an argument which must be evaluated.
9023 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9024 COMPOUND_EXPR in the chain will contain the tree for the simplified
9025 form of the builtin function call. */
9026
9027 static tree
9028 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9029 {
9030 if (!validate_arg (s1, POINTER_TYPE)
9031 || !validate_arg (s2, POINTER_TYPE))
9032 return NULL_TREE;
9033 else
9034 {
9035 /* If the first argument is "", the result is zero. */
9036 const char *p1 = c_getstr (s1);
9037 if (p1 && *p1 == '\0')
9038 {
9039 /* Evaluate and ignore argument s2 in case it has
9040 side-effects. */
9041 return omit_one_operand_loc (loc, size_type_node,
9042 size_zero_node, s2);
9043 }
9044
9045 /* If the second argument is "", return __builtin_strlen(s1). */
9046 const char *p2 = c_getstr (s2);
9047 if (p2 && *p2 == '\0')
9048 {
9049 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9050
9051 /* If the replacement _DECL isn't initialized, don't do the
9052 transformation. */
9053 if (!fn)
9054 return NULL_TREE;
9055
9056 return build_call_expr_loc (loc, fn, 1, s1);
9057 }
9058 return NULL_TREE;
9059 }
9060 }
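
/* E.g. strcspn ("", s) folds to (size_t) 0 (s is still evaluated), and
   strcspn (s, "") folds to strlen (s).  */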
9061
9062 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9063 produced, false otherwise. This is done so that we don't output the
9064 error or warning two or three times. */
9065
9066 bool
9067 fold_builtin_next_arg (tree exp, bool va_start_p)
9068 {
9069 tree fntype = TREE_TYPE (current_function_decl);
9070 int nargs = call_expr_nargs (exp);
9071 tree arg;
9072 /* There is a good chance the current input_location points inside the
9073 definition of the va_start macro (perhaps on the token for the
9074 builtin) in a system header, so warnings will not be emitted.
9075 Use the location in real source code. */
9076 source_location current_location =
9077 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9078 NULL);
9079
9080 if (!stdarg_p (fntype))
9081 {
9082 error ("%<va_start%> used in function with fixed args");
9083 return true;
9084 }
9085
9086 if (va_start_p)
9087 {
9088 if (nargs != 2)
9089 {
9090 error ("wrong number of arguments to function %<va_start%>");
9091 return true;
9092 }
9093 arg = CALL_EXPR_ARG (exp, 1);
9094 }
9095 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9096 when we checked the arguments and if needed issued a warning. */
9097 else
9098 {
9099 if (nargs == 0)
9100 {
9101 /* Evidently an out of date version of <stdarg.h>; can't validate
9102 va_start's second argument, but can still work as intended. */
9103 warning_at (current_location,
9104 OPT_Wvarargs,
9105 "%<__builtin_next_arg%> called without an argument");
9106 return true;
9107 }
9108 else if (nargs > 1)
9109 {
9110 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9111 return true;
9112 }
9113 arg = CALL_EXPR_ARG (exp, 0);
9114 }
9115
9116 if (TREE_CODE (arg) == SSA_NAME)
9117 arg = SSA_NAME_VAR (arg);
9118
9119 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9120 or __builtin_next_arg (0) the first time we see it, after checking
9121 the arguments and if needed issuing a warning. */
9122 if (!integer_zerop (arg))
9123 {
9124 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9125
9126 /* Strip off all nops for the sake of the comparison. This
9127 is not quite the same as STRIP_NOPS. It does more.
9128 We must also strip off INDIRECT_EXPR for C++ reference
9129 parameters. */
9130 while (CONVERT_EXPR_P (arg)
9131 || TREE_CODE (arg) == INDIRECT_REF)
9132 arg = TREE_OPERAND (arg, 0);
9133 if (arg != last_parm)
9134 {
9135 /* FIXME: Sometimes with the tree optimizers we can get something
9136 other than the last argument even though the user used the last
9137 argument. We just warn and treat the argument as if it were the
9138 last one, so we may generate wrong code because of
9139 it. */
9140 warning_at (current_location,
9141 OPT_Wvarargs,
9142 "second parameter of %<va_start%> not last named argument");
9143 }
9144
9145 /* Undefined by C99 7.15.1.4p4 (va_start):
9146 "If the parameter parmN is declared with the register storage
9147 class, with a function or array type, or with a type that is
9148 not compatible with the type that results after application of
9149 the default argument promotions, the behavior is undefined."
9150 */
9151 else if (DECL_REGISTER (arg))
9152 {
9153 warning_at (current_location,
9154 OPT_Wvarargs,
9155 "undefined behaviour when second parameter of "
9156 "%<va_start%> is declared with %<register%> storage");
9157 }
9158
9159 /* We want to verify the second parameter just once before the tree
9160 optimizers are run and then avoid keeping it in the tree,
9161 as otherwise we could warn even for correct code like:
9162 void foo (int i, ...)
9163 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9164 if (va_start_p)
9165 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9166 else
9167 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9168 }
9169 return false;
9170 }
9171
9172
9173 /* Expand a call EXP to __builtin_object_size. */
9174
9175 static rtx
9176 expand_builtin_object_size (tree exp)
9177 {
9178 tree ost;
9179 int object_size_type;
9180 tree fndecl = get_callee_fndecl (exp);
9181
9182 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9183 {
9184 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9185 exp, fndecl);
9186 expand_builtin_trap ();
9187 return const0_rtx;
9188 }
9189
9190 ost = CALL_EXPR_ARG (exp, 1);
9191 STRIP_NOPS (ost);
9192
9193 if (TREE_CODE (ost) != INTEGER_CST
9194 || tree_int_cst_sgn (ost) < 0
9195 || compare_tree_int (ost, 3) > 0)
9196 {
9197 error ("%Klast argument of %D is not integer constant between 0 and 3",
9198 exp, fndecl);
9199 expand_builtin_trap ();
9200 return const0_rtx;
9201 }
9202
9203 object_size_type = tree_to_shwi (ost);
9204
9205 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9206 }
9207
9208 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9209 FCODE is the BUILT_IN_* to use.
9210 Return NULL_RTX if we failed; the caller should emit a normal call,
9211 otherwise try to get the result in TARGET, if convenient (and in
9212 mode MODE if that's convenient). */
9213
9214 static rtx
9215 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9216 enum built_in_function fcode)
9217 {
9218 tree dest, src, len, size;
9219
9220 if (!validate_arglist (exp,
9221 POINTER_TYPE,
9222 fcode == BUILT_IN_MEMSET_CHK
9223 ? INTEGER_TYPE : POINTER_TYPE,
9224 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9225 return NULL_RTX;
9226
9227 dest = CALL_EXPR_ARG (exp, 0);
9228 src = CALL_EXPR_ARG (exp, 1);
9229 len = CALL_EXPR_ARG (exp, 2);
9230 size = CALL_EXPR_ARG (exp, 3);
9231
9232 if (! tree_fits_uhwi_p (size))
9233 return NULL_RTX;
9234
9235 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9236 {
9237 tree fn;
9238
9239 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9240 {
9241 warning_at (tree_nonartificial_location (exp),
9242 0, "%Kcall to %D will always overflow destination buffer",
9243 exp, get_callee_fndecl (exp));
9244 return NULL_RTX;
9245 }
9246
9247 fn = NULL_TREE;
9248 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9249 mem{cpy,pcpy,move,set} is available. */
9250 switch (fcode)
9251 {
9252 case BUILT_IN_MEMCPY_CHK:
9253 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9254 break;
9255 case BUILT_IN_MEMPCPY_CHK:
9256 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9257 break;
9258 case BUILT_IN_MEMMOVE_CHK:
9259 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9260 break;
9261 case BUILT_IN_MEMSET_CHK:
9262 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9263 break;
9264 default:
9265 break;
9266 }
9267
9268 if (! fn)
9269 return NULL_RTX;
9270
9271 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9272 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9273 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9274 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9275 }
9276 else if (fcode == BUILT_IN_MEMSET_CHK)
9277 return NULL_RTX;
9278 else
9279 {
9280 unsigned int dest_align = get_pointer_alignment (dest);
9281
9282 /* If DEST is not a pointer type, call the normal function. */
9283 if (dest_align == 0)
9284 return NULL_RTX;
9285
9286 /* If SRC and DEST are the same (and not volatile), do nothing. */
9287 if (operand_equal_p (src, dest, 0))
9288 {
9289 tree expr;
9290
9291 if (fcode != BUILT_IN_MEMPCPY_CHK)
9292 {
9293 /* Evaluate and ignore LEN in case it has side-effects. */
9294 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9295 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9296 }
9297
9298 expr = fold_build_pointer_plus (dest, len);
9299 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9300 }
9301
9302 /* __memmove_chk special case. */
9303 if (fcode == BUILT_IN_MEMMOVE_CHK)
9304 {
9305 unsigned int src_align = get_pointer_alignment (src);
9306
9307 if (src_align == 0)
9308 return NULL_RTX;
9309
9310 /* If src is categorized for a readonly section we can use
9311 normal __memcpy_chk. */
9312 if (readonly_data_expr (src))
9313 {
9314 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9315 if (!fn)
9316 return NULL_RTX;
9317 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9318 dest, src, len, size);
9319 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9320 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9321 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9322 }
9323 }
9324 return NULL_RTX;
9325 }
9326 }
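
/* For example, __builtin___memcpy_chk (d, s, 16, 32) is expanded like a
   plain memcpy (d, s, 16), whereas __builtin___memcpy_chk (d, s, 64, 32)
   draws the "will always overflow" warning above and is left for the
   normal library call.  */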
9327
9328 /* Emit warning if a buffer overflow is detected at compile time. */
9329
9330 static void
9331 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9332 {
9333 int is_strlen = 0;
9334 tree len, size;
9335 location_t loc = tree_nonartificial_location (exp);
9336
9337 switch (fcode)
9338 {
9339 case BUILT_IN_STRCPY_CHK:
9340 case BUILT_IN_STPCPY_CHK:
9341 /* For __strcat_chk the warning will be emitted only if overflowing
9342 by at least strlen (dest) + 1 bytes. */
9343 case BUILT_IN_STRCAT_CHK:
9344 len = CALL_EXPR_ARG (exp, 1);
9345 size = CALL_EXPR_ARG (exp, 2);
9346 is_strlen = 1;
9347 break;
9348 case BUILT_IN_STRNCAT_CHK:
9349 case BUILT_IN_STRNCPY_CHK:
9350 case BUILT_IN_STPNCPY_CHK:
9351 len = CALL_EXPR_ARG (exp, 2);
9352 size = CALL_EXPR_ARG (exp, 3);
9353 break;
9354 case BUILT_IN_SNPRINTF_CHK:
9355 case BUILT_IN_VSNPRINTF_CHK:
9356 len = CALL_EXPR_ARG (exp, 1);
9357 size = CALL_EXPR_ARG (exp, 3);
9358 break;
9359 default:
9360 gcc_unreachable ();
9361 }
9362
9363 if (!len || !size)
9364 return;
9365
9366 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9367 return;
9368
9369 if (is_strlen)
9370 {
9371 len = c_strlen (len, 1);
9372 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9373 return;
9374 }
9375 else if (fcode == BUILT_IN_STRNCAT_CHK)
9376 {
9377 tree src = CALL_EXPR_ARG (exp, 1);
9378 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9379 return;
9380 src = c_strlen (src, 1);
9381 if (! src || ! tree_fits_uhwi_p (src))
9382 {
9383 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9384 exp, get_callee_fndecl (exp));
9385 return;
9386 }
9387 else if (tree_int_cst_lt (src, size))
9388 return;
9389 }
9390 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9391 return;
9392
9393 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9394 exp, get_callee_fndecl (exp));
9395 }
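
/* E.g. with char buf[4], the call __builtin___strcpy_chk (buf, "too long", 4)
   has strlen (src) == 8 >= 4, so the "will always overflow" warning above
   is emitted.  */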
9396
9397 /* Emit warning if a buffer overflow is detected at compile time
9398 in __sprintf_chk/__vsprintf_chk calls. */
9399
9400 static void
9401 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9402 {
9403 tree size, len, fmt;
9404 const char *fmt_str;
9405 int nargs = call_expr_nargs (exp);
9406
9407 /* Verify the required arguments in the original call. */
9408
9409 if (nargs < 4)
9410 return;
9411 size = CALL_EXPR_ARG (exp, 2);
9412 fmt = CALL_EXPR_ARG (exp, 3);
9413
9414 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9415 return;
9416
9417 /* Check whether the format is a literal string constant. */
9418 fmt_str = c_getstr (fmt);
9419 if (fmt_str == NULL)
9420 return;
9421
9422 if (!init_target_chars ())
9423 return;
9424
9425 /* If the format doesn't contain % args or %%, we know its size. */
9426 if (strchr (fmt_str, target_percent) == 0)
9427 len = build_int_cstu (size_type_node, strlen (fmt_str));
9428 /* If the format is "%s" and the first ... argument is a string literal,
9429 we know its size too. */
9430 else if (fcode == BUILT_IN_SPRINTF_CHK
9431 && strcmp (fmt_str, target_percent_s) == 0)
9432 {
9433 tree arg;
9434
9435 if (nargs < 5)
9436 return;
9437 arg = CALL_EXPR_ARG (exp, 4);
9438 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9439 return;
9440
9441 len = c_strlen (arg, 1);
9442 if (!len || ! tree_fits_uhwi_p (len))
9443 return;
9444 }
9445 else
9446 return;
9447
9448 if (! tree_int_cst_lt (len, size))
9449 warning_at (tree_nonartificial_location (exp),
9450 0, "%Kcall to %D will always overflow destination buffer",
9451 exp, get_callee_fndecl (exp));
9452 }
9453
9454 /* Emit a warning if free is called with the address of a variable. */
9455
9456 static void
9457 maybe_emit_free_warning (tree exp)
9458 {
9459 tree arg = CALL_EXPR_ARG (exp, 0);
9460
9461 STRIP_NOPS (arg);
9462 if (TREE_CODE (arg) != ADDR_EXPR)
9463 return;
9464
9465 arg = get_base_address (TREE_OPERAND (arg, 0));
9466 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9467 return;
9468
9469 if (SSA_VAR_P (arg))
9470 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9471 "%Kattempt to free a non-heap object %qD", exp, arg);
9472 else
9473 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9474 "%Kattempt to free a non-heap object", exp);
9475 }
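
/* E.g.

     int x;
     free (&x);

   triggers -Wfree-nonheap-object: "attempt to free a non-heap object 'x'".  */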
9476
9477 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9478 if possible. */
9479
9480 static tree
9481 fold_builtin_object_size (tree ptr, tree ost)
9482 {
9483 unsigned HOST_WIDE_INT bytes;
9484 int object_size_type;
9485
9486 if (!validate_arg (ptr, POINTER_TYPE)
9487 || !validate_arg (ost, INTEGER_TYPE))
9488 return NULL_TREE;
9489
9490 STRIP_NOPS (ost);
9491
9492 if (TREE_CODE (ost) != INTEGER_CST
9493 || tree_int_cst_sgn (ost) < 0
9494 || compare_tree_int (ost, 3) > 0)
9495 return NULL_TREE;
9496
9497 object_size_type = tree_to_shwi (ost);
9498
9499 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9500 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9501 and (size_t) 0 for types 2 and 3. */
9502 if (TREE_SIDE_EFFECTS (ptr))
9503 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9504
9505 if (TREE_CODE (ptr) == ADDR_EXPR)
9506 {
9507 bytes = compute_builtin_object_size (ptr, object_size_type);
9508 if (wi::fits_to_tree_p (bytes, size_type_node))
9509 return build_int_cstu (size_type_node, bytes);
9510 }
9511 else if (TREE_CODE (ptr) == SSA_NAME)
9512 {
9513 /* If object size is not known yet, delay folding until
9514 later. Maybe subsequent passes will help determining
9515 it. */
9516 bytes = compute_builtin_object_size (ptr, object_size_type);
9517 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9518 && wi::fits_to_tree_p (bytes, size_type_node))
9519 return build_int_cstu (size_type_node, bytes);
9520 }
9521
9522 return NULL_TREE;
9523 }
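
/* For example, given char buf[64], __builtin_object_size (&buf[16], 0)
   folds to (size_t) 48 once the size is computable, while a pointer
   argument with side effects such as __builtin_object_size (p++, 0) folds
   to (size_t) -1 (or (size_t) 0 for types 2 and 3), per the comment
   above.  */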
9524
9525 /* Builtins with folding operations that operate on "..." arguments
9526 need special handling; we need to store the arguments in a convenient
9527 data structure before attempting any folding. Fortunately there are
9528 only a few builtins that fall into this category. FNDECL is the
9529 function, EXP is the CALL_EXPR for the call. */
9530
9531 static tree
9532 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9533 {
9534 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9535 tree ret = NULL_TREE;
9536
9537 switch (fcode)
9538 {
9539 case BUILT_IN_FPCLASSIFY:
9540 ret = fold_builtin_fpclassify (loc, args, nargs);
9541 break;
9542
9543 default:
9544 break;
9545 }
9546 if (ret)
9547 {
9548 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9549 SET_EXPR_LOCATION (ret, loc);
9550 TREE_NO_WARNING (ret) = 1;
9551 return ret;
9552 }
9553 return NULL_TREE;
9554 }
9555
9556 /* Initialize format string characters in the target charset. */
9557
9558 bool
9559 init_target_chars (void)
9560 {
9561 static bool init;
9562 if (!init)
9563 {
9564 target_newline = lang_hooks.to_target_charset ('\n');
9565 target_percent = lang_hooks.to_target_charset ('%');
9566 target_c = lang_hooks.to_target_charset ('c');
9567 target_s = lang_hooks.to_target_charset ('s');
9568 if (target_newline == 0 || target_percent == 0 || target_c == 0
9569 || target_s == 0)
9570 return false;
9571
9572 target_percent_c[0] = target_percent;
9573 target_percent_c[1] = target_c;
9574 target_percent_c[2] = '\0';
9575
9576 target_percent_s[0] = target_percent;
9577 target_percent_s[1] = target_s;
9578 target_percent_s[2] = '\0';
9579
9580 target_percent_s_newline[0] = target_percent;
9581 target_percent_s_newline[1] = target_s;
9582 target_percent_s_newline[2] = target_newline;
9583 target_percent_s_newline[3] = '\0';
9584
9585 init = true;
9586 }
9587 return true;
9588 }
9589
9590 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9591 and no overflow/underflow occurred. INEXACT is true if M was not
9592 exactly calculated. TYPE is the tree type for the result. This
9593 function assumes that you cleared the MPFR flags and then
9594 calculated M to see if anything subsequently set a flag prior to
9595 entering this function. Return NULL_TREE if any checks fail. */
9596
9597 static tree
9598 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9599 {
9600 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9601 overflow/underflow occurred. If -frounding-math, proceed iff the
9602 result of calling FUNC was exact. */
9603 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9604 && (!flag_rounding_math || !inexact))
9605 {
9606 REAL_VALUE_TYPE rr;
9607
9608 real_from_mpfr (&rr, m, type, GMP_RNDN);
9609 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9610 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9611 but the mpfr_t is not, then we underflowed in the
9612 conversion. */
9613 if (real_isfinite (&rr)
9614 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9615 {
9616 REAL_VALUE_TYPE rmode;
9617
9618 real_convert (&rmode, TYPE_MODE (type), &rr);
9619 /* Proceed iff the specified mode can hold the value. */
9620 if (real_identical (&rmode, &rr))
9621 return build_real (type, rmode);
9622 }
9623 }
9624 return NULL_TREE;
9625 }
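
/* Typical caller pattern (a sketch; mpfr_sin is just a stand-in for
   whichever MPFR function is being folded):

     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);  */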
9626
9627 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9628 number and no overflow/underflow occurred. INEXACT is true if M
9629 was not exactly calculated. TYPE is the tree type for the result.
9630 This function assumes that you cleared the MPFR flags and then
9631 calculated M to see if anything subsequently set a flag prior to
9632 entering this function. Return NULL_TREE if any checks fail, if
9633 FORCE_CONVERT is true, then bypass the checks. */
9634
9635 static tree
9636 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9637 {
9638 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9639 overflow/underflow occurred. If -frounding-math, proceed iff the
9640 result of calling FUNC was exact. */
9641 if (force_convert
9642 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9643 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9644 && (!flag_rounding_math || !inexact)))
9645 {
9646 REAL_VALUE_TYPE re, im;
9647
9648 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9649 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9650 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9651 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9652 but the mpfr_t is not, then we underflowed in the
9653 conversion. */
9654 if (force_convert
9655 || (real_isfinite (&re) && real_isfinite (&im)
9656 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9657 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9658 {
9659 REAL_VALUE_TYPE re_mode, im_mode;
9660
9661 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9662 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9663 /* Proceed iff the specified mode can hold the value. */
9664 if (force_convert
9665 || (real_identical (&re_mode, &re)
9666 && real_identical (&im_mode, &im)))
9667 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9668 build_real (TREE_TYPE (type), im_mode));
9669 }
9670 }
9671 return NULL_TREE;
9672 }
9673
9674 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9675 the pointer *(ARG_QUO) and return the result. The type is taken
9676 from the type of ARG0 and is used for setting the precision of the
9677 calculation and results. */
9678
9679 static tree
9680 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9681 {
9682 tree const type = TREE_TYPE (arg0);
9683 tree result = NULL_TREE;
9684
9685 STRIP_NOPS (arg0);
9686 STRIP_NOPS (arg1);
9687
9688 /* To proceed, MPFR must exactly represent the target floating point
9689 format, which only happens when the target base equals two. */
9690 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9691 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9692 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9693 {
9694 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9695 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9696
9697 if (real_isfinite (ra0) && real_isfinite (ra1))
9698 {
9699 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9700 const int prec = fmt->p;
9701 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9702 tree result_rem;
9703 long integer_quo;
9704 mpfr_t m0, m1;
9705
9706 mpfr_inits2 (prec, m0, m1, NULL);
9707 mpfr_from_real (m0, ra0, GMP_RNDN);
9708 mpfr_from_real (m1, ra1, GMP_RNDN);
9709 mpfr_clear_flags ();
9710 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9711 /* Remquo is independent of the rounding mode, so pass
9712 inexact=0 to do_mpfr_ckconv(). */
9713 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9714 mpfr_clears (m0, m1, NULL);
9715 if (result_rem)
9716 {
9717 /* MPFR calculates quo in the host's long so it may
9718 return more bits in quo than the target int can hold
9719 if sizeof(host long) > sizeof(target int). This can
9720 happen even for native compilers in LP64 mode. In
9721 these cases, modulo the quo value with the largest
9722 number that the target int can hold while leaving one
9723 bit for the sign. */
9724 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9725 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9726
9727 /* Dereference the quo pointer argument. */
9728 arg_quo = build_fold_indirect_ref (arg_quo);
9729 /* Proceed iff a valid pointer type was passed in. */
9730 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9731 {
9732 /* Set the value. */
9733 tree result_quo
9734 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9735 build_int_cst (TREE_TYPE (arg_quo),
9736 integer_quo));
9737 TREE_SIDE_EFFECTS (result_quo) = 1;
9738 /* Combine the quo assignment with the rem. */
9739 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9740 result_quo, result_rem));
9741 }
9742 }
9743 }
9744 }
9745 return result;
9746 }
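
/* E.g. for REAL_CST arguments 5.0 and 3.0, mpfr_remquo rounds the quotient
   5.0/3.0 to the nearest integer 2, so the call folds to the remainder
   -1.0 with *quo set to 2.  */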
9747
9748 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9749 resulting value as a tree with type TYPE. The mpfr precision is
9750 set to the precision of TYPE. We assume that this mpfr function
9751 returns zero if the result could be calculated exactly within the
9752 requested precision. In addition, the integer pointer represented
9753 by ARG_SG will be dereferenced and set to the appropriate signgam
9754 (-1,1) value. */
9755
9756 static tree
9757 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9758 {
9759 tree result = NULL_TREE;
9760
9761 STRIP_NOPS (arg);
9762
9763 /* To proceed, MPFR must exactly represent the target floating point
9764 format, which only happens when the target base equals two. Also
9765 verify ARG is a constant and that ARG_SG is an int pointer. */
9766 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9767 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9768 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9769 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9770 {
9771 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9772
9773 /* In addition to NaN and Inf, the argument cannot be zero or a
9774 negative integer. */
9775 if (real_isfinite (ra)
9776 && ra->cl != rvc_zero
9777 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9778 {
9779 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9780 const int prec = fmt->p;
9781 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9782 int inexact, sg;
9783 mpfr_t m;
9784 tree result_lg;
9785
9786 mpfr_init2 (m, prec);
9787 mpfr_from_real (m, ra, GMP_RNDN);
9788 mpfr_clear_flags ();
9789 inexact = mpfr_lgamma (m, &sg, m, rnd);
9790 result_lg = do_mpfr_ckconv (m, type, inexact);
9791 mpfr_clear (m);
9792 if (result_lg)
9793 {
9794 tree result_sg;
9795
9796 /* Dereference the arg_sg pointer argument. */
9797 arg_sg = build_fold_indirect_ref (arg_sg);
9798 /* Assign the signgam value into *arg_sg. */
9799 result_sg = fold_build2 (MODIFY_EXPR,
9800 TREE_TYPE (arg_sg), arg_sg,
9801 build_int_cst (TREE_TYPE (arg_sg), sg));
9802 TREE_SIDE_EFFECTS (result_sg) = 1;
9803 /* Combine the signgam assignment with the lgamma result. */
9804 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9805 result_sg, result_lg));
9806 }
9807 }
9808 }
9809
9810 return result;
9811 }
9812
9813 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9814 mpc function FUNC on it and return the resulting value as a tree
9815 with type TYPE. The mpfr precision is set to the precision of
9816 TYPE. We assume that function FUNC returns zero if the result
9817 could be calculated exactly within the requested precision. If
9818 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9819 in the arguments and/or results. */
9820
9821 tree
9822 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9823 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9824 {
9825 tree result = NULL_TREE;
9826
9827 STRIP_NOPS (arg0);
9828 STRIP_NOPS (arg1);
9829
9830 /* To proceed, MPFR must exactly represent the target floating point
9831 format, which only happens when the target base equals two. */
9832 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9833 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9834 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9835 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9836 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9837 {
9838 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9839 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9840 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9841 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9842
9843 if (do_nonfinite
9844 || (real_isfinite (re0) && real_isfinite (im0)
9845 && real_isfinite (re1) && real_isfinite (im1)))
9846 {
9847 const struct real_format *const fmt =
9848 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9849 const int prec = fmt->p;
9850 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9851 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9852 int inexact;
9853 mpc_t m0, m1;
9854
9855 mpc_init2 (m0, prec);
9856 mpc_init2 (m1, prec);
9857 mpfr_from_real (mpc_realref (m0), re0, rnd);
9858 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9859 mpfr_from_real (mpc_realref (m1), re1, rnd);
9860 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9861 mpfr_clear_flags ();
9862 inexact = func (m0, m0, m1, crnd);
9863 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9864 mpc_clear (m0);
9865 mpc_clear (m1);
9866 }
9867 }
9868
9869 return result;
9870 }
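
/* Usage sketch (the actual callers live outside this function): passing
   mpc_pow as FUNC folds a constant complex pow, e.g.

     result = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);  */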
9871
9872 /* A wrapper function for builtin folding that prevents warnings for
9873 "statement without effect" and the like, caused by removing the
9874 call node earlier than the warning is generated. */
9875
9876 tree
9877 fold_call_stmt (gcall *stmt, bool ignore)
9878 {
9879 tree ret = NULL_TREE;
9880 tree fndecl = gimple_call_fndecl (stmt);
9881 location_t loc = gimple_location (stmt);
9882 if (fndecl
9883 && TREE_CODE (fndecl) == FUNCTION_DECL
9884 && DECL_BUILT_IN (fndecl)
9885 && !gimple_call_va_arg_pack_p (stmt))
9886 {
9887 int nargs = gimple_call_num_args (stmt);
9888 tree *args = (nargs > 0
9889 ? gimple_call_arg_ptr (stmt, 0)
9890 : &error_mark_node);
9891
9892 if (avoid_folding_inline_builtin (fndecl))
9893 return NULL_TREE;
9894 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9895 {
9896 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9897 }
9898 else
9899 {
9900 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9901 if (ret)
9902 {
9903 /* Propagate location information from original call to
9904 expansion of builtin. Otherwise things like
9905 maybe_emit_chk_warning, that operate on the expansion
9906 of a builtin, will use the wrong location information. */
9907 if (gimple_has_location (stmt))
9908 {
9909 tree realret = ret;
9910 if (TREE_CODE (ret) == NOP_EXPR)
9911 realret = TREE_OPERAND (ret, 0);
9912 if (CAN_HAVE_LOCATION_P (realret)
9913 && !EXPR_HAS_LOCATION (realret))
9914 SET_EXPR_LOCATION (realret, loc);
9915 return realret;
9916 }
9917 return ret;
9918 }
9919 }
9920 }
9921 return NULL_TREE;
9922 }
9923
9924 /* Look up the function in builtin_decl that corresponds to DECL
9925 and set ASMSPEC as its user assembler name. DECL must be a
9926 function decl that declares a builtin. */
9927
9928 void
9929 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9930 {
9931 tree builtin;
9932 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9933 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9934 && asmspec != 0);
9935
9936 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9937 set_user_assembler_name (builtin, asmspec);
9938 switch (DECL_FUNCTION_CODE (decl))
9939 {
9940 case BUILT_IN_MEMCPY:
9941 init_block_move_fn (asmspec);
9942 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
9943 break;
9944 case BUILT_IN_MEMSET:
9945 init_block_clear_fn (asmspec);
9946 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
9947 break;
9948 case BUILT_IN_MEMMOVE:
9949 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
9950 break;
9951 case BUILT_IN_MEMCMP:
9952 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
9953 break;
9954 case BUILT_IN_ABORT:
9955 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
9956 break;
9957 case BUILT_IN_FFS:
9958 if (INT_TYPE_SIZE < BITS_PER_WORD)
9959 {
9960 set_user_assembler_libfunc ("ffs", asmspec);
9961 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
9962 MODE_INT, 0), "ffs");
9963 }
9964 break;
9965 default:
9966 break;
9967 }
9968 }
9969
9970 /* Return true if DECL is a builtin that expands to a constant or similarly
9971 simple code. */
9972 bool
9973 is_simple_builtin (tree decl)
9974 {
9975 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9976 switch (DECL_FUNCTION_CODE (decl))
9977 {
9978 /* Builtins that expand to constants. */
9979 case BUILT_IN_CONSTANT_P:
9980 case BUILT_IN_EXPECT:
9981 case BUILT_IN_OBJECT_SIZE:
9982 case BUILT_IN_UNREACHABLE:
9983 /* Simple register moves or loads from stack. */
9984 case BUILT_IN_ASSUME_ALIGNED:
9985 case BUILT_IN_RETURN_ADDRESS:
9986 case BUILT_IN_EXTRACT_RETURN_ADDR:
9987 case BUILT_IN_FROB_RETURN_ADDR:
9988 case BUILT_IN_RETURN:
9989 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9990 case BUILT_IN_FRAME_ADDRESS:
9991 case BUILT_IN_VA_END:
9992 case BUILT_IN_STACK_SAVE:
9993 case BUILT_IN_STACK_RESTORE:
9994 /* Exception state returns or moves registers around. */
9995 case BUILT_IN_EH_FILTER:
9996 case BUILT_IN_EH_POINTER:
9997 case BUILT_IN_EH_COPY_VALUES:
9998 return true;
9999
10000 default:
10001 return false;
10002 }
10003
10004 return false;
10005 }
10006
10007 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10008 most probably expanded inline into reasonably simple code. This is a
10009 superset of is_simple_builtin. */
10010 bool
10011 is_inexpensive_builtin (tree decl)
10012 {
10013 if (!decl)
10014 return false;
10015 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10016 return true;
10017 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10018 switch (DECL_FUNCTION_CODE (decl))
10019 {
10020 case BUILT_IN_ABS:
10021 case BUILT_IN_ALLOCA:
10022 case BUILT_IN_ALLOCA_WITH_ALIGN:
10023 case BUILT_IN_BSWAP16:
10024 case BUILT_IN_BSWAP32:
10025 case BUILT_IN_BSWAP64:
10026 case BUILT_IN_CLZ:
10027 case BUILT_IN_CLZIMAX:
10028 case BUILT_IN_CLZL:
10029 case BUILT_IN_CLZLL:
10030 case BUILT_IN_CTZ:
10031 case BUILT_IN_CTZIMAX:
10032 case BUILT_IN_CTZL:
10033 case BUILT_IN_CTZLL:
10034 case BUILT_IN_FFS:
10035 case BUILT_IN_FFSIMAX:
10036 case BUILT_IN_FFSL:
10037 case BUILT_IN_FFSLL:
10038 case BUILT_IN_IMAXABS:
10039 case BUILT_IN_FINITE:
10040 case BUILT_IN_FINITEF:
10041 case BUILT_IN_FINITEL:
10042 case BUILT_IN_FINITED32:
10043 case BUILT_IN_FINITED64:
10044 case BUILT_IN_FINITED128:
10045 case BUILT_IN_FPCLASSIFY:
10046 case BUILT_IN_ISFINITE:
10047 case BUILT_IN_ISINF_SIGN:
10048 case BUILT_IN_ISINF:
10049 case BUILT_IN_ISINFF:
10050 case BUILT_IN_ISINFL:
10051 case BUILT_IN_ISINFD32:
10052 case BUILT_IN_ISINFD64:
10053 case BUILT_IN_ISINFD128:
10054 case BUILT_IN_ISNAN:
10055 case BUILT_IN_ISNANF:
10056 case BUILT_IN_ISNANL:
10057 case BUILT_IN_ISNAND32:
10058 case BUILT_IN_ISNAND64:
10059 case BUILT_IN_ISNAND128:
10060 case BUILT_IN_ISNORMAL:
10061 case BUILT_IN_ISGREATER:
10062 case BUILT_IN_ISGREATEREQUAL:
10063 case BUILT_IN_ISLESS:
10064 case BUILT_IN_ISLESSEQUAL:
10065 case BUILT_IN_ISLESSGREATER:
10066 case BUILT_IN_ISUNORDERED:
10067 case BUILT_IN_VA_ARG_PACK:
10068 case BUILT_IN_VA_ARG_PACK_LEN:
10069 case BUILT_IN_VA_COPY:
10070 case BUILT_IN_TRAP:
10071 case BUILT_IN_SAVEREGS:
10072 case BUILT_IN_POPCOUNTL:
10073 case BUILT_IN_POPCOUNTLL:
10074 case BUILT_IN_POPCOUNTIMAX:
10075 case BUILT_IN_POPCOUNT:
10076 case BUILT_IN_PARITYL:
10077 case BUILT_IN_PARITYLL:
10078 case BUILT_IN_PARITYIMAX:
10079 case BUILT_IN_PARITY:
10080 case BUILT_IN_LABS:
10081 case BUILT_IN_LLABS:
10082 case BUILT_IN_PREFETCH:
10083 case BUILT_IN_ACC_ON_DEVICE:
10084 return true;
10085
10086 default:
10087 return is_simple_builtin (decl);
10088 }
10089
10090 return false;
10091 }