gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "diagnostic-core.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "fold-const-call.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "varasm.h"
47 #include "tree-object-size.h"
48 #include "realmpfr.h"
49 #include "cfgrtl.h"
50 #include "except.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "libfuncs.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "langhooks.h"
59 #include "value-prof.h"
60 #include "builtins.h"
61 #include "asan.h"
62 #include "cilk.h"
63 #include "tree-chkp.h"
64 #include "rtl-chkp.h"
65
66
67 struct target_builtins default_target_builtins;
68 #if SWITCHABLE_TARGET
69 struct target_builtins *this_target_builtins = &default_target_builtins;
70 #endif
71
72 /* Define the names of the builtin function types and codes. */
73 const char *const built_in_class_names[BUILT_IN_LAST]
74 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
75
76 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
77 const char * built_in_names[(int) END_BUILTINS] =
78 {
79 #include "builtins.def"
80 };
81
82 /* Set up an array of builtin_info_type, making sure each element's decl is
83 initialized to NULL_TREE. */
84 builtin_info_type builtin_info[(int)END_BUILTINS];
85
86 /* Non-zero if __builtin_constant_p should be folded right away. */
87 bool force_folding_builtin_constant_p;
88
89 static rtx c_readstr (const char *, machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 static rtx result_vector (int, rtx);
95 static void expand_builtin_prefetch (tree);
96 static rtx expand_builtin_apply_args (void);
97 static rtx expand_builtin_apply_args_1 (void);
98 static rtx expand_builtin_apply (rtx, rtx, rtx);
99 static void expand_builtin_return (rtx);
100 static enum type_class type_to_class (tree);
101 static rtx expand_builtin_classify_type (tree);
102 static void expand_errno_check (tree, rtx);
103 static rtx expand_builtin_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
107 static rtx expand_builtin_interclass_mathfn (tree, rtx);
108 static rtx expand_builtin_sincos (tree);
109 static rtx expand_builtin_cexpi (tree, rtx);
110 static rtx expand_builtin_int_roundingfn (tree, rtx);
111 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
112 static rtx expand_builtin_next_arg (void);
113 static rtx expand_builtin_va_start (tree);
114 static rtx expand_builtin_va_end (tree);
115 static rtx expand_builtin_va_copy (tree);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
121 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
122 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
123 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
124 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
125 machine_mode, int, tree);
126 static rtx expand_builtin_strcpy (tree, rtx);
127 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
128 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, machine_mode);
132 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
136 static rtx expand_builtin_alloca (tree, bool);
137 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
146 static bool validate_arg (const_tree, enum tree_code code);
147 static rtx expand_builtin_fabs (tree, rtx, rtx);
148 static rtx expand_builtin_signbit (tree, rtx);
149 static tree fold_builtin_strchr (location_t, tree, tree, tree);
150 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
152 static tree fold_builtin_strcmp (location_t, tree, tree);
153 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
154 static tree fold_builtin_isascii (location_t, tree);
155 static tree fold_builtin_toascii (location_t, tree);
156 static tree fold_builtin_isdigit (location_t, tree);
157 static tree fold_builtin_fabs (location_t, tree, tree);
158 static tree fold_builtin_abs (location_t, tree, tree);
159 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
160 enum tree_code);
161 static tree fold_builtin_0 (location_t, tree);
162 static tree fold_builtin_1 (location_t, tree, tree);
163 static tree fold_builtin_2 (location_t, tree, tree, tree);
164 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
165 static tree fold_builtin_varargs (location_t, tree, tree*, int);
166
167 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
168 static tree fold_builtin_strstr (location_t, tree, tree, tree);
169 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
170 static tree fold_builtin_strspn (location_t, tree, tree);
171 static tree fold_builtin_strcspn (location_t, tree, tree);
172
173 static rtx expand_builtin_object_size (tree);
174 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
175 enum built_in_function);
176 static void maybe_emit_chk_warning (tree, enum built_in_function);
177 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_free_warning (tree);
179 static tree fold_builtin_object_size (tree, tree);
180
181 unsigned HOST_WIDE_INT target_newline;
182 unsigned HOST_WIDE_INT target_percent;
183 static unsigned HOST_WIDE_INT target_c;
184 static unsigned HOST_WIDE_INT target_s;
185 char target_percent_c[3];
186 char target_percent_s[3];
187 char target_percent_s_newline[4];
188 static tree do_mpfr_remquo (tree, tree, tree);
189 static tree do_mpfr_lgamma_r (tree, tree, tree);
190 static void expand_builtin_sync_synchronize (void);
191
192 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or names one of the Cilk Plus runtime helpers checked below. */
193
194 static bool
195 is_builtin_name (const char *name)
196 {
197 if (strncmp (name, "__builtin_", 10) == 0)
198 return true;
199 if (strncmp (name, "__sync_", 7) == 0)
200 return true;
201 if (strncmp (name, "__atomic_", 9) == 0)
202 return true;
203 if (flag_cilkplus
204 && (!strcmp (name, "__cilkrts_detach")
205 || !strcmp (name, "__cilkrts_pop_frame")))
206 return true;
207 return false;
208 }
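
/* Illustrative calls (for exposition only, not used by the compiler):
   is_builtin_name ("__builtin_memcpy") and is_builtin_name ("__atomic_load")
   return true, while is_builtin_name ("memcpy") returns false; the
   __cilkrts_ names are only accepted when -fcilkplus is in effect.  */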
209
210
211 /* Return true if DECL is a function symbol representing a built-in. */
212
213 bool
214 is_builtin_fn (tree decl)
215 {
216 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
217 }
218
219 /* Return true if NODE should be considered for inline expansion regardless
220 of the optimization level. This is the case whenever a function is invoked
221 under its "internal" name, which normally begins with the prefix "__builtin". */
222
223 static bool
224 called_as_built_in (tree node)
225 {
226 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
227 we want the name used to call the function, not the name it
228 will have. */
229 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
230 return is_builtin_name (name);
231 }
232
233 /* Compute values M and N such that M divides (address of EXP - N) and such
234 that N < M. If these numbers can be determined, store M in alignp and N in
235 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
236 *alignp and any bit-offset to *bitposp.
237
238 Note that the address (and thus the alignment) computed here is based
239 on the address to which a symbol resolves, whereas DECL_ALIGN is based
240 on the address at which an object is actually located. These two
241 addresses are not always the same. For example, on ARM targets,
242 the address &foo of a Thumb function foo() has the lowest bit set,
243 whereas foo() itself starts on an even address.
244
245 If ADDR_P is true we are taking the address of the memory reference EXP
246 and thus cannot rely on the access taking place. */
247
248 static bool
249 get_object_alignment_2 (tree exp, unsigned int *alignp,
250 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
251 {
252 HOST_WIDE_INT bitsize, bitpos;
253 tree offset;
254 machine_mode mode;
255 int unsignedp, volatilep;
256 unsigned int align = BITS_PER_UNIT;
257 bool known_alignment = false;
258
259 /* Get the innermost object and the constant (bitpos) and possibly
260 variable (offset) offset of the access. */
261 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
262 &mode, &unsignedp, &volatilep, true);
263
264 /* Extract alignment information from the innermost object and
265 possibly adjust bitpos and offset. */
266 if (TREE_CODE (exp) == FUNCTION_DECL)
267 {
268 /* Function addresses can encode extra information besides their
269 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
270 allows the low bit to be used as a virtual bit, we know
271 that the address itself must be at least 2-byte aligned. */
272 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
273 align = 2 * BITS_PER_UNIT;
274 }
275 else if (TREE_CODE (exp) == LABEL_DECL)
276 ;
277 else if (TREE_CODE (exp) == CONST_DECL)
278 {
279 /* The alignment of a CONST_DECL is determined by its initializer. */
280 exp = DECL_INITIAL (exp);
281 align = TYPE_ALIGN (TREE_TYPE (exp));
282 if (CONSTANT_CLASS_P (exp))
283 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
284
285 known_alignment = true;
286 }
287 else if (DECL_P (exp))
288 {
289 align = DECL_ALIGN (exp);
290 known_alignment = true;
291 }
292 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
293 {
294 align = TYPE_ALIGN (TREE_TYPE (exp));
295 }
296 else if (TREE_CODE (exp) == INDIRECT_REF
297 || TREE_CODE (exp) == MEM_REF
298 || TREE_CODE (exp) == TARGET_MEM_REF)
299 {
300 tree addr = TREE_OPERAND (exp, 0);
301 unsigned ptr_align;
302 unsigned HOST_WIDE_INT ptr_bitpos;
303 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
304
305 /* If the address is explicitly aligned, handle that. */
306 if (TREE_CODE (addr) == BIT_AND_EXPR
307 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
308 {
309 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
310 ptr_bitmask *= BITS_PER_UNIT;
311 align = ptr_bitmask & -ptr_bitmask;
312 addr = TREE_OPERAND (addr, 0);
313 }
314
315 known_alignment
316 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
317 align = MAX (ptr_align, align);
318
319 /* Re-apply explicit alignment to the bitpos. */
320 ptr_bitpos &= ptr_bitmask;
321
322 /* The alignment of the pointer operand in a TARGET_MEM_REF
323 has to take the variable offset parts into account. */
324 if (TREE_CODE (exp) == TARGET_MEM_REF)
325 {
326 if (TMR_INDEX (exp))
327 {
328 unsigned HOST_WIDE_INT step = 1;
329 if (TMR_STEP (exp))
330 step = TREE_INT_CST_LOW (TMR_STEP (exp));
331 align = MIN (align, (step & -step) * BITS_PER_UNIT);
332 }
333 if (TMR_INDEX2 (exp))
334 align = BITS_PER_UNIT;
335 known_alignment = false;
336 }
337
338 /* When EXP is an actual memory reference then we can use
339 TYPE_ALIGN of a pointer indirection to derive alignment.
340 Do so only if get_pointer_alignment_1 did not reveal absolute
341 alignment knowledge and if using that alignment would
342 improve the situation. */
343 if (!addr_p && !known_alignment
344 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
345 align = TYPE_ALIGN (TREE_TYPE (exp));
346 else
347 {
348 /* Else adjust bitpos accordingly. */
349 bitpos += ptr_bitpos;
350 if (TREE_CODE (exp) == MEM_REF
351 || TREE_CODE (exp) == TARGET_MEM_REF)
352 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
353 }
354 }
355 else if (TREE_CODE (exp) == STRING_CST)
356 {
357 /* STRING_CSTs are the only constant objects we allow not to be
358 wrapped inside a CONST_DECL. */
359 align = TYPE_ALIGN (TREE_TYPE (exp));
360 if (CONSTANT_CLASS_P (exp))
361 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
362
363 known_alignment = true;
364 }
365
366 /* If there is a non-constant offset part extract the maximum
367 alignment that can prevail. */
368 if (offset)
369 {
370 unsigned int trailing_zeros = tree_ctz (offset);
371 if (trailing_zeros < HOST_BITS_PER_INT)
372 {
373 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
374 if (inner)
375 align = MIN (align, inner);
376 }
377 }
378
379 *alignp = align;
380 *bitposp = bitpos & (*alignp - 1);
381 return known_alignment;
382 }
383
384 /* For a memory reference expression EXP compute values M and N such that M
385 divides (&EXP - N) and such that N < M. If these numbers can be determined,
386 store M in alignp and N in *BITPOSP and return true. Otherwise return false
387 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
388
389 bool
390 get_object_alignment_1 (tree exp, unsigned int *alignp,
391 unsigned HOST_WIDE_INT *bitposp)
392 {
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
394 }
395
396 /* Return the alignment in bits of EXP, an object. */
397
398 unsigned int
399 get_object_alignment (tree exp)
400 {
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
403
404 get_object_alignment_1 (exp, &align, &bitpos);
405
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
408
409 if (bitpos != 0)
410 align = (bitpos & -bitpos);
411 return align;
412 }
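
/* Worked example (illustrative numbers, not taken from a real target): if
   get_object_alignment_1 reports align == 128 and bitpos == 32, the address
   is known to be 32 bits past a 128-bit boundary, so the strongest alignment
   that can be guaranteed for the object itself is bitpos & -bitpos == 32
   bits, which is what get_object_alignment returns.  */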
413
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in alignp and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
418
419 If EXP is not a pointer, false is returned too. */
420
421 bool
422 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
424 {
425 STRIP_NOPS (exp);
426
427 if (TREE_CODE (exp) == ADDR_EXPR)
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 {
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
439 {
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
442 {
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
446 }
447 }
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
451 }
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 {
455 unsigned int ptr_align, ptr_misalign;
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 {
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
462 /* We cannot really tell whether this result is an approximation. */
463 return true;
464 }
465 else
466 {
467 *bitposp = 0;
468 *alignp = BITS_PER_UNIT;
469 return false;
470 }
471 }
472 else if (TREE_CODE (exp) == INTEGER_CST)
473 {
474 *alignp = BIGGEST_ALIGNMENT;
475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
478 }
479
480 *bitposp = 0;
481 *alignp = BITS_PER_UNIT;
482 return false;
483 }
484
485 /* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
487 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
488
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
491
492 unsigned int
493 get_pointer_alignment (tree exp)
494 {
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
497
498 get_pointer_alignment_1 (exp, &align, &bitpos);
499
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
502
503 if (bitpos != 0)
504 align = (bitpos & -bitpos);
505
506 return align;
507 }
508
509 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
510 way, because it could contain a zero byte in the middle.
511 TREE_STRING_LENGTH is the size of the character array, not the string.
512
513 ONLY_VALUE should be nonzero if the result is not going to be emitted
514 into the instruction stream and zero if it is going to be expanded.
515 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
516 is returned, otherwise NULL, since
517 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
518 evaluate the side-effects.
519
520 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
521 accesses. Note that this implies the result is not going to be emitted
522 into the instruction stream.
523
524 The value returned is of type `ssizetype'.
525
526 Unfortunately, string_constant can't access the values of const char
527 arrays with initializers, so neither can we do so here. */
528
529 tree
530 c_strlen (tree src, int only_value)
531 {
532 tree offset_node;
533 HOST_WIDE_INT offset;
534 int max;
535 const char *ptr;
536 location_t loc;
537
538 STRIP_NOPS (src);
539 if (TREE_CODE (src) == COND_EXPR
540 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
541 {
542 tree len1, len2;
543
544 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
545 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
546 if (tree_int_cst_equal (len1, len2))
547 return len1;
548 }
549
550 if (TREE_CODE (src) == COMPOUND_EXPR
551 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
552 return c_strlen (TREE_OPERAND (src, 1), only_value);
553
554 loc = EXPR_LOC_OR_LOC (src, input_location);
555
556 src = string_constant (src, &offset_node);
557 if (src == 0)
558 return NULL_TREE;
559
560 max = TREE_STRING_LENGTH (src) - 1;
561 ptr = TREE_STRING_POINTER (src);
562
563 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
564 {
565 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
566 compute the offset to the following null if we don't know where to
567 start searching for it. */
568 int i;
569
570 for (i = 0; i < max; i++)
571 if (ptr[i] == 0)
572 return NULL_TREE;
573
574 /* We don't know the starting offset, but we do know that the string
575 has no internal zero bytes. We can assume that the offset falls
576 within the bounds of the string; otherwise, the programmer deserves
577 what he gets. Subtract the offset from the length of the string,
578 and return that. This would perhaps not be valid if we were dealing
579 with named arrays in addition to literal string constants. */
580
581 return size_diffop_loc (loc, size_int (max), offset_node);
582 }
583
584 /* We have a known offset into the string. Start searching there for
585 a null character if we can represent it as a single HOST_WIDE_INT. */
586 if (offset_node == 0)
587 offset = 0;
588 else if (! tree_fits_shwi_p (offset_node))
589 offset = -1;
590 else
591 offset = tree_to_shwi (offset_node);
592
593 /* If the offset is known to be out of bounds, warn, and call strlen at
594 runtime. */
595 if (offset < 0 || offset > max)
596 {
597 /* Suppress multiple warnings for propagated constant strings. */
598 if (only_value != 2
599 && !TREE_NO_WARNING (src))
600 {
601 warning_at (loc, 0, "offset outside bounds of constant string");
602 TREE_NO_WARNING (src) = 1;
603 }
604 return NULL_TREE;
605 }
606
607 /* Use strlen to search for the first zero byte. Since any strings
608 constructed with build_string will have nulls appended, we win even
609 if we get handed something like (char[4])"abcd".
610
611 Since OFFSET is our starting index into the string, no further
612 calculation is needed. */
613 return ssize_int (strlen (ptr + offset));
614 }
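
/* Illustrative behavior (assumed inputs, for exposition): for
   SRC == &"foobar"[2] the string constant is "foobar" with offset 2, so the
   result is ssize_int (4); with a constant offset of 9, which is past the
   end of the array, a warning is emitted and NULL_TREE is returned so that
   strlen is called at run time instead.  */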
615
616 /* Return a constant integer corresponding to target reading
617 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
618
619 static rtx
620 c_readstr (const char *str, machine_mode mode)
621 {
622 HOST_WIDE_INT ch;
623 unsigned int i, j;
624 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
625
626 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
627 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
628 / HOST_BITS_PER_WIDE_INT;
629
630 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
631 for (i = 0; i < len; i++)
632 tmp[i] = 0;
633
634 ch = 1;
635 for (i = 0; i < GET_MODE_SIZE (mode); i++)
636 {
637 j = i;
638 if (WORDS_BIG_ENDIAN)
639 j = GET_MODE_SIZE (mode) - i - 1;
640 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
641 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
642 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
643 j *= BITS_PER_UNIT;
644
645 if (ch)
646 ch = (unsigned char) str[i];
647 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
648 }
649
650 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
651 return immed_wide_int_const (c, mode);
652 }
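
/* Worked example (illustrative): with STR == "abcd" and MODE == SImode, a
   little-endian target produces the constant 0x64636261 ('a' in the least
   significant byte), while a typical big-endian target produces 0x61626364;
   i.e. the result is the string as it would appear when read from target
   memory as an integer of mode MODE.  */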
653
654 /* Cast a target constant CST to target CHAR, and if that value fits into the
655 host char type, return zero and put the value into the variable pointed to
656 by P. */
657
658 static int
659 target_char_cast (tree cst, char *p)
660 {
661 unsigned HOST_WIDE_INT val, hostval;
662
663 if (TREE_CODE (cst) != INTEGER_CST
664 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
665 return 1;
666
667 /* Do not care if it fits or not right here. */
668 val = TREE_INT_CST_LOW (cst);
669
670 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
671 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
672
673 hostval = val;
674 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
675 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
676
677 if (val != hostval)
678 return 1;
679
680 *p = hostval;
681 return 0;
682 }
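
/* Worked example (hypothetical target, for exposition): with
   CHAR_TYPE_SIZE == 16 and an 8-bit host char, a CST of 0x41 is stored in *P
   as 'A' and 0 is returned, whereas a CST of 0x141 survives the target cast
   but not the host cast, so 1 is returned and *P is left untouched.  */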
683
684 /* Similar to save_expr, but assumes that arbitrary code is not executed
685 in between the multiple evaluations. In particular, we assume that a
686 non-addressable local variable will not be modified. */
687
688 static tree
689 builtin_save_expr (tree exp)
690 {
691 if (TREE_CODE (exp) == SSA_NAME
692 || (TREE_ADDRESSABLE (exp) == 0
693 && (TREE_CODE (exp) == PARM_DECL
694 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
695 return exp;
696
697 return save_expr (exp);
698 }
699
700 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
701 times to get the address of either a higher stack frame, or a return
702 address located within it (depending on FNDECL_CODE). */
703
704 static rtx
705 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
706 {
707 int i;
708 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
709 if (tem == NULL_RTX)
710 {
711 /* For a zero count with __builtin_return_address, we don't care what
712 frame address we return, because target-specific definitions will
713 override us. Therefore frame pointer elimination is OK, and using
714 the soft frame pointer is OK.
715
716 For a nonzero count, or a zero count with __builtin_frame_address,
717 we require a stable offset from the current frame pointer to the
718 previous one, so we must use the hard frame pointer, and
719 we must disable frame pointer elimination. */
720 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
721 tem = frame_pointer_rtx;
722 else
723 {
724 tem = hard_frame_pointer_rtx;
725
726 /* Tell reload not to eliminate the frame pointer. */
727 crtl->accesses_prior_frames = 1;
728 }
729 }
730
731 if (count > 0)
732 SETUP_FRAME_ADDRESSES ();
733
734 /* On the SPARC, the return address is not in the frame, it is in a
735 register. There is no way to access it off of the current frame
736 pointer, but it can be accessed off the previous frame pointer by
737 reading the value from the register window save area. */
738 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
739 count--;
740
741 /* Scan back COUNT frames to the specified frame. */
742 for (i = 0; i < count; i++)
743 {
744 /* Assume the dynamic chain pointer is in the word that the
745 frame address points to, unless otherwise specified. */
746 tem = DYNAMIC_CHAIN_ADDRESS (tem);
747 tem = memory_address (Pmode, tem);
748 tem = gen_frame_mem (Pmode, tem);
749 tem = copy_to_reg (tem);
750 }
751
752 /* For __builtin_frame_address, return what we've got. But, on
753 the SPARC for example, we may have to add a bias. */
754 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
755 return FRAME_ADDR_RTX (tem);
756
757 /* For __builtin_return_address, get the return address from that frame. */
758 #ifdef RETURN_ADDR_RTX
759 tem = RETURN_ADDR_RTX (count, tem);
760 #else
761 tem = memory_address (Pmode,
762 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
763 tem = gen_frame_mem (Pmode, tem);
764 #endif
765 return tem;
766 }
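
/* Source-level forms handled by the expander above (a hedged sketch; the
   RTL actually produced depends on the target macros referenced above):

     void *ra = __builtin_return_address (0);   /- count == 0 -/
     void *fp = __builtin_frame_address (2);    /- walk two frames up -/

   A nonzero COUNT walks the dynamic chain through DYNAMIC_CHAIN_ADDRESS and
   forces use of the hard frame pointer.  */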
767
768 /* Alias set used for setjmp buffer. */
769 static alias_set_type setjmp_alias_set = -1;
770
771 /* Construct the leading half of a __builtin_setjmp call. Control will
772 return to RECEIVER_LABEL. This is also called directly by the SJLJ
773 exception handling code. */
774
775 void
776 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
777 {
778 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
779 rtx stack_save;
780 rtx mem;
781
782 if (setjmp_alias_set == -1)
783 setjmp_alias_set = new_alias_set ();
784
785 buf_addr = convert_memory_address (Pmode, buf_addr);
786
787 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
788
789 /* We store the frame pointer and the address of receiver_label in
790 the buffer and use the rest of it for the stack save area, which
791 is machine-dependent. */
792
793 mem = gen_rtx_MEM (Pmode, buf_addr);
794 set_mem_alias_set (mem, setjmp_alias_set);
795 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
796
797 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
798 GET_MODE_SIZE (Pmode)));
799 set_mem_alias_set (mem, setjmp_alias_set);
800
801 emit_move_insn (validize_mem (mem),
802 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
803
804 stack_save = gen_rtx_MEM (sa_mode,
805 plus_constant (Pmode, buf_addr,
806 2 * GET_MODE_SIZE (Pmode)));
807 set_mem_alias_set (stack_save, setjmp_alias_set);
808 emit_stack_save (SAVE_NONLOCAL, &stack_save);
809
810 /* If there is further processing to do, do it. */
811 if (targetm.have_builtin_setjmp_setup ())
812 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
813
814 /* We have a nonlocal label. */
815 cfun->has_nonlocal_label = 1;
816 }
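
/* Sketch of the buffer layout set up above (illustrative; offsets are in
   units of GET_MODE_SIZE (Pmode)):

     buf[0]    frame value from targetm.builtin_setjmp_frame_value ()
     buf[1]    address of RECEIVER_LABEL
     buf[2..]  stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp below reads the words back from the same offsets.  */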
817
818 /* Construct the trailing part of a __builtin_setjmp call. This is
819 also called directly by the SJLJ exception handling code.
820 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
821
822 void
823 expand_builtin_setjmp_receiver (rtx receiver_label)
824 {
825 rtx chain;
826
827 /* Mark the FP as used when we get here, so we have to make sure it's
828 marked as used by this function. */
829 emit_use (hard_frame_pointer_rtx);
830
831 /* Mark the static chain as clobbered here so life information
832 doesn't get messed up for it. */
833 chain = targetm.calls.static_chain (current_function_decl, true);
834 if (chain && REG_P (chain))
835 emit_clobber (chain);
836
837 /* Now put in the code to restore the frame pointer, and argument
838 pointer, if needed. */
839 if (! targetm.have_nonlocal_goto ())
840 {
841 /* First adjust our frame pointer to its actual value. It was
842 previously set to the start of the virtual area corresponding to
843 the stacked variables when we branched here and now needs to be
844 adjusted to the actual hardware fp value.
845
846 Assignments to virtual registers are converted by
847 instantiate_virtual_regs into the corresponding assignment
848 to the underlying register (fp in this case) that makes
849 the original assignment true.
850 So the following insn will actually be decrementing fp by
851 STARTING_FRAME_OFFSET. */
852 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
853
854 /* Restoring the frame pointer also modifies the hard frame pointer.
855 Mark it used (so that the previous assignment remains live once
856 the frame pointer is eliminated) and clobbered (to represent the
857 implicit update from the assignment). */
858 emit_use (hard_frame_pointer_rtx);
859 emit_clobber (hard_frame_pointer_rtx);
860 }
861
862 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
863 {
864 #ifdef ELIMINABLE_REGS
865 /* If the argument pointer can be eliminated in favor of the
866 frame pointer, we don't need to restore it. We assume here
867 that if such an elimination is present, it can always be used.
868 This is the case on all known machines; if we don't make this
869 assumption, we do unnecessary saving on many machines. */
870 size_t i;
871 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
872
873 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
874 if (elim_regs[i].from == ARG_POINTER_REGNUM
875 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
876 break;
877
878 if (i == ARRAY_SIZE (elim_regs))
879 #endif
880 {
881 /* Now restore our arg pointer from the address at which it
882 was saved in our stack frame. */
883 emit_move_insn (crtl->args.internal_arg_pointer,
884 copy_to_reg (get_arg_pointer_save_area ()));
885 }
886 }
887
888 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
889 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
890 else if (targetm.have_nonlocal_goto_receiver ())
891 emit_insn (targetm.gen_nonlocal_goto_receiver ());
892 else
893 { /* Nothing */ }
894
895 /* We must not allow the code we just generated to be reordered by
896 scheduling. Specifically, the update of the frame pointer must
897 happen immediately, not later. */
898 emit_insn (gen_blockage ());
899 }
900
901 /* __builtin_longjmp is passed a pointer to an array of five words (not
902 all will be used on all machines). It operates similarly to the C
903 library function of the same name, but is more efficient. Much of
904 the code below is copied from the handling of non-local gotos. */
905
906 static void
907 expand_builtin_longjmp (rtx buf_addr, rtx value)
908 {
909 rtx fp, lab, stack;
910 rtx_insn *insn, *last;
911 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
912
913 /* DRAP is needed for stack realign if longjmp is expanded to current
914 function */
915 if (SUPPORTS_STACK_ALIGNMENT)
916 crtl->need_drap = true;
917
918 if (setjmp_alias_set == -1)
919 setjmp_alias_set = new_alias_set ();
920
921 buf_addr = convert_memory_address (Pmode, buf_addr);
922
923 buf_addr = force_reg (Pmode, buf_addr);
924
925 /* We require the user to pass a second argument of 1, because that is
926 what builtin_setjmp will return. */
927 gcc_assert (value == const1_rtx);
928
929 last = get_last_insn ();
930 if (targetm.have_builtin_longjmp ())
931 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
932 else
933 {
934 fp = gen_rtx_MEM (Pmode, buf_addr);
935 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
936 GET_MODE_SIZE (Pmode)));
937
938 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
939 2 * GET_MODE_SIZE (Pmode)));
940 set_mem_alias_set (fp, setjmp_alias_set);
941 set_mem_alias_set (lab, setjmp_alias_set);
942 set_mem_alias_set (stack, setjmp_alias_set);
943
944 /* Pick up FP, label, and SP from the block and jump. This code is
945 from expand_goto in stmt.c; see there for detailed comments. */
946 if (targetm.have_nonlocal_goto ())
947 /* We have to pass a value to the nonlocal_goto pattern that will
948 get copied into the static_chain pointer, but it does not matter
949 what that value is, because builtin_setjmp does not use it. */
950 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
951 else
952 {
953 lab = copy_to_reg (lab);
954
955 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
956 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
957
958 emit_move_insn (hard_frame_pointer_rtx, fp);
959 emit_stack_restore (SAVE_NONLOCAL, stack);
960
961 emit_use (hard_frame_pointer_rtx);
962 emit_use (stack_pointer_rtx);
963 emit_indirect_jump (lab);
964 }
965 }
966
967 /* Search backwards and mark the jump insn as a non-local goto.
968 Note that this precludes the use of __builtin_longjmp to a
969 __builtin_setjmp target in the same function. However, we've
970 already cautioned the user that these functions are for
971 internal exception handling use only. */
972 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
973 {
974 gcc_assert (insn != last);
975
976 if (JUMP_P (insn))
977 {
978 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
979 break;
980 }
981 else if (CALL_P (insn))
982 break;
983 }
984 }
985
986 static inline bool
987 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
988 {
989 return (iter->i < iter->n);
990 }
991
992 /* This function validates the types of a function call argument list
993 against a specified list of tree_codes. If the last specifier is a 0,
994 that represents an ellipsis; otherwise the last specifier must be a
995 VOID_TYPE. */
996
997 static bool
998 validate_arglist (const_tree callexpr, ...)
999 {
1000 enum tree_code code;
1001 bool res = 0;
1002 va_list ap;
1003 const_call_expr_arg_iterator iter;
1004 const_tree arg;
1005
1006 va_start (ap, callexpr);
1007 init_const_call_expr_arg_iterator (callexpr, &iter);
1008
1009 do
1010 {
1011 code = (enum tree_code) va_arg (ap, int);
1012 switch (code)
1013 {
1014 case 0:
1015 /* This signifies an ellipsis; any further arguments are all ok. */
1016 res = true;
1017 goto end;
1018 case VOID_TYPE:
1019 /* This signifies an endlink: if no arguments remain, return
1020 true; otherwise return false. */
1021 res = !more_const_call_expr_args_p (&iter);
1022 goto end;
1023 default:
1024 /* If no parameters remain or the parameter's code does not
1025 match the specified code, return false. Otherwise continue
1026 checking any remaining arguments. */
1027 arg = next_const_call_expr_arg (&iter);
1028 if (!validate_arg (arg, code))
1029 goto end;
1030 break;
1031 }
1032 }
1033 while (1);
1034
1035 /* We need gotos here since we can only have one VA_CLOSE in a
1036 function. */
1037 end: ;
1038 va_end (ap);
1039
1040 return res;
1041 }
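
/* Typical uses (illustrative):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts a pointer followed by any number of further arguments.  */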
1042
1043 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1044 and the address of the save area. */
1045
1046 static rtx
1047 expand_builtin_nonlocal_goto (tree exp)
1048 {
1049 tree t_label, t_save_area;
1050 rtx r_label, r_save_area, r_fp, r_sp;
1051 rtx_insn *insn;
1052
1053 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1054 return NULL_RTX;
1055
1056 t_label = CALL_EXPR_ARG (exp, 0);
1057 t_save_area = CALL_EXPR_ARG (exp, 1);
1058
1059 r_label = expand_normal (t_label);
1060 r_label = convert_memory_address (Pmode, r_label);
1061 r_save_area = expand_normal (t_save_area);
1062 r_save_area = convert_memory_address (Pmode, r_save_area);
1063 /* Copy the address of the save location to a register just in case it was
1064 based on the frame pointer. */
1065 r_save_area = copy_to_reg (r_save_area);
1066 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1067 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1068 plus_constant (Pmode, r_save_area,
1069 GET_MODE_SIZE (Pmode)));
1070
1071 crtl->has_nonlocal_goto = 1;
1072
1073 /* ??? We no longer need to pass the static chain value, afaik. */
1074 if (targetm.have_nonlocal_goto ())
1075 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1076 else
1077 {
1078 r_label = copy_to_reg (r_label);
1079
1080 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1081 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1082
1083 /* Restore frame pointer for containing function. */
1084 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1085 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1086
1087 /* USE of hard_frame_pointer_rtx added for consistency;
1088 not clear if really needed. */
1089 emit_use (hard_frame_pointer_rtx);
1090 emit_use (stack_pointer_rtx);
1091
1092 /* If the architecture is using a GP register, we must
1093 conservatively assume that the target function makes use of it.
1094 The prologue of functions with nonlocal gotos must therefore
1095 initialize the GP register to the appropriate value, and we
1096 must then make sure that this value is live at the point
1097 of the jump. (Note that this doesn't necessarily apply
1098 to targets with a nonlocal_goto pattern; they are free
1099 to implement it in their own way. Note also that this is
1100 a no-op if the GP register is a global invariant.) */
1101 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1102 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1103 emit_use (pic_offset_table_rtx);
1104
1105 emit_indirect_jump (r_label);
1106 }
1107
1108 /* Search backwards to the jump insn and mark it as a
1109 non-local goto. */
1110 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1111 {
1112 if (JUMP_P (insn))
1113 {
1114 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1115 break;
1116 }
1117 else if (CALL_P (insn))
1118 break;
1119 }
1120
1121 return const0_rtx;
1122 }
1123
1124 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1125 (not all will be used on all machines) that was passed to __builtin_setjmp.
1126 It updates the stack pointer in that block to the current value. This is
1127 also called directly by the SJLJ exception handling code. */
1128
1129 void
1130 expand_builtin_update_setjmp_buf (rtx buf_addr)
1131 {
1132 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1133 rtx stack_save
1134 = gen_rtx_MEM (sa_mode,
1135 memory_address
1136 (sa_mode,
1137 plus_constant (Pmode, buf_addr,
1138 2 * GET_MODE_SIZE (Pmode))));
1139
1140 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1141 }
1142
1143 /* Expand a call to __builtin_prefetch. For a target that does not support
1144 data prefetch, evaluate the memory address argument in case it has side
1145 effects. */
1146
1147 static void
1148 expand_builtin_prefetch (tree exp)
1149 {
1150 tree arg0, arg1, arg2;
1151 int nargs;
1152 rtx op0, op1, op2;
1153
1154 if (!validate_arglist (exp, POINTER_TYPE, 0))
1155 return;
1156
1157 arg0 = CALL_EXPR_ARG (exp, 0);
1158
1159 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1160 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1161 locality). */
1162 nargs = call_expr_nargs (exp);
1163 if (nargs > 1)
1164 arg1 = CALL_EXPR_ARG (exp, 1);
1165 else
1166 arg1 = integer_zero_node;
1167 if (nargs > 2)
1168 arg2 = CALL_EXPR_ARG (exp, 2);
1169 else
1170 arg2 = integer_three_node;
1171
1172 /* Argument 0 is an address. */
1173 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1174
1175 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1176 if (TREE_CODE (arg1) != INTEGER_CST)
1177 {
1178 error ("second argument to %<__builtin_prefetch%> must be a constant");
1179 arg1 = integer_zero_node;
1180 }
1181 op1 = expand_normal (arg1);
1182 /* Argument 1 must be either zero or one. */
1183 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1184 {
1185 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1186 " using zero");
1187 op1 = const0_rtx;
1188 }
1189
1190 /* Argument 2 (locality) must be a compile-time constant int. */
1191 if (TREE_CODE (arg2) != INTEGER_CST)
1192 {
1193 error ("third argument to %<__builtin_prefetch%> must be a constant");
1194 arg2 = integer_zero_node;
1195 }
1196 op2 = expand_normal (arg2);
1197 /* Argument 2 must be 0, 1, 2, or 3. */
1198 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1199 {
1200 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1201 op2 = const0_rtx;
1202 }
1203
1204 if (targetm.have_prefetch ())
1205 {
1206 struct expand_operand ops[3];
1207
1208 create_address_operand (&ops[0], op0);
1209 create_integer_operand (&ops[1], INTVAL (op1));
1210 create_integer_operand (&ops[2], INTVAL (op2));
1211 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1212 return;
1213 }
1214
1215 /* Don't do anything with direct references to volatile memory, but
1216 generate code to handle other side effects. */
1217 if (!MEM_P (op0) && side_effects_p (op0))
1218 emit_insn (op0);
1219 }
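
/* Source-level form handled above (illustrative):

     __builtin_prefetch (p, 0, 3);   /- read prefetch, maximum locality -/

   The read/write and locality arguments must be compile-time constants;
   out-of-range values are diagnosed and replaced with zero, as done above.  */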
1220
1221 /* Get a MEM rtx for expression EXP which is the address of an operand
1222 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1223 the maximum length of the block of memory that might be accessed or
1224 NULL if unknown. */
1225
1226 static rtx
1227 get_memory_rtx (tree exp, tree len)
1228 {
1229 tree orig_exp = exp;
1230 rtx addr, mem;
1231
1232 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1233 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1234 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1235 exp = TREE_OPERAND (exp, 0);
1236
1237 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1238 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1239
1240 /* Get an expression we can use to find the attributes to assign to MEM.
1241 First remove any nops. */
1242 while (CONVERT_EXPR_P (exp)
1243 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1244 exp = TREE_OPERAND (exp, 0);
1245
1246 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1247 (as builtin stringops may alias with anything). */
1248 exp = fold_build2 (MEM_REF,
1249 build_array_type (char_type_node,
1250 build_range_type (sizetype,
1251 size_one_node, len)),
1252 exp, build_int_cst (ptr_type_node, 0));
1253
1254 /* If the MEM_REF has no acceptable address, try to get the base object
1255 from the original address we got, and build an all-aliasing
1256 unknown-sized access to that one. */
1257 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1258 set_mem_attributes (mem, exp, 0);
1259 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1260 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1261 0))))
1262 {
1263 exp = build_fold_addr_expr (exp);
1264 exp = fold_build2 (MEM_REF,
1265 build_array_type (char_type_node,
1266 build_range_type (sizetype,
1267 size_zero_node,
1268 NULL)),
1269 exp, build_int_cst (ptr_type_node, 0));
1270 set_mem_attributes (mem, exp, 0);
1271 }
1272 set_mem_alias_set (mem, 0);
1273 return mem;
1274 }
1275 \f
1276 /* Built-in functions to perform an untyped call and return. */
1277
1278 #define apply_args_mode \
1279 (this_target_builtins->x_apply_args_mode)
1280 #define apply_result_mode \
1281 (this_target_builtins->x_apply_result_mode)
1282
1283 /* Return the size required for the block returned by __builtin_apply_args,
1284 and initialize apply_args_mode. */
1285
1286 static int
1287 apply_args_size (void)
1288 {
1289 static int size = -1;
1290 int align;
1291 unsigned int regno;
1292 machine_mode mode;
1293
1294 /* The values computed by this function never change. */
1295 if (size < 0)
1296 {
1297 /* The first value is the incoming arg-pointer. */
1298 size = GET_MODE_SIZE (Pmode);
1299
1300 /* The second value is the structure value address unless this is
1301 passed as an "invisible" first argument. */
1302 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1303 size += GET_MODE_SIZE (Pmode);
1304
1305 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1306 if (FUNCTION_ARG_REGNO_P (regno))
1307 {
1308 mode = targetm.calls.get_raw_arg_mode (regno);
1309
1310 gcc_assert (mode != VOIDmode);
1311
1312 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1313 if (size % align != 0)
1314 size = CEIL (size, align) * align;
1315 size += GET_MODE_SIZE (mode);
1316 apply_args_mode[regno] = mode;
1317 }
1318 else
1319 {
1320 apply_args_mode[regno] = VOIDmode;
1321 }
1322 }
1323 return size;
1324 }
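
/* Sketch of the __builtin_apply_args block whose size is computed above
   (illustrative; the exact offsets depend on the argument register modes and
   their alignment):

     offset 0                  incoming arg pointer (Pmode)
     GET_MODE_SIZE (Pmode)     structure value address, unless it is passed
                               as an "invisible" first argument
     ...                       one slot per argument register, each padded
                               to the alignment of its mode

   expand_builtin_apply_args_1 below fills the block in the same order.  */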
1325
1326 /* Return the size required for the block returned by __builtin_apply,
1327 and initialize apply_result_mode. */
1328
1329 static int
1330 apply_result_size (void)
1331 {
1332 static int size = -1;
1333 int align, regno;
1334 machine_mode mode;
1335
1336 /* The values computed by this function never change. */
1337 if (size < 0)
1338 {
1339 size = 0;
1340
1341 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1342 if (targetm.calls.function_value_regno_p (regno))
1343 {
1344 mode = targetm.calls.get_raw_result_mode (regno);
1345
1346 gcc_assert (mode != VOIDmode);
1347
1348 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1349 if (size % align != 0)
1350 size = CEIL (size, align) * align;
1351 size += GET_MODE_SIZE (mode);
1352 apply_result_mode[regno] = mode;
1353 }
1354 else
1355 apply_result_mode[regno] = VOIDmode;
1356
1357 /* Allow targets that use untyped_call and untyped_return to override
1358 the size so that machine-specific information can be stored here. */
1359 #ifdef APPLY_RESULT_SIZE
1360 size = APPLY_RESULT_SIZE;
1361 #endif
1362 }
1363 return size;
1364 }
1365
1366 /* Create a vector describing the result block RESULT. If SAVEP is true,
1367 the result block is used to save the values; otherwise it is used to
1368 restore the values. */
1369
1370 static rtx
1371 result_vector (int savep, rtx result)
1372 {
1373 int regno, size, align, nelts;
1374 machine_mode mode;
1375 rtx reg, mem;
1376 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1377
1378 size = nelts = 0;
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if ((mode = apply_result_mode[regno]) != VOIDmode)
1381 {
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1386 mem = adjust_address (result, mode, size);
1387 savevec[nelts++] = (savep
1388 ? gen_rtx_SET (mem, reg)
1389 : gen_rtx_SET (reg, mem));
1390 size += GET_MODE_SIZE (mode);
1391 }
1392 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1393 }
1394
1395 /* Save the state required to perform an untyped call with the same
1396 arguments as were passed to the current function. */
1397
1398 static rtx
1399 expand_builtin_apply_args_1 (void)
1400 {
1401 rtx registers, tem;
1402 int size, align, regno;
1403 machine_mode mode;
1404 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1405
1406 /* Create a block where the arg-pointer, structure value address,
1407 and argument registers can be saved. */
1408 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1409
1410 /* Walk past the arg-pointer and structure value address. */
1411 size = GET_MODE_SIZE (Pmode);
1412 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1413 size += GET_MODE_SIZE (Pmode);
1414
1415 /* Save each register used in calling a function to the block. */
1416 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1417 if ((mode = apply_args_mode[regno]) != VOIDmode)
1418 {
1419 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1420 if (size % align != 0)
1421 size = CEIL (size, align) * align;
1422
1423 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1424
1425 emit_move_insn (adjust_address (registers, mode, size), tem);
1426 size += GET_MODE_SIZE (mode);
1427 }
1428
1429 /* Save the arg pointer to the block. */
1430 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1431 /* We need the pointer as the caller actually passed the arguments to us,
1432 not as we might have pretended they were passed. Make sure it's a valid
1433 operand, as emit_move_insn isn't expected to handle a PLUS. */
1434 if (STACK_GROWS_DOWNWARD)
1435 tem
1436 = force_operand (plus_constant (Pmode, tem,
1437 crtl->args.pretend_args_size),
1438 NULL_RTX);
1439 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1440
1441 size = GET_MODE_SIZE (Pmode);
1442
1443 /* Save the structure value address unless this is passed as an
1444 "invisible" first argument. */
1445 if (struct_incoming_value)
1446 {
1447 emit_move_insn (adjust_address (registers, Pmode, size),
1448 copy_to_reg (struct_incoming_value));
1449 size += GET_MODE_SIZE (Pmode);
1450 }
1451
1452 /* Return the address of the block. */
1453 return copy_addr_to_reg (XEXP (registers, 0));
1454 }
1455
1456 /* __builtin_apply_args returns a block of memory allocated on
1457 the stack into which is stored the arg pointer, structure
1458 value address, static chain, and all the registers that might
1459 possibly be used in performing a function call. The code is
1460 moved to the start of the function so the incoming values are
1461 saved. */
1462
1463 static rtx
1464 expand_builtin_apply_args (void)
1465 {
1466 /* Don't do __builtin_apply_args more than once in a function.
1467 Save the result of the first call and reuse it. */
1468 if (apply_args_value != 0)
1469 return apply_args_value;
1470 {
1471 /* When this function is called, it means that registers must be
1472 saved on entry to this function. So we migrate the
1473 call to the first insn of this function. */
1474 rtx temp;
1475
1476 start_sequence ();
1477 temp = expand_builtin_apply_args_1 ();
1478 rtx_insn *seq = get_insns ();
1479 end_sequence ();
1480
1481 apply_args_value = temp;
1482
1483 /* Put the insns after the NOTE that starts the function.
1484 If this is inside a start_sequence, make the outer-level insn
1485 chain current, so the code is placed at the start of the
1486 function. If internal_arg_pointer is a non-virtual pseudo,
1487 it needs to be placed after the function that initializes
1488 that pseudo. */
1489 push_topmost_sequence ();
1490 if (REG_P (crtl->args.internal_arg_pointer)
1491 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1492 emit_insn_before (seq, parm_birth_insn);
1493 else
1494 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1495 pop_topmost_sequence ();
1496 return temp;
1497 }
1498 }
1499
1500 /* Perform an untyped call and save the state required to perform an
1501 untyped return of whatever value was returned by the given function. */
1502
1503 static rtx
1504 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1505 {
1506 int size, align, regno;
1507 machine_mode mode;
1508 rtx incoming_args, result, reg, dest, src;
1509 rtx_call_insn *call_insn;
1510 rtx old_stack_level = 0;
1511 rtx call_fusage = 0;
1512 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1513
1514 arguments = convert_memory_address (Pmode, arguments);
1515
1516 /* Create a block where the return registers can be saved. */
1517 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1518
1519 /* Fetch the arg pointer from the ARGUMENTS block. */
1520 incoming_args = gen_reg_rtx (Pmode);
1521 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1522 if (!STACK_GROWS_DOWNWARD)
1523 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1524 incoming_args, 0, OPTAB_LIB_WIDEN);
1525
1526 /* Push a new argument block and copy the arguments. Do not allow
1527 the (potential) memcpy call below to interfere with our stack
1528 manipulations. */
1529 do_pending_stack_adjust ();
1530 NO_DEFER_POP;
1531
1532 /* Save the stack with nonlocal if available. */
1533 if (targetm.have_save_stack_nonlocal ())
1534 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1535 else
1536 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1537
1538 /* Allocate a block of memory onto the stack and copy the memory
1539 arguments to the outgoing arguments address. We can pass TRUE
1540 as the 4th argument because we just saved the stack pointer
1541 and will restore it right after the call. */
1542 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1543
1544 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1545 may have already set current_function_calls_alloca to true.
1546 current_function_calls_alloca won't be set if argsize is zero,
1547 so we have to guarantee need_drap is true here. */
1548 if (SUPPORTS_STACK_ALIGNMENT)
1549 crtl->need_drap = true;
1550
1551 dest = virtual_outgoing_args_rtx;
1552 if (!STACK_GROWS_DOWNWARD)
1553 {
1554 if (CONST_INT_P (argsize))
1555 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1556 else
1557 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1558 }
1559 dest = gen_rtx_MEM (BLKmode, dest);
1560 set_mem_align (dest, PARM_BOUNDARY);
1561 src = gen_rtx_MEM (BLKmode, incoming_args);
1562 set_mem_align (src, PARM_BOUNDARY);
1563 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1564
1565 /* Refer to the argument block. */
1566 apply_args_size ();
1567 arguments = gen_rtx_MEM (BLKmode, arguments);
1568 set_mem_align (arguments, PARM_BOUNDARY);
1569
1570 /* Walk past the arg-pointer and structure value address. */
1571 size = GET_MODE_SIZE (Pmode);
1572 if (struct_value)
1573 size += GET_MODE_SIZE (Pmode);
1574
1575 /* Restore each of the registers previously saved. Make USE insns
1576 for each of these registers for use in making the call. */
1577 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1578 if ((mode = apply_args_mode[regno]) != VOIDmode)
1579 {
1580 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1581 if (size % align != 0)
1582 size = CEIL (size, align) * align;
1583 reg = gen_rtx_REG (mode, regno);
1584 emit_move_insn (reg, adjust_address (arguments, mode, size));
1585 use_reg (&call_fusage, reg);
1586 size += GET_MODE_SIZE (mode);
1587 }
1588
1589 /* Restore the structure value address unless this is passed as an
1590 "invisible" first argument. */
1591 size = GET_MODE_SIZE (Pmode);
1592 if (struct_value)
1593 {
1594 rtx value = gen_reg_rtx (Pmode);
1595 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1596 emit_move_insn (struct_value, value);
1597 if (REG_P (struct_value))
1598 use_reg (&call_fusage, struct_value);
1599 size += GET_MODE_SIZE (Pmode);
1600 }
1601
1602 /* All arguments and registers used for the call are set up by now! */
1603 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1604
1605 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work
1606 is needed, and we don't want to load it into a register as an optimization,
1607 because prepare_call_address already did it if it should be done. */
1608 if (GET_CODE (function) != SYMBOL_REF)
1609 function = memory_address (FUNCTION_MODE, function);
1610
1611 /* Generate the actual call instruction and save the return value. */
1612 if (targetm.have_untyped_call ())
1613 {
1614 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1615 emit_call_insn (targetm.gen_untyped_call (mem, result,
1616 result_vector (1, result)));
1617 }
1618 else if (targetm.have_call_value ())
1619 {
1620 rtx valreg = 0;
1621
1622 /* Locate the unique return register. It is not possible to
1623 express a call that sets more than one return register using
1624 call_value; use untyped_call for that. In fact, untyped_call
1625 only needs to save the return registers in the given block. */
1626 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627 if ((mode = apply_result_mode[regno]) != VOIDmode)
1628 {
1629 gcc_assert (!valreg); /* have_untyped_call required. */
1630
1631 valreg = gen_rtx_REG (mode, regno);
1632 }
1633
1634 emit_insn (targetm.gen_call_value (valreg,
1635 gen_rtx_MEM (FUNCTION_MODE, function),
1636 const0_rtx, NULL_RTX, const0_rtx));
1637
1638 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1639 }
1640 else
1641 gcc_unreachable ();
1642
1643 /* Find the CALL insn we just emitted, and attach the register usage
1644 information. */
1645 call_insn = last_call_insn ();
1646 add_function_usage_to (call_insn, call_fusage);
1647
1648 /* Restore the stack. */
1649 if (targetm.have_save_stack_nonlocal ())
1650 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1651 else
1652 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1653 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1654
1655 OK_DEFER_POP;
1656
1657 /* Return the address of the result block. */
1658 result = copy_addr_to_reg (XEXP (result, 0));
1659 return convert_memory_address (ptr_mode, result);
1660 }
1661
1662 /* Perform an untyped return. */
1663
1664 static void
1665 expand_builtin_return (rtx result)
1666 {
1667 int size, align, regno;
1668 machine_mode mode;
1669 rtx reg;
1670 rtx_insn *call_fusage = 0;
1671
1672 result = convert_memory_address (Pmode, result);
1673
1674 apply_result_size ();
1675 result = gen_rtx_MEM (BLKmode, result);
1676
1677 if (targetm.have_untyped_return ())
1678 {
1679 rtx vector = result_vector (0, result);
1680 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1681 emit_barrier ();
1682 return;
1683 }
1684
1685 /* Restore the return value and note that each value is used. */
1686 size = 0;
1687 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1688 if ((mode = apply_result_mode[regno]) != VOIDmode)
1689 {
1690 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1691 if (size % align != 0)
1692 size = CEIL (size, align) * align;
1693 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1694 emit_move_insn (reg, adjust_address (result, mode, size));
1695
1696 push_to_sequence (call_fusage);
1697 emit_use (reg);
1698 call_fusage = get_insns ();
1699 end_sequence ();
1700 size += GET_MODE_SIZE (mode);
1701 }
1702
1703 /* Put the USE insns before the return. */
1704 emit_insn (call_fusage);
1705
1706   /* Return whatever values were restored by jumping directly to the end
1707      of the function.  */
1708 expand_naked_return ();
1709 }
1710
1711 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1712
1713 static enum type_class
1714 type_to_class (tree type)
1715 {
1716 switch (TREE_CODE (type))
1717 {
1718 case VOID_TYPE: return void_type_class;
1719 case INTEGER_TYPE: return integer_type_class;
1720 case ENUMERAL_TYPE: return enumeral_type_class;
1721 case BOOLEAN_TYPE: return boolean_type_class;
1722 case POINTER_TYPE: return pointer_type_class;
1723 case REFERENCE_TYPE: return reference_type_class;
1724 case OFFSET_TYPE: return offset_type_class;
1725 case REAL_TYPE: return real_type_class;
1726 case COMPLEX_TYPE: return complex_type_class;
1727 case FUNCTION_TYPE: return function_type_class;
1728 case METHOD_TYPE: return method_type_class;
1729 case RECORD_TYPE: return record_type_class;
1730 case UNION_TYPE:
1731 case QUAL_UNION_TYPE: return union_type_class;
1732 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1733 ? string_type_class : array_type_class);
1734 case LANG_TYPE: return lang_type_class;
1735 default: return no_type_class;
1736 }
1737 }
1738
1739 /* Expand a call EXP to __builtin_classify_type. */
1740
1741 static rtx
1742 expand_builtin_classify_type (tree exp)
1743 {
1744 if (call_expr_nargs (exp))
1745 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1746 return GEN_INT (no_type_class);
1747 }
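/* A couple of illustrative mappings from the switch above (editorial
   examples, not exhaustive): an `int' argument classifies as
   integer_type_class, a `double' as real_type_class and a `char *' as
   pointer_type_class; these are the constants a call such as
   __builtin_classify_type (x) evaluates to after the expansion above.  */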
1748
1749 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1750 determines which among a set of three builtin math functions is
1751 appropriate for a given type mode. The `F' and `L' cases are
1752 automatically generated from the `double' case. */
1753 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1754 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1755 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1756 fcodel = BUILT_IN_MATHFN##L ; break;
1757 /* Similar to above, but appends _R after any F/L suffix. */
1758 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1759 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1760 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1761 fcodel = BUILT_IN_MATHFN##L_R ; break;
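/* As an editorial illustration, CASE_MATHFN (BUILT_IN_SIN) expands
   mechanically to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each CASE_MATHFN line in the switch below handles the double,
   float and long double variants of one function at once.  */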
1762
1763 /* Return a function equivalent to FN but operating on floating-point
1764 values of type TYPE, or END_BUILTINS if no such function exists.
1765 This is purely an operation on built-in function codes; it does not
1766 guarantee that the target actually has an implementation of the
1767 function. */
1768
1769 static built_in_function
1770 mathfn_built_in_2 (tree type, built_in_function fn)
1771 {
1772 built_in_function fcode, fcodef, fcodel;
1773
1774 switch (fn)
1775 {
1776 CASE_MATHFN (BUILT_IN_ACOS)
1777 CASE_MATHFN (BUILT_IN_ACOSH)
1778 CASE_MATHFN (BUILT_IN_ASIN)
1779 CASE_MATHFN (BUILT_IN_ASINH)
1780 CASE_MATHFN (BUILT_IN_ATAN)
1781 CASE_MATHFN (BUILT_IN_ATAN2)
1782 CASE_MATHFN (BUILT_IN_ATANH)
1783 CASE_MATHFN (BUILT_IN_CBRT)
1784 CASE_MATHFN (BUILT_IN_CEIL)
1785 CASE_MATHFN (BUILT_IN_CEXPI)
1786 CASE_MATHFN (BUILT_IN_COPYSIGN)
1787 CASE_MATHFN (BUILT_IN_COS)
1788 CASE_MATHFN (BUILT_IN_COSH)
1789 CASE_MATHFN (BUILT_IN_DREM)
1790 CASE_MATHFN (BUILT_IN_ERF)
1791 CASE_MATHFN (BUILT_IN_ERFC)
1792 CASE_MATHFN (BUILT_IN_EXP)
1793 CASE_MATHFN (BUILT_IN_EXP10)
1794 CASE_MATHFN (BUILT_IN_EXP2)
1795 CASE_MATHFN (BUILT_IN_EXPM1)
1796 CASE_MATHFN (BUILT_IN_FABS)
1797 CASE_MATHFN (BUILT_IN_FDIM)
1798 CASE_MATHFN (BUILT_IN_FLOOR)
1799 CASE_MATHFN (BUILT_IN_FMA)
1800 CASE_MATHFN (BUILT_IN_FMAX)
1801 CASE_MATHFN (BUILT_IN_FMIN)
1802 CASE_MATHFN (BUILT_IN_FMOD)
1803 CASE_MATHFN (BUILT_IN_FREXP)
1804 CASE_MATHFN (BUILT_IN_GAMMA)
1805 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1806 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1807 CASE_MATHFN (BUILT_IN_HYPOT)
1808 CASE_MATHFN (BUILT_IN_ILOGB)
1809 CASE_MATHFN (BUILT_IN_ICEIL)
1810 CASE_MATHFN (BUILT_IN_IFLOOR)
1811 CASE_MATHFN (BUILT_IN_INF)
1812 CASE_MATHFN (BUILT_IN_IRINT)
1813 CASE_MATHFN (BUILT_IN_IROUND)
1814 CASE_MATHFN (BUILT_IN_ISINF)
1815 CASE_MATHFN (BUILT_IN_J0)
1816 CASE_MATHFN (BUILT_IN_J1)
1817 CASE_MATHFN (BUILT_IN_JN)
1818 CASE_MATHFN (BUILT_IN_LCEIL)
1819 CASE_MATHFN (BUILT_IN_LDEXP)
1820 CASE_MATHFN (BUILT_IN_LFLOOR)
1821 CASE_MATHFN (BUILT_IN_LGAMMA)
1822 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1823 CASE_MATHFN (BUILT_IN_LLCEIL)
1824 CASE_MATHFN (BUILT_IN_LLFLOOR)
1825 CASE_MATHFN (BUILT_IN_LLRINT)
1826 CASE_MATHFN (BUILT_IN_LLROUND)
1827 CASE_MATHFN (BUILT_IN_LOG)
1828 CASE_MATHFN (BUILT_IN_LOG10)
1829 CASE_MATHFN (BUILT_IN_LOG1P)
1830 CASE_MATHFN (BUILT_IN_LOG2)
1831 CASE_MATHFN (BUILT_IN_LOGB)
1832 CASE_MATHFN (BUILT_IN_LRINT)
1833 CASE_MATHFN (BUILT_IN_LROUND)
1834 CASE_MATHFN (BUILT_IN_MODF)
1835 CASE_MATHFN (BUILT_IN_NAN)
1836 CASE_MATHFN (BUILT_IN_NANS)
1837 CASE_MATHFN (BUILT_IN_NEARBYINT)
1838 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1839 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1840 CASE_MATHFN (BUILT_IN_POW)
1841 CASE_MATHFN (BUILT_IN_POWI)
1842 CASE_MATHFN (BUILT_IN_POW10)
1843 CASE_MATHFN (BUILT_IN_REMAINDER)
1844 CASE_MATHFN (BUILT_IN_REMQUO)
1845 CASE_MATHFN (BUILT_IN_RINT)
1846 CASE_MATHFN (BUILT_IN_ROUND)
1847 CASE_MATHFN (BUILT_IN_SCALB)
1848 CASE_MATHFN (BUILT_IN_SCALBLN)
1849 CASE_MATHFN (BUILT_IN_SCALBN)
1850 CASE_MATHFN (BUILT_IN_SIGNBIT)
1851 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1852 CASE_MATHFN (BUILT_IN_SIN)
1853 CASE_MATHFN (BUILT_IN_SINCOS)
1854 CASE_MATHFN (BUILT_IN_SINH)
1855 CASE_MATHFN (BUILT_IN_SQRT)
1856 CASE_MATHFN (BUILT_IN_TAN)
1857 CASE_MATHFN (BUILT_IN_TANH)
1858 CASE_MATHFN (BUILT_IN_TGAMMA)
1859 CASE_MATHFN (BUILT_IN_TRUNC)
1860 CASE_MATHFN (BUILT_IN_Y0)
1861 CASE_MATHFN (BUILT_IN_Y1)
1862 CASE_MATHFN (BUILT_IN_YN)
1863
1864 default:
1865 return END_BUILTINS;
1866 }
1867
1868 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1869 return fcode;
1870 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1871 return fcodef;
1872 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1873 return fcodel;
1874 else
1875 return END_BUILTINS;
1876 }
1877
1878 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1879 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1880 otherwise use the explicit declaration. If we can't do the conversion,
1881 return null. */
1882
1883 static tree
1884 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1885 {
1886 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1887 if (fcode2 == END_BUILTINS)
1888 return NULL_TREE;
1889
1890 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1891 return NULL_TREE;
1892
1893 return builtin_decl_explicit (fcode2);
1894 }
1895
1896 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1897
1898 tree
1899 mathfn_built_in (tree type, enum built_in_function fn)
1900 {
1901 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1902 }
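/* A minimal usage sketch, mirroring the fallback lookups later in this
   file: to obtain the float variant of sqrt a caller can write

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the BUILT_IN_SQRTF decl, or NULL_TREE if that builtin is
   not available implicitly.  */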
1903
1904 /* If errno must be maintained, expand the RTL to check if the result,
1905 TARGET, of a built-in function call, EXP, is NaN, and if so set
1906 errno to EDOM. */
1907
1908 static void
1909 expand_errno_check (tree exp, rtx target)
1910 {
1911 rtx_code_label *lab = gen_label_rtx ();
1912
1913 /* Test the result; if it is NaN, set errno=EDOM because
1914 the argument was not in the domain. */
1915 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1916 NULL_RTX, NULL, lab,
1917 /* The jump is very likely. */
1918 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1919
1920 #ifdef TARGET_EDOM
1921 /* If this built-in doesn't throw an exception, set errno directly. */
1922 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1923 {
1924 #ifdef GEN_ERRNO_RTX
1925 rtx errno_rtx = GEN_ERRNO_RTX;
1926 #else
1927 rtx errno_rtx
1928 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1929 #endif
1930 emit_move_insn (errno_rtx,
1931 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1932 emit_label (lab);
1933 return;
1934 }
1935 #endif
1936
1937 /* Make sure the library call isn't expanded as a tail call. */
1938 CALL_EXPR_TAILCALL (exp) = 0;
1939
1940 /* We can't set errno=EDOM directly; let the library call do it.
1941 Pop the arguments right away in case the call gets deleted. */
1942 NO_DEFER_POP;
1943 expand_call (exp, target, 0);
1944 OK_DEFER_POP;
1945 emit_label (lab);
1946 }
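/* In C terms the check emitted above behaves roughly like

     if (result != result)      // true only when result is NaN
       errno = EDOM;

   where the assignment is done either by storing to errno directly (when
   TARGET_EDOM is defined and the builtin cannot throw) or by re-issuing
   the library call so that it sets errno itself.  */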
1947
1948 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1949 Return NULL_RTX if a normal call should be emitted rather than expanding
1950 the function in-line. EXP is the expression that is a call to the builtin
1951 function; if convenient, the result should be placed in TARGET.
1952 SUBTARGET may be used as the target for computing one of EXP's operands. */
1953
1954 static rtx
1955 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1956 {
1957 optab builtin_optab;
1958 rtx op0;
1959 rtx_insn *insns;
1960 tree fndecl = get_callee_fndecl (exp);
1961 machine_mode mode;
1962 bool errno_set = false;
1963 bool try_widening = false;
1964 tree arg;
1965
1966 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1967 return NULL_RTX;
1968
1969 arg = CALL_EXPR_ARG (exp, 0);
1970
1971 switch (DECL_FUNCTION_CODE (fndecl))
1972 {
1973 CASE_FLT_FN (BUILT_IN_SQRT):
1974 errno_set = ! tree_expr_nonnegative_p (arg);
1975 try_widening = true;
1976 builtin_optab = sqrt_optab;
1977 break;
1978 CASE_FLT_FN (BUILT_IN_EXP):
1979 errno_set = true; builtin_optab = exp_optab; break;
1980 CASE_FLT_FN (BUILT_IN_EXP10):
1981 CASE_FLT_FN (BUILT_IN_POW10):
1982 errno_set = true; builtin_optab = exp10_optab; break;
1983 CASE_FLT_FN (BUILT_IN_EXP2):
1984 errno_set = true; builtin_optab = exp2_optab; break;
1985 CASE_FLT_FN (BUILT_IN_EXPM1):
1986 errno_set = true; builtin_optab = expm1_optab; break;
1987 CASE_FLT_FN (BUILT_IN_LOGB):
1988 errno_set = true; builtin_optab = logb_optab; break;
1989 CASE_FLT_FN (BUILT_IN_LOG):
1990 errno_set = true; builtin_optab = log_optab; break;
1991 CASE_FLT_FN (BUILT_IN_LOG10):
1992 errno_set = true; builtin_optab = log10_optab; break;
1993 CASE_FLT_FN (BUILT_IN_LOG2):
1994 errno_set = true; builtin_optab = log2_optab; break;
1995 CASE_FLT_FN (BUILT_IN_LOG1P):
1996 errno_set = true; builtin_optab = log1p_optab; break;
1997 CASE_FLT_FN (BUILT_IN_ASIN):
1998 builtin_optab = asin_optab; break;
1999 CASE_FLT_FN (BUILT_IN_ACOS):
2000 builtin_optab = acos_optab; break;
2001 CASE_FLT_FN (BUILT_IN_TAN):
2002 builtin_optab = tan_optab; break;
2003 CASE_FLT_FN (BUILT_IN_ATAN):
2004 builtin_optab = atan_optab; break;
2005 CASE_FLT_FN (BUILT_IN_FLOOR):
2006 builtin_optab = floor_optab; break;
2007 CASE_FLT_FN (BUILT_IN_CEIL):
2008 builtin_optab = ceil_optab; break;
2009 CASE_FLT_FN (BUILT_IN_TRUNC):
2010 builtin_optab = btrunc_optab; break;
2011 CASE_FLT_FN (BUILT_IN_ROUND):
2012 builtin_optab = round_optab; break;
2013 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2014 builtin_optab = nearbyint_optab;
2015 if (flag_trapping_math)
2016 break;
2017 /* Else fallthrough and expand as rint. */
2018 CASE_FLT_FN (BUILT_IN_RINT):
2019 builtin_optab = rint_optab; break;
2020 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2021 builtin_optab = significand_optab; break;
2022 default:
2023 gcc_unreachable ();
2024 }
2025
2026 /* Make a suitable register to place result in. */
2027 mode = TYPE_MODE (TREE_TYPE (exp));
2028
2029 if (! flag_errno_math || ! HONOR_NANS (mode))
2030 errno_set = false;
2031
2032 /* Before working hard, check whether the instruction is available, but try
2033 to widen the mode for specific operations. */
2034 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2035 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2036 && (!errno_set || !optimize_insn_for_size_p ()))
2037 {
2038 rtx result = gen_reg_rtx (mode);
2039
2040 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2041 need to expand the argument again. This way, we will not perform
2042          side-effects more than once.  */
2043 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2044
2045 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2046
2047 start_sequence ();
2048
2049 /* Compute into RESULT.
2050 Set RESULT to wherever the result comes back. */
2051 result = expand_unop (mode, builtin_optab, op0, result, 0);
2052
2053 if (result != 0)
2054 {
2055 if (errno_set)
2056 expand_errno_check (exp, result);
2057
2058 /* Output the entire sequence. */
2059 insns = get_insns ();
2060 end_sequence ();
2061 emit_insn (insns);
2062 return result;
2063 }
2064
2065 /* If we were unable to expand via the builtin, stop the sequence
2066 (without outputting the insns) and call to the library function
2067 with the stabilized argument list. */
2068 end_sequence ();
2069 }
2070
2071 return expand_call (exp, target, target == const0_rtx);
2072 }
2073
2074 /* Expand a call to one of the builtin binary math functions (pow, atan2, fmod, ldexp and friends).
2075 Return NULL_RTX if a normal call should be emitted rather than expanding the
2076 function in-line. EXP is the expression that is a call to the builtin
2077 function; if convenient, the result should be placed in TARGET.
2078 SUBTARGET may be used as the target for computing one of EXP's
2079 operands. */
2080
2081 static rtx
2082 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2083 {
2084 optab builtin_optab;
2085 rtx op0, op1, result;
2086 rtx_insn *insns;
2087 int op1_type = REAL_TYPE;
2088 tree fndecl = get_callee_fndecl (exp);
2089 tree arg0, arg1;
2090 machine_mode mode;
2091 bool errno_set = true;
2092
2093 switch (DECL_FUNCTION_CODE (fndecl))
2094 {
2095 CASE_FLT_FN (BUILT_IN_SCALBN):
2096 CASE_FLT_FN (BUILT_IN_SCALBLN):
2097 CASE_FLT_FN (BUILT_IN_LDEXP):
2098 op1_type = INTEGER_TYPE;
2099 default:
2100 break;
2101 }
2102
2103 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2104 return NULL_RTX;
2105
2106 arg0 = CALL_EXPR_ARG (exp, 0);
2107 arg1 = CALL_EXPR_ARG (exp, 1);
2108
2109 switch (DECL_FUNCTION_CODE (fndecl))
2110 {
2111 CASE_FLT_FN (BUILT_IN_POW):
2112 builtin_optab = pow_optab; break;
2113 CASE_FLT_FN (BUILT_IN_ATAN2):
2114 builtin_optab = atan2_optab; break;
2115 CASE_FLT_FN (BUILT_IN_SCALB):
2116 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2117 return 0;
2118 builtin_optab = scalb_optab; break;
2119 CASE_FLT_FN (BUILT_IN_SCALBN):
2120 CASE_FLT_FN (BUILT_IN_SCALBLN):
2121 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2122 return 0;
2123 /* Fall through... */
2124 CASE_FLT_FN (BUILT_IN_LDEXP):
2125 builtin_optab = ldexp_optab; break;
2126 CASE_FLT_FN (BUILT_IN_FMOD):
2127 builtin_optab = fmod_optab; break;
2128 CASE_FLT_FN (BUILT_IN_REMAINDER):
2129 CASE_FLT_FN (BUILT_IN_DREM):
2130 builtin_optab = remainder_optab; break;
2131 default:
2132 gcc_unreachable ();
2133 }
2134
2135 /* Make a suitable register to place result in. */
2136 mode = TYPE_MODE (TREE_TYPE (exp));
2137
2138 /* Before working hard, check whether the instruction is available. */
2139 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2140 return NULL_RTX;
2141
2142 result = gen_reg_rtx (mode);
2143
2144 if (! flag_errno_math || ! HONOR_NANS (mode))
2145 errno_set = false;
2146
2147 if (errno_set && optimize_insn_for_size_p ())
2148 return 0;
2149
2150 /* Always stabilize the argument list. */
2151 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2152 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2153
2154 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2155 op1 = expand_normal (arg1);
2156
2157 start_sequence ();
2158
2159 /* Compute into RESULT.
2160 Set RESULT to wherever the result comes back. */
2161 result = expand_binop (mode, builtin_optab, op0, op1,
2162 result, 0, OPTAB_DIRECT);
2163
2164 /* If we were unable to expand via the builtin, stop the sequence
2165 (without outputting the insns) and call to the library function
2166 with the stabilized argument list. */
2167 if (result == 0)
2168 {
2169 end_sequence ();
2170 return expand_call (exp, target, target == const0_rtx);
2171 }
2172
2173 if (errno_set)
2174 expand_errno_check (exp, result);
2175
2176 /* Output the entire sequence. */
2177 insns = get_insns ();
2178 end_sequence ();
2179 emit_insn (insns);
2180
2181 return result;
2182 }
2183
2184 /* Expand a call to the builtin ternary math functions (fma).
2185 Return NULL_RTX if a normal call should be emitted rather than expanding the
2186 function in-line. EXP is the expression that is a call to the builtin
2187 function; if convenient, the result should be placed in TARGET.
2188 SUBTARGET may be used as the target for computing one of EXP's
2189 operands. */
2190
2191 static rtx
2192 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2193 {
2194 optab builtin_optab;
2195 rtx op0, op1, op2, result;
2196 rtx_insn *insns;
2197 tree fndecl = get_callee_fndecl (exp);
2198 tree arg0, arg1, arg2;
2199 machine_mode mode;
2200
2201 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2202 return NULL_RTX;
2203
2204 arg0 = CALL_EXPR_ARG (exp, 0);
2205 arg1 = CALL_EXPR_ARG (exp, 1);
2206 arg2 = CALL_EXPR_ARG (exp, 2);
2207
2208 switch (DECL_FUNCTION_CODE (fndecl))
2209 {
2210 CASE_FLT_FN (BUILT_IN_FMA):
2211 builtin_optab = fma_optab; break;
2212 default:
2213 gcc_unreachable ();
2214 }
2215
2216 /* Make a suitable register to place result in. */
2217 mode = TYPE_MODE (TREE_TYPE (exp));
2218
2219 /* Before working hard, check whether the instruction is available. */
2220 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2221 return NULL_RTX;
2222
2223 result = gen_reg_rtx (mode);
2224
2225 /* Always stabilize the argument list. */
2226 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2227 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2228 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2229
2230 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2231 op1 = expand_normal (arg1);
2232 op2 = expand_normal (arg2);
2233
2234 start_sequence ();
2235
2236 /* Compute into RESULT.
2237 Set RESULT to wherever the result comes back. */
2238 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2239 result, 0);
2240
2241 /* If we were unable to expand via the builtin, stop the sequence
2242 (without outputting the insns) and call to the library function
2243 with the stabilized argument list. */
2244 if (result == 0)
2245 {
2246 end_sequence ();
2247 return expand_call (exp, target, target == const0_rtx);
2248 }
2249
2250 /* Output the entire sequence. */
2251 insns = get_insns ();
2252 end_sequence ();
2253 emit_insn (insns);
2254
2255 return result;
2256 }
2257
2258 /* Expand a call to the builtin sin and cos math functions.
2259 Return NULL_RTX if a normal call should be emitted rather than expanding the
2260 function in-line. EXP is the expression that is a call to the builtin
2261 function; if convenient, the result should be placed in TARGET.
2262 SUBTARGET may be used as the target for computing one of EXP's
2263 operands. */
2264
2265 static rtx
2266 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2267 {
2268 optab builtin_optab;
2269 rtx op0;
2270 rtx_insn *insns;
2271 tree fndecl = get_callee_fndecl (exp);
2272 machine_mode mode;
2273 tree arg;
2274
2275 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2276 return NULL_RTX;
2277
2278 arg = CALL_EXPR_ARG (exp, 0);
2279
2280 switch (DECL_FUNCTION_CODE (fndecl))
2281 {
2282 CASE_FLT_FN (BUILT_IN_SIN):
2283 CASE_FLT_FN (BUILT_IN_COS):
2284 builtin_optab = sincos_optab; break;
2285 default:
2286 gcc_unreachable ();
2287 }
2288
2289 /* Make a suitable register to place result in. */
2290 mode = TYPE_MODE (TREE_TYPE (exp));
2291
2292   /* Check if the sincos insn is available, otherwise fall back
2293      to the sin or cos insn.  */
2294 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2295 switch (DECL_FUNCTION_CODE (fndecl))
2296 {
2297 CASE_FLT_FN (BUILT_IN_SIN):
2298 builtin_optab = sin_optab; break;
2299 CASE_FLT_FN (BUILT_IN_COS):
2300 builtin_optab = cos_optab; break;
2301 default:
2302 gcc_unreachable ();
2303 }
2304
2305 /* Before working hard, check whether the instruction is available. */
2306 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2307 {
2308 rtx result = gen_reg_rtx (mode);
2309
2310 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2311 need to expand the argument again. This way, we will not perform
2312          side-effects more than once.  */
2313 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2314
2315 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2316
2317 start_sequence ();
2318
2319 /* Compute into RESULT.
2320 Set RESULT to wherever the result comes back. */
2321 if (builtin_optab == sincos_optab)
2322 {
2323 int ok;
2324
2325 switch (DECL_FUNCTION_CODE (fndecl))
2326 {
2327 CASE_FLT_FN (BUILT_IN_SIN):
2328 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2329 break;
2330 CASE_FLT_FN (BUILT_IN_COS):
2331 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2332 break;
2333 default:
2334 gcc_unreachable ();
2335 }
2336 gcc_assert (ok);
2337 }
2338 else
2339 result = expand_unop (mode, builtin_optab, op0, result, 0);
2340
2341 if (result != 0)
2342 {
2343 /* Output the entire sequence. */
2344 insns = get_insns ();
2345 end_sequence ();
2346 emit_insn (insns);
2347 return result;
2348 }
2349
2350 /* If we were unable to expand via the builtin, stop the sequence
2351 (without outputting the insns) and call to the library function
2352 with the stabilized argument list. */
2353 end_sequence ();
2354 }
2355
2356 return expand_call (exp, target, target == const0_rtx);
2357 }
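/* Note on the sincos path above: expand_twoval_unop computes both
   outputs, but for a plain sin call only the second output (RESULT) is
   wanted and the cosine value is discarded, and vice versa for cos;
   passing 0 for the unused output leaves the expander to put it in a
   scratch register that is never read.  */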
2358
2359 /* Given an interclass math builtin decl FNDECL and its argument ARG
2360 return an RTL instruction code that implements the functionality.
2361 If that isn't possible or available return CODE_FOR_nothing. */
2362
2363 static enum insn_code
2364 interclass_mathfn_icode (tree arg, tree fndecl)
2365 {
2366 bool errno_set = false;
2367 optab builtin_optab = unknown_optab;
2368 machine_mode mode;
2369
2370 switch (DECL_FUNCTION_CODE (fndecl))
2371 {
2372 CASE_FLT_FN (BUILT_IN_ILOGB):
2373 errno_set = true; builtin_optab = ilogb_optab; break;
2374 CASE_FLT_FN (BUILT_IN_ISINF):
2375 builtin_optab = isinf_optab; break;
2376 case BUILT_IN_ISNORMAL:
2377 case BUILT_IN_ISFINITE:
2378 CASE_FLT_FN (BUILT_IN_FINITE):
2379 case BUILT_IN_FINITED32:
2380 case BUILT_IN_FINITED64:
2381 case BUILT_IN_FINITED128:
2382 case BUILT_IN_ISINFD32:
2383 case BUILT_IN_ISINFD64:
2384 case BUILT_IN_ISINFD128:
2385 /* These builtins have no optabs (yet). */
2386 break;
2387 default:
2388 gcc_unreachable ();
2389 }
2390
2391 /* There's no easy way to detect the case we need to set EDOM. */
2392 if (flag_errno_math && errno_set)
2393 return CODE_FOR_nothing;
2394
2395 /* Optab mode depends on the mode of the input argument. */
2396 mode = TYPE_MODE (TREE_TYPE (arg));
2397
2398 if (builtin_optab)
2399 return optab_handler (builtin_optab, mode);
2400 return CODE_FOR_nothing;
2401 }
2402
2403 /* Expand a call to one of the builtin math functions that operate on
2404    a floating-point argument and produce an integer result (ilogb, isinf,
2405 isnan, etc).
2406 Return 0 if a normal call should be emitted rather than expanding the
2407 function in-line. EXP is the expression that is a call to the builtin
2408 function; if convenient, the result should be placed in TARGET. */
2409
2410 static rtx
2411 expand_builtin_interclass_mathfn (tree exp, rtx target)
2412 {
2413 enum insn_code icode = CODE_FOR_nothing;
2414 rtx op0;
2415 tree fndecl = get_callee_fndecl (exp);
2416 machine_mode mode;
2417 tree arg;
2418
2419 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2420 return NULL_RTX;
2421
2422 arg = CALL_EXPR_ARG (exp, 0);
2423 icode = interclass_mathfn_icode (arg, fndecl);
2424 mode = TYPE_MODE (TREE_TYPE (arg));
2425
2426 if (icode != CODE_FOR_nothing)
2427 {
2428 struct expand_operand ops[1];
2429 rtx_insn *last = get_last_insn ();
2430 tree orig_arg = arg;
2431
2432 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2433 need to expand the argument again. This way, we will not perform
2434          side-effects more than once.  */
2435 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2436
2437 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2438
2439 if (mode != GET_MODE (op0))
2440 op0 = convert_to_mode (mode, op0, 0);
2441
2442 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2443 if (maybe_legitimize_operands (icode, 0, 1, ops)
2444 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2445 return ops[0].value;
2446
2447 delete_insns_since (last);
2448 CALL_EXPR_ARG (exp, 0) = orig_arg;
2449 }
2450
2451 return NULL_RTX;
2452 }
2453
2454 /* Expand a call to the builtin sincos math function.
2455 Return NULL_RTX if a normal call should be emitted rather than expanding the
2456 function in-line. EXP is the expression that is a call to the builtin
2457 function. */
2458
2459 static rtx
2460 expand_builtin_sincos (tree exp)
2461 {
2462 rtx op0, op1, op2, target1, target2;
2463 machine_mode mode;
2464 tree arg, sinp, cosp;
2465 int result;
2466 location_t loc = EXPR_LOCATION (exp);
2467 tree alias_type, alias_off;
2468
2469 if (!validate_arglist (exp, REAL_TYPE,
2470 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2471 return NULL_RTX;
2472
2473 arg = CALL_EXPR_ARG (exp, 0);
2474 sinp = CALL_EXPR_ARG (exp, 1);
2475 cosp = CALL_EXPR_ARG (exp, 2);
2476
2477 /* Make a suitable register to place result in. */
2478 mode = TYPE_MODE (TREE_TYPE (arg));
2479
2480 /* Check if sincos insn is available, otherwise emit the call. */
2481 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2482 return NULL_RTX;
2483
2484 target1 = gen_reg_rtx (mode);
2485 target2 = gen_reg_rtx (mode);
2486
2487 op0 = expand_normal (arg);
2488 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2489 alias_off = build_int_cst (alias_type, 0);
2490 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2491 sinp, alias_off));
2492 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2493 cosp, alias_off));
2494
2495 /* Compute into target1 and target2.
2496 Set TARGET to wherever the result comes back. */
2497 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2498 gcc_assert (result);
2499
2500 /* Move target1 and target2 to the memory locations indicated
2501 by op1 and op2. */
2502 emit_move_insn (op1, target1);
2503 emit_move_insn (op2, target2);
2504
2505 return const0_rtx;
2506 }
2507
2508 /* Expand a call to the internal cexpi builtin via the sincos math function.
2509 EXP is the expression that is a call to the builtin function; if convenient,
2510 the result should be placed in TARGET. */
2511
2512 static rtx
2513 expand_builtin_cexpi (tree exp, rtx target)
2514 {
2515 tree fndecl = get_callee_fndecl (exp);
2516 tree arg, type;
2517 machine_mode mode;
2518 rtx op0, op1, op2;
2519 location_t loc = EXPR_LOCATION (exp);
2520
2521 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2522 return NULL_RTX;
2523
2524 arg = CALL_EXPR_ARG (exp, 0);
2525 type = TREE_TYPE (arg);
2526 mode = TYPE_MODE (TREE_TYPE (arg));
2527
2528   /* Try expanding via a sincos optab; fall back to emitting a libcall
2529      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2530      is only generated from sincos or cexp, or when either of them is available.  */
2531 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2532 {
2533 op1 = gen_reg_rtx (mode);
2534 op2 = gen_reg_rtx (mode);
2535
2536 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2537
2538 /* Compute into op1 and op2. */
2539 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2540 }
2541 else if (targetm.libc_has_function (function_sincos))
2542 {
2543 tree call, fn = NULL_TREE;
2544 tree top1, top2;
2545 rtx op1a, op2a;
2546
2547 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2548 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2549 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2550 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2551 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2552 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2553 else
2554 gcc_unreachable ();
2555
2556 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2557 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2558 op1a = copy_addr_to_reg (XEXP (op1, 0));
2559 op2a = copy_addr_to_reg (XEXP (op2, 0));
2560 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2561 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2562
2563 /* Make sure not to fold the sincos call again. */
2564 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2565 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2566 call, 3, arg, top1, top2));
2567 }
2568 else
2569 {
2570 tree call, fn = NULL_TREE, narg;
2571 tree ctype = build_complex_type (type);
2572
2573 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2574 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2575 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2576 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2577 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2578 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2579 else
2580 gcc_unreachable ();
2581
2582 /* If we don't have a decl for cexp create one. This is the
2583 friendliest fallback if the user calls __builtin_cexpi
2584          when the target lacks full C99 function support.  */
2585 if (fn == NULL_TREE)
2586 {
2587 tree fntype;
2588 const char *name = NULL;
2589
2590 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2591 name = "cexpf";
2592 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2593 name = "cexp";
2594 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2595 name = "cexpl";
2596
2597 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2598 fn = build_fn_decl (name, fntype);
2599 }
2600
2601 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2602 build_real (type, dconst0), arg);
2603
2604 /* Make sure not to fold the cexp call again. */
2605 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2606 return expand_expr (build_call_nary (ctype, call, 1, narg),
2607 target, VOIDmode, EXPAND_NORMAL);
2608 }
2609
2610 /* Now build the proper return type. */
2611 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2612 make_tree (TREE_TYPE (arg), op2),
2613 make_tree (TREE_TYPE (arg), op1)),
2614 target, VOIDmode, EXPAND_NORMAL);
2615 }
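/* The expansions above all rely on the identity
   cexpi (x) == cos (x) + i * sin (x): the sincos-based paths compute the
   two parts separately and the final COMPLEX_EXPR puts the cosine in the
   real part and the sine in the imaginary part, while the last fallback
   evaluates cexp (0 + x*i) directly.  */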
2616
2617 /* Conveniently construct a function call expression. FNDECL names the
2618 function to be called, N is the number of arguments, and the "..."
2619    parameters are the argument expressions.  Unlike build_call_expr
2620 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2621
2622 static tree
2623 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2624 {
2625 va_list ap;
2626 tree fntype = TREE_TYPE (fndecl);
2627 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2628
2629 va_start (ap, n);
2630 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2631 va_end (ap);
2632 SET_EXPR_LOCATION (fn, loc);
2633 return fn;
2634 }
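/* Example use, as in the rounding fallbacks further below: to build an
   unfolded call `floor (arg)' one writes

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   and then expands EXP normally; since the call is not folded it is
   guaranteed to reach the expander as a CALL_EXPR.  */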
2635
2636 /* Expand a call to one of the builtin rounding functions gcc defines
2637 as an extension (lfloor and lceil). As these are gcc extensions we
2638 do not need to worry about setting errno to EDOM.
2639 If expanding via optab fails, lower expression to (int)(floor(x)).
2640 EXP is the expression that is a call to the builtin function;
2641 if convenient, the result should be placed in TARGET. */
2642
2643 static rtx
2644 expand_builtin_int_roundingfn (tree exp, rtx target)
2645 {
2646 convert_optab builtin_optab;
2647 rtx op0, tmp;
2648 rtx_insn *insns;
2649 tree fndecl = get_callee_fndecl (exp);
2650 enum built_in_function fallback_fn;
2651 tree fallback_fndecl;
2652 machine_mode mode;
2653 tree arg;
2654
2655 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2656 gcc_unreachable ();
2657
2658 arg = CALL_EXPR_ARG (exp, 0);
2659
2660 switch (DECL_FUNCTION_CODE (fndecl))
2661 {
2662 CASE_FLT_FN (BUILT_IN_ICEIL):
2663 CASE_FLT_FN (BUILT_IN_LCEIL):
2664 CASE_FLT_FN (BUILT_IN_LLCEIL):
2665 builtin_optab = lceil_optab;
2666 fallback_fn = BUILT_IN_CEIL;
2667 break;
2668
2669 CASE_FLT_FN (BUILT_IN_IFLOOR):
2670 CASE_FLT_FN (BUILT_IN_LFLOOR):
2671 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2672 builtin_optab = lfloor_optab;
2673 fallback_fn = BUILT_IN_FLOOR;
2674 break;
2675
2676 default:
2677 gcc_unreachable ();
2678 }
2679
2680 /* Make a suitable register to place result in. */
2681 mode = TYPE_MODE (TREE_TYPE (exp));
2682
2683 target = gen_reg_rtx (mode);
2684
2685 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2686 need to expand the argument again. This way, we will not perform
2687      side-effects more than once.  */
2688 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2689
2690 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2691
2692 start_sequence ();
2693
2694 /* Compute into TARGET. */
2695 if (expand_sfix_optab (target, op0, builtin_optab))
2696 {
2697 /* Output the entire sequence. */
2698 insns = get_insns ();
2699 end_sequence ();
2700 emit_insn (insns);
2701 return target;
2702 }
2703
2704 /* If we were unable to expand via the builtin, stop the sequence
2705 (without outputting the insns). */
2706 end_sequence ();
2707
2708 /* Fall back to floating point rounding optab. */
2709 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2710
2711 /* For non-C99 targets we may end up without a fallback fndecl here
2712 if the user called __builtin_lfloor directly. In this case emit
2713 a call to the floor/ceil variants nevertheless. This should result
2714      in the best user experience for targets without full C99 support.  */
2715 if (fallback_fndecl == NULL_TREE)
2716 {
2717 tree fntype;
2718 const char *name = NULL;
2719
2720 switch (DECL_FUNCTION_CODE (fndecl))
2721 {
2722 case BUILT_IN_ICEIL:
2723 case BUILT_IN_LCEIL:
2724 case BUILT_IN_LLCEIL:
2725 name = "ceil";
2726 break;
2727 case BUILT_IN_ICEILF:
2728 case BUILT_IN_LCEILF:
2729 case BUILT_IN_LLCEILF:
2730 name = "ceilf";
2731 break;
2732 case BUILT_IN_ICEILL:
2733 case BUILT_IN_LCEILL:
2734 case BUILT_IN_LLCEILL:
2735 name = "ceill";
2736 break;
2737 case BUILT_IN_IFLOOR:
2738 case BUILT_IN_LFLOOR:
2739 case BUILT_IN_LLFLOOR:
2740 name = "floor";
2741 break;
2742 case BUILT_IN_IFLOORF:
2743 case BUILT_IN_LFLOORF:
2744 case BUILT_IN_LLFLOORF:
2745 name = "floorf";
2746 break;
2747 case BUILT_IN_IFLOORL:
2748 case BUILT_IN_LFLOORL:
2749 case BUILT_IN_LLFLOORL:
2750 name = "floorl";
2751 break;
2752 default:
2753 gcc_unreachable ();
2754 }
2755
2756 fntype = build_function_type_list (TREE_TYPE (arg),
2757 TREE_TYPE (arg), NULL_TREE);
2758 fallback_fndecl = build_fn_decl (name, fntype);
2759 }
2760
2761 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2762
2763 tmp = expand_normal (exp);
2764 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2765
2766 /* Truncate the result of floating point optab to integer
2767 via expand_fix (). */
2768 target = gen_reg_rtx (mode);
2769 expand_fix (target, tmp, 0);
2770
2771 return target;
2772 }
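/* In effect, on a target with no lceil/lfloor pattern a call such as
   __builtin_lfloor (x) is lowered by the code above to the equivalent of

     (long) floor (x)

   i.e. a call to the floating-point rounding function followed by
   expand_fix to truncate the result to the integer mode.  */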
2773
2774 /* Expand a call to one of the builtin math functions doing integer
2775 conversion (lrint).
2776 Return 0 if a normal call should be emitted rather than expanding the
2777 function in-line. EXP is the expression that is a call to the builtin
2778 function; if convenient, the result should be placed in TARGET. */
2779
2780 static rtx
2781 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2782 {
2783 convert_optab builtin_optab;
2784 rtx op0;
2785 rtx_insn *insns;
2786 tree fndecl = get_callee_fndecl (exp);
2787 tree arg;
2788 machine_mode mode;
2789 enum built_in_function fallback_fn = BUILT_IN_NONE;
2790
2791 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2792 gcc_unreachable ();
2793
2794 arg = CALL_EXPR_ARG (exp, 0);
2795
2796 switch (DECL_FUNCTION_CODE (fndecl))
2797 {
2798 CASE_FLT_FN (BUILT_IN_IRINT):
2799 fallback_fn = BUILT_IN_LRINT;
2800 /* FALLTHRU */
2801 CASE_FLT_FN (BUILT_IN_LRINT):
2802 CASE_FLT_FN (BUILT_IN_LLRINT):
2803 builtin_optab = lrint_optab;
2804 break;
2805
2806 CASE_FLT_FN (BUILT_IN_IROUND):
2807 fallback_fn = BUILT_IN_LROUND;
2808 /* FALLTHRU */
2809 CASE_FLT_FN (BUILT_IN_LROUND):
2810 CASE_FLT_FN (BUILT_IN_LLROUND):
2811 builtin_optab = lround_optab;
2812 break;
2813
2814 default:
2815 gcc_unreachable ();
2816 }
2817
2818 /* There's no easy way to detect the case we need to set EDOM. */
2819 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2820 return NULL_RTX;
2821
2822 /* Make a suitable register to place result in. */
2823 mode = TYPE_MODE (TREE_TYPE (exp));
2824
2825   /* As there is no easy way to detect when EDOM would need to be set,
2826      only use the optab path when errno does not need to be maintained.  */
2826 if (!flag_errno_math)
2827 {
2828 rtx result = gen_reg_rtx (mode);
2829
2830 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2831 need to expand the argument again. This way, we will not perform
2832          side-effects more than once.  */
2833 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2834
2835 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2836
2837 start_sequence ();
2838
2839 if (expand_sfix_optab (result, op0, builtin_optab))
2840 {
2841 /* Output the entire sequence. */
2842 insns = get_insns ();
2843 end_sequence ();
2844 emit_insn (insns);
2845 return result;
2846 }
2847
2848 /* If we were unable to expand via the builtin, stop the sequence
2849 (without outputting the insns) and call to the library function
2850 with the stabilized argument list. */
2851 end_sequence ();
2852 }
2853
2854 if (fallback_fn != BUILT_IN_NONE)
2855 {
2856 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2857 targets, (int) round (x) should never be transformed into
2858 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2859 a call to lround in the hope that the target provides at least some
2860 C99 functions. This should result in the best user experience for
2861          targets without full C99 support.  */
2862 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2863 fallback_fn, 0);
2864
2865 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2866 fallback_fndecl, 1, arg);
2867
2868 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2869 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2870 return convert_to_mode (mode, target, 0);
2871 }
2872
2873 return expand_call (exp, target, target == const0_rtx);
2874 }
2875
2876 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2877 a normal call should be emitted rather than expanding the function
2878 in-line. EXP is the expression that is a call to the builtin
2879 function; if convenient, the result should be placed in TARGET. */
2880
2881 static rtx
2882 expand_builtin_powi (tree exp, rtx target)
2883 {
2884 tree arg0, arg1;
2885 rtx op0, op1;
2886 machine_mode mode;
2887 machine_mode mode2;
2888
2889 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2890 return NULL_RTX;
2891
2892 arg0 = CALL_EXPR_ARG (exp, 0);
2893 arg1 = CALL_EXPR_ARG (exp, 1);
2894 mode = TYPE_MODE (TREE_TYPE (exp));
2895
2896 /* Emit a libcall to libgcc. */
2897
2898 /* Mode of the 2nd argument must match that of an int. */
2899 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2900
2901 if (target == NULL_RTX)
2902 target = gen_reg_rtx (mode);
2903
2904 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2905 if (GET_MODE (op0) != mode)
2906 op0 = convert_to_mode (mode, op0, 0);
2907 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2908 if (GET_MODE (op1) != mode2)
2909 op1 = convert_to_mode (mode2, op1, 0);
2910
2911 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2912 target, LCT_CONST, mode, 2,
2913 op0, mode, op1, mode2);
2914
2915 return target;
2916 }
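/* The libcall above computes x**n for an int exponent; for DFmode the
   libgcc routine selected via powi_optab is typically named __powidf2
   (assumption: the usual libgcc naming), so __builtin_powi (x, 3) ends
   up as roughly

     double r = __powidf2 (x, 3);

   with the exact symbol chosen per mode by optab_libfunc.  */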
2917
2918 /* Expand expression EXP, which is a call to the strlen builtin.  Return
2919    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
2920 try to get the result in TARGET, if convenient. */
2921
2922 static rtx
2923 expand_builtin_strlen (tree exp, rtx target,
2924 machine_mode target_mode)
2925 {
2926 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2927 return NULL_RTX;
2928 else
2929 {
2930 struct expand_operand ops[4];
2931 rtx pat;
2932 tree len;
2933 tree src = CALL_EXPR_ARG (exp, 0);
2934 rtx src_reg;
2935 rtx_insn *before_strlen;
2936 machine_mode insn_mode = target_mode;
2937 enum insn_code icode = CODE_FOR_nothing;
2938 unsigned int align;
2939
2940 /* If the length can be computed at compile-time, return it. */
2941 len = c_strlen (src, 0);
2942 if (len)
2943 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2944
2945       /* If the length can be computed at compile-time and is a constant
2946 integer, but there are side-effects in src, evaluate
2947 src for side-effects, then return len.
2948 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2949 can be optimized into: i++; x = 3; */
2950 len = c_strlen (src, 1);
2951 if (len && TREE_CODE (len) == INTEGER_CST)
2952 {
2953 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2954 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2955 }
2956
2957 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2958
2959 /* If SRC is not a pointer type, don't do this operation inline. */
2960 if (align == 0)
2961 return NULL_RTX;
2962
2963 /* Bail out if we can't compute strlen in the right mode. */
2964 while (insn_mode != VOIDmode)
2965 {
2966 icode = optab_handler (strlen_optab, insn_mode);
2967 if (icode != CODE_FOR_nothing)
2968 break;
2969
2970 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2971 }
2972 if (insn_mode == VOIDmode)
2973 return NULL_RTX;
2974
2975 /* Make a place to hold the source address. We will not expand
2976 the actual source until we are sure that the expansion will
2977 not fail -- there are trees that cannot be expanded twice. */
2978 src_reg = gen_reg_rtx (Pmode);
2979
2980 /* Mark the beginning of the strlen sequence so we can emit the
2981 source operand later. */
2982 before_strlen = get_last_insn ();
2983
2984 create_output_operand (&ops[0], target, insn_mode);
2985 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2986 create_integer_operand (&ops[2], 0);
2987 create_integer_operand (&ops[3], align);
2988 if (!maybe_expand_insn (icode, 4, ops))
2989 return NULL_RTX;
2990
2991 /* Now that we are assured of success, expand the source. */
2992 start_sequence ();
2993 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2994 if (pat != src_reg)
2995 {
2996 #ifdef POINTERS_EXTEND_UNSIGNED
2997 if (GET_MODE (pat) != Pmode)
2998 pat = convert_to_mode (Pmode, pat,
2999 POINTERS_EXTEND_UNSIGNED);
3000 #endif
3001 emit_move_insn (src_reg, pat);
3002 }
3003 pat = get_insns ();
3004 end_sequence ();
3005
3006 if (before_strlen)
3007 emit_insn_after (pat, before_strlen);
3008 else
3009 emit_insn_before (pat, get_insns ());
3010
3011 /* Return the value in the proper mode for this function. */
3012 if (GET_MODE (ops[0].value) == target_mode)
3013 target = ops[0].value;
3014 else if (target != 0)
3015 convert_move (target, ops[0].value, 0);
3016 else
3017 target = convert_to_mode (target_mode, ops[0].value, 0);
3018
3019 return target;
3020 }
3021 }
3022
3023 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3024 bytes from constant string DATA + OFFSET and return it as target
3025 constant. */
3026
3027 static rtx
3028 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3029 machine_mode mode)
3030 {
3031 const char *str = (const char *) data;
3032
3033 gcc_assert (offset >= 0
3034 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3035 <= strlen (str) + 1));
3036
3037 return c_readstr (str + offset, mode);
3038 }
3039
3040 /* LEN specifies the length of the block for the memcpy/memset operation.
3041    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3042    In some cases we can make a very likely guess at the maximum size, which
3043    we then put into PROBABLE_MAX_SIZE.  */
3044
3045 static void
3046 determine_block_size (tree len, rtx len_rtx,
3047 unsigned HOST_WIDE_INT *min_size,
3048 unsigned HOST_WIDE_INT *max_size,
3049 unsigned HOST_WIDE_INT *probable_max_size)
3050 {
3051 if (CONST_INT_P (len_rtx))
3052 {
3053 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3054 return;
3055 }
3056 else
3057 {
3058 wide_int min, max;
3059 enum value_range_type range_type = VR_UNDEFINED;
3060
3061 /* Determine bounds from the type. */
3062 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3063 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3064 else
3065 *min_size = 0;
3066 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3067 *probable_max_size = *max_size
3068 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3069 else
3070 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3071
3072 if (TREE_CODE (len) == SSA_NAME)
3073 range_type = get_range_info (len, &min, &max);
3074 if (range_type == VR_RANGE)
3075 {
3076 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3077 *min_size = min.to_uhwi ();
3078 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3079 *probable_max_size = *max_size = max.to_uhwi ();
3080 }
3081 else if (range_type == VR_ANTI_RANGE)
3082 {
3083          /* An anti range 0...N lets us determine the minimal size as N+1.  */
3084 if (min == 0)
3085 {
3086 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3087 *min_size = max.to_uhwi () + 1;
3088 }
3089 /* Code like
3090
3091 int n;
3092 if (n < 100)
3093 memcpy (a, b, n)
3094
3095             produces an anti range allowing negative values of N.  We can
3096             still use this information to guess that N is not negative.
3097 */
3098 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3099 *probable_max_size = min.to_uhwi () - 1;
3100 }
3101 }
3102 gcc_checking_assert (*max_size <=
3103 (unsigned HOST_WIDE_INT)
3104 GET_MODE_MASK (GET_MODE (len_rtx)));
3105 }
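/* A short worked example with hypothetical values: if LEN is a size_t
   SSA name for which VRP recorded the range [16, 64], the code above
   sets *MIN_SIZE to 16 and both *MAX_SIZE and *PROBABLE_MAX_SIZE to 64;
   with no range information all three fall back to the bounds implied
   by LEN's type and the mode of LEN_RTX.  */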
3106
3107 /* Helper function to do the actual work for expand_builtin_memcpy. */
3108
3109 static rtx
3110 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3111 {
3112 const char *src_str;
3113 unsigned int src_align = get_pointer_alignment (src);
3114 unsigned int dest_align = get_pointer_alignment (dest);
3115 rtx dest_mem, src_mem, dest_addr, len_rtx;
3116 HOST_WIDE_INT expected_size = -1;
3117 unsigned int expected_align = 0;
3118 unsigned HOST_WIDE_INT min_size;
3119 unsigned HOST_WIDE_INT max_size;
3120 unsigned HOST_WIDE_INT probable_max_size;
3121
3122 /* If DEST is not a pointer type, call the normal function. */
3123 if (dest_align == 0)
3124 return NULL_RTX;
3125
3126   /* If SRC is not a pointer type, don't do this
3127 operation in-line. */
3128 if (src_align == 0)
3129 return NULL_RTX;
3130
3131 if (currently_expanding_gimple_stmt)
3132 stringop_block_profile (currently_expanding_gimple_stmt,
3133 &expected_align, &expected_size);
3134
3135 if (expected_align < dest_align)
3136 expected_align = dest_align;
3137 dest_mem = get_memory_rtx (dest, len);
3138 set_mem_align (dest_mem, dest_align);
3139 len_rtx = expand_normal (len);
3140 determine_block_size (len, len_rtx, &min_size, &max_size,
3141 &probable_max_size);
3142 src_str = c_getstr (src);
3143
3144 /* If SRC is a string constant and block move would be done
3145 by pieces, we can avoid loading the string from memory
3146      and only store the computed constants.  */
3147 if (src_str
3148 && CONST_INT_P (len_rtx)
3149 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3150 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3151 CONST_CAST (char *, src_str),
3152 dest_align, false))
3153 {
3154 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3155 builtin_memcpy_read_str,
3156 CONST_CAST (char *, src_str),
3157 dest_align, false, 0);
3158 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3159 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3160 return dest_mem;
3161 }
3162
3163 src_mem = get_memory_rtx (src, len);
3164 set_mem_align (src_mem, src_align);
3165
3166 /* Copy word part most expediently. */
3167 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3168 CALL_EXPR_TAILCALL (exp)
3169 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3170 expected_align, expected_size,
3171 min_size, max_size, probable_max_size);
3172
3173 if (dest_addr == 0)
3174 {
3175 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3176 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3177 }
3178
3179 return dest_addr;
3180 }
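/* For instance, a call like memcpy (buf, "hi", 3) with a constant length
   and a string-constant source takes the store_by_pieces path above
   (provided the target's can_store_by_pieces agrees): the three constant
   bytes are emitted directly and the literal is never loaded from memory.
   Larger or variable-length copies go through emit_block_move_hints
   instead.  */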
3181
3182 /* Expand a call EXP to the memcpy builtin.
3183    Return NULL_RTX if we failed; the caller should emit a normal call,
3184 otherwise try to get the result in TARGET, if convenient (and in
3185 mode MODE if that's convenient). */
3186
3187 static rtx
3188 expand_builtin_memcpy (tree exp, rtx target)
3189 {
3190 if (!validate_arglist (exp,
3191 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3192 return NULL_RTX;
3193 else
3194 {
3195 tree dest = CALL_EXPR_ARG (exp, 0);
3196 tree src = CALL_EXPR_ARG (exp, 1);
3197 tree len = CALL_EXPR_ARG (exp, 2);
3198 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3199 }
3200 }
3201
3202 /* Expand an instrumented call EXP to the memcpy builtin.
3203    Return NULL_RTX if we failed; the caller should emit a normal call,
3204 otherwise try to get the result in TARGET, if convenient (and in
3205 mode MODE if that's convenient). */
3206
3207 static rtx
3208 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3209 {
3210 if (!validate_arglist (exp,
3211 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3212 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3213 INTEGER_TYPE, VOID_TYPE))
3214 return NULL_RTX;
3215 else
3216 {
3217 tree dest = CALL_EXPR_ARG (exp, 0);
3218 tree src = CALL_EXPR_ARG (exp, 2);
3219 tree len = CALL_EXPR_ARG (exp, 4);
3220 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3221
3222 /* Return src bounds with the result. */
3223 if (res)
3224 {
3225 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3226 expand_normal (CALL_EXPR_ARG (exp, 1)));
3227 res = chkp_join_splitted_slot (res, bnd);
3228 }
3229 return res;
3230 }
3231 }
3232
3233 /* Expand a call EXP to the mempcpy builtin.
3234 Return NULL_RTX if we failed; the caller should emit a normal call,
3235 otherwise try to get the result in TARGET, if convenient (and in
3236 mode MODE if that's convenient). If ENDP is 0 return the
3237 destination pointer, if ENDP is 1 return the end pointer ala
3238 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3239 stpcpy. */
3240
3241 static rtx
3242 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3243 {
3244 if (!validate_arglist (exp,
3245 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3246 return NULL_RTX;
3247 else
3248 {
3249 tree dest = CALL_EXPR_ARG (exp, 0);
3250 tree src = CALL_EXPR_ARG (exp, 1);
3251 tree len = CALL_EXPR_ARG (exp, 2);
3252 return expand_builtin_mempcpy_args (dest, src, len,
3253 target, mode, /*endp=*/ 1,
3254 exp);
3255 }
3256 }
3257
3258 /* Expand an instrumented call EXP to the mempcpy builtin.
3259    Return NULL_RTX if we failed; the caller should emit a normal call,
3260 otherwise try to get the result in TARGET, if convenient (and in
3261 mode MODE if that's convenient). */
3262
3263 static rtx
3264 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3265 {
3266 if (!validate_arglist (exp,
3267 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3268 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3269 INTEGER_TYPE, VOID_TYPE))
3270 return NULL_RTX;
3271 else
3272 {
3273 tree dest = CALL_EXPR_ARG (exp, 0);
3274 tree src = CALL_EXPR_ARG (exp, 2);
3275 tree len = CALL_EXPR_ARG (exp, 4);
3276 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3277 mode, 1, exp);
3278
3279 /* Return src bounds with the result. */
3280 if (res)
3281 {
3282 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3283 expand_normal (CALL_EXPR_ARG (exp, 1)));
3284 res = chkp_join_splitted_slot (res, bnd);
3285 }
3286 return res;
3287 }
3288 }
3289
3290 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3291 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3292 so that this can also be called without constructing an actual CALL_EXPR.
3293 The other arguments and return value are the same as for
3294 expand_builtin_mempcpy. */
3295
3296 static rtx
3297 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3298 rtx target, machine_mode mode, int endp,
3299 tree orig_exp)
3300 {
3301 tree fndecl = get_callee_fndecl (orig_exp);
3302
3303 /* If return value is ignored, transform mempcpy into memcpy. */
3304 if (target == const0_rtx
3305 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3306 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3307 {
3308 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3309 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3310 dest, src, len);
3311 return expand_expr (result, target, mode, EXPAND_NORMAL);
3312 }
3313 else if (target == const0_rtx
3314 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3315 {
3316 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3317 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3318 dest, src, len);
3319 return expand_expr (result, target, mode, EXPAND_NORMAL);
3320 }
3321 else
3322 {
3323 const char *src_str;
3324 unsigned int src_align = get_pointer_alignment (src);
3325 unsigned int dest_align = get_pointer_alignment (dest);
3326 rtx dest_mem, src_mem, len_rtx;
3327
3328 /* If either SRC or DEST is not a pointer type, don't do this
3329 operation in-line. */
3330 if (dest_align == 0 || src_align == 0)
3331 return NULL_RTX;
3332
3333 /* If LEN is not constant, call the normal function. */
3334 if (! tree_fits_uhwi_p (len))
3335 return NULL_RTX;
3336
3337 len_rtx = expand_normal (len);
3338 src_str = c_getstr (src);
3339
3340 /* If SRC is a string constant and block move would be done
3341 by pieces, we can avoid loading the string from memory
3342 and only store the computed constants. */
3343 if (src_str
3344 && CONST_INT_P (len_rtx)
3345 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3346 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3347 CONST_CAST (char *, src_str),
3348 dest_align, false))
3349 {
3350 dest_mem = get_memory_rtx (dest, len);
3351 set_mem_align (dest_mem, dest_align);
3352 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3353 builtin_memcpy_read_str,
3354 CONST_CAST (char *, src_str),
3355 dest_align, false, endp);
3356 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3357 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3358 return dest_mem;
3359 }
3360
3361 if (CONST_INT_P (len_rtx)
3362 && can_move_by_pieces (INTVAL (len_rtx),
3363 MIN (dest_align, src_align)))
3364 {
3365 dest_mem = get_memory_rtx (dest, len);
3366 set_mem_align (dest_mem, dest_align);
3367 src_mem = get_memory_rtx (src, len);
3368 set_mem_align (src_mem, src_align);
3369 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3370 MIN (dest_align, src_align), endp);
3371 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3372 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3373 return dest_mem;
3374 }
3375
3376 return NULL_RTX;
3377 }
3378 }
3379
3380 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3381 we failed, the caller should emit a normal call, otherwise try to
3382 get the result in TARGET, if convenient. If ENDP is 0 return the
3383 destination pointer, if ENDP is 1 return the end pointer ala
3384 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3385 stpcpy. */
3386
3387 static rtx
3388 expand_movstr (tree dest, tree src, rtx target, int endp)
3389 {
3390 struct expand_operand ops[3];
3391 rtx dest_mem;
3392 rtx src_mem;
3393
3394 if (!targetm.have_movstr ())
3395 return NULL_RTX;
3396
3397 dest_mem = get_memory_rtx (dest, NULL);
3398 src_mem = get_memory_rtx (src, NULL);
3399 if (!endp)
3400 {
3401 target = force_reg (Pmode, XEXP (dest_mem, 0));
3402 dest_mem = replace_equiv_address (dest_mem, target);
3403 }
3404
3405 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3406 create_fixed_operand (&ops[1], dest_mem);
3407 create_fixed_operand (&ops[2], src_mem);
3408 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3409 return NULL_RTX;
3410
3411 if (endp && target != const0_rtx)
3412 {
3413 target = ops[0].value;
3414 /* movstr is supposed to set end to the address of the NUL
3415 terminator. If the caller requested a mempcpy-like return value,
3416 adjust it. */
3417 if (endp == 1)
3418 {
3419 rtx tem = plus_constant (GET_MODE (target),
3420 gen_lowpart (GET_MODE (target), target), 1);
3421 emit_move_insn (target, force_operand (tem, NULL_RTX));
3422 }
3423 }
3424 return target;
3425 }
3426
3427 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3428 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3429 try to get the result in TARGET, if convenient (and in mode MODE if that's
3430 convenient). */
3431
3432 static rtx
3433 expand_builtin_strcpy (tree exp, rtx target)
3434 {
3435 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3436 {
3437 tree dest = CALL_EXPR_ARG (exp, 0);
3438 tree src = CALL_EXPR_ARG (exp, 1);
3439 return expand_builtin_strcpy_args (dest, src, target);
3440 }
3441 return NULL_RTX;
3442 }
3443
3444 /* Helper function to do the actual work for expand_builtin_strcpy. The
3445 arguments to the builtin_strcpy call DEST and SRC are broken out
3446 so that this can also be called without constructing an actual CALL_EXPR.
3447 The other arguments and return value are the same as for
3448 expand_builtin_strcpy. */
3449
3450 static rtx
3451 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3452 {
3453 return expand_movstr (dest, src, target, /*endp=*/0);
3454 }
3455
3456 /* Expand a call EXP to the stpcpy builtin.
3457 Return NULL_RTX if we failed; the caller should emit a normal call,
3458 otherwise try to get the result in TARGET, if convenient (and in
3459 mode MODE if that's convenient). */
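/* For example (illustrative): when SRC is the string literal "abc", the
   code below turns stpcpy (d, "abc") into mempcpy (d, "abc", 4) expanded
   with ENDP == 2, so the value returned is d + 3, a pointer to the copied
   NUL terminator.  */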
3460
3461 static rtx
3462 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3463 {
3464 tree dst, src;
3465 location_t loc = EXPR_LOCATION (exp);
3466
3467 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3468 return NULL_RTX;
3469
3470 dst = CALL_EXPR_ARG (exp, 0);
3471 src = CALL_EXPR_ARG (exp, 1);
3472
3473 /* If return value is ignored, transform stpcpy into strcpy. */
3474 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3475 {
3476 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3477 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3478 return expand_expr (result, target, mode, EXPAND_NORMAL);
3479 }
3480 else
3481 {
3482 tree len, lenp1;
3483 rtx ret;
3484
3485 /* Ensure we get an actual string whose length can be evaluated at
3486 compile-time, not an expression containing a string. This is
3487 because the latter will potentially produce pessimized code
3488 when used to produce the return value. */
3489 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3490 return expand_movstr (dst, src, target, /*endp=*/2);
3491
3492 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3493 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3494 target, mode, /*endp=*/2,
3495 exp);
3496
3497 if (ret)
3498 return ret;
3499
3500 if (TREE_CODE (len) == INTEGER_CST)
3501 {
3502 rtx len_rtx = expand_normal (len);
3503
3504 if (CONST_INT_P (len_rtx))
3505 {
3506 ret = expand_builtin_strcpy_args (dst, src, target);
3507
3508 if (ret)
3509 {
3510 if (! target)
3511 {
3512 if (mode != VOIDmode)
3513 target = gen_reg_rtx (mode);
3514 else
3515 target = gen_reg_rtx (GET_MODE (ret));
3516 }
3517 if (GET_MODE (target) != GET_MODE (ret))
3518 ret = gen_lowpart (GET_MODE (target), ret);
3519
3520 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3521 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3522 gcc_assert (ret);
3523
3524 return target;
3525 }
3526 }
3527 }
3528
3529 return expand_movstr (dst, src, target, /*endp=*/2);
3530 }
3531 }
3532
3533 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3534 bytes from constant string DATA + OFFSET and return it as target
3535 constant. */
3536
3537 rtx
3538 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3539 machine_mode mode)
3540 {
3541 const char *str = (const char *) data;
3542
3543 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3544 return const0_rtx;
3545
3546 return c_readstr (str + offset, mode);
3547 }
3548
3549 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3550 NULL_RTX if we failed; the caller should emit a normal call. */
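/* For example (illustrative): strncpy (d, "ab", 5) must store the bytes
   'a', 'b', '\0', '\0', '\0'; when the length is a known constant the code
   below emits that padded block with store_by_pieces instead of calling
   the library routine.  */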
3551
3552 static rtx
3553 expand_builtin_strncpy (tree exp, rtx target)
3554 {
3555 location_t loc = EXPR_LOCATION (exp);
3556
3557 if (validate_arglist (exp,
3558 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3559 {
3560 tree dest = CALL_EXPR_ARG (exp, 0);
3561 tree src = CALL_EXPR_ARG (exp, 1);
3562 tree len = CALL_EXPR_ARG (exp, 2);
3563 tree slen = c_strlen (src, 1);
3564
3565 /* We must be passed a constant len and src parameter. */
3566 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3567 return NULL_RTX;
3568
3569 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3570
3571 /* We're required to pad with trailing zeros if the requested
3572 len is greater than strlen(s2)+1. In that case try to
3573 use store_by_pieces; if it fails, punt. */
3574 if (tree_int_cst_lt (slen, len))
3575 {
3576 unsigned int dest_align = get_pointer_alignment (dest);
3577 const char *p = c_getstr (src);
3578 rtx dest_mem;
3579
3580 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3581 || !can_store_by_pieces (tree_to_uhwi (len),
3582 builtin_strncpy_read_str,
3583 CONST_CAST (char *, p),
3584 dest_align, false))
3585 return NULL_RTX;
3586
3587 dest_mem = get_memory_rtx (dest, len);
3588 store_by_pieces (dest_mem, tree_to_uhwi (len),
3589 builtin_strncpy_read_str,
3590 CONST_CAST (char *, p), dest_align, false, 0);
3591 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3592 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3593 return dest_mem;
3594 }
3595 }
3596 return NULL_RTX;
3597 }
3598
3599 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3600 bytes from constant string DATA + OFFSET and return it as target
3601 constant. */
3602
3603 rtx
3604 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3605 machine_mode mode)
3606 {
3607 const char *c = (const char *) data;
3608 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3609
3610 memset (p, *c, GET_MODE_SIZE (mode));
3611
3612 return c_readstr (p, mode);
3613 }
3614
3615 /* Callback routine for store_by_pieces. Return the RTL of a register
3616 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3617 char value given in the RTL register data. For example, if mode is
3618 4 bytes wide, return the RTL for 0x01010101*data. */
3619
3620 static rtx
3621 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3622 machine_mode mode)
3623 {
3624 rtx target, coeff;
3625 size_t size;
3626 char *p;
3627
3628 size = GET_MODE_SIZE (mode);
3629 if (size == 1)
3630 return (rtx) data;
3631
3632 p = XALLOCAVEC (char, size);
3633 memset (p, 1, size);
3634 coeff = c_readstr (p, mode);
3635
3636 target = convert_to_mode (mode, (rtx) data, 1);
3637 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3638 return force_reg (mode, target);
3639 }
3640
3641 /* Expand expression EXP, which is a call to the memset builtin. Return
3642 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3643 try to get the result in TARGET, if convenient (and in mode MODE if that's
3644 convenient). */
3645
3646 static rtx
3647 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3648 {
3649 if (!validate_arglist (exp,
3650 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3651 return NULL_RTX;
3652 else
3653 {
3654 tree dest = CALL_EXPR_ARG (exp, 0);
3655 tree val = CALL_EXPR_ARG (exp, 1);
3656 tree len = CALL_EXPR_ARG (exp, 2);
3657 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3658 }
3659 }
3660
3661 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3662 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3663 try to get the result in TARGET, if convenient (and in mode MODE if that's
3664 convenient). */
3665
3666 static rtx
3667 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3668 {
3669 if (!validate_arglist (exp,
3670 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3671 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3672 return NULL_RTX;
3673 else
3674 {
3675 tree dest = CALL_EXPR_ARG (exp, 0);
3676 tree val = CALL_EXPR_ARG (exp, 2);
3677 tree len = CALL_EXPR_ARG (exp, 3);
3678 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3679
3680 /* Return src bounds with the result. */
3681 if (res)
3682 {
3683 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3684 expand_normal (CALL_EXPR_ARG (exp, 1)));
3685 res = chkp_join_splitted_slot (res, bnd);
3686 }
3687 return res;
3688 }
3689 }
3690
3691 /* Helper function to do the actual work for expand_builtin_memset. The
3692 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3693 so that this can also be called without constructing an actual CALL_EXPR.
3694 The other arguments and return value are the same as for
3695 expand_builtin_memset. */
3696
3697 static rtx
3698 expand_builtin_memset_args (tree dest, tree val, tree len,
3699 rtx target, machine_mode mode, tree orig_exp)
3700 {
3701 tree fndecl, fn;
3702 enum built_in_function fcode;
3703 machine_mode val_mode;
3704 char c;
3705 unsigned int dest_align;
3706 rtx dest_mem, dest_addr, len_rtx;
3707 HOST_WIDE_INT expected_size = -1;
3708 unsigned int expected_align = 0;
3709 unsigned HOST_WIDE_INT min_size;
3710 unsigned HOST_WIDE_INT max_size;
3711 unsigned HOST_WIDE_INT probable_max_size;
3712
3713 dest_align = get_pointer_alignment (dest);
3714
3715 /* If DEST is not a pointer type, don't do this operation in-line. */
3716 if (dest_align == 0)
3717 return NULL_RTX;
3718
3719 if (currently_expanding_gimple_stmt)
3720 stringop_block_profile (currently_expanding_gimple_stmt,
3721 &expected_align, &expected_size);
3722
3723 if (expected_align < dest_align)
3724 expected_align = dest_align;
3725
3726 /* If the LEN parameter is zero, return DEST. */
3727 if (integer_zerop (len))
3728 {
3729 /* Evaluate and ignore VAL in case it has side-effects. */
3730 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3731 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3732 }
3733
3734 /* Stabilize the arguments in case we fail. */
3735 dest = builtin_save_expr (dest);
3736 val = builtin_save_expr (val);
3737 len = builtin_save_expr (len);
3738
3739 len_rtx = expand_normal (len);
3740 determine_block_size (len, len_rtx, &min_size, &max_size,
3741 &probable_max_size);
3742 dest_mem = get_memory_rtx (dest, len);
3743 val_mode = TYPE_MODE (unsigned_char_type_node);
3744
3745 if (TREE_CODE (val) != INTEGER_CST)
3746 {
3747 rtx val_rtx;
3748
3749 val_rtx = expand_normal (val);
3750 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3751
3752 /* Assume that we can memset by pieces if we can store
3753 the coefficients by pieces (in the required modes).
3754 We can't pass builtin_memset_gen_str as that emits RTL. */
3755 c = 1;
3756 if (tree_fits_uhwi_p (len)
3757 && can_store_by_pieces (tree_to_uhwi (len),
3758 builtin_memset_read_str, &c, dest_align,
3759 true))
3760 {
3761 val_rtx = force_reg (val_mode, val_rtx);
3762 store_by_pieces (dest_mem, tree_to_uhwi (len),
3763 builtin_memset_gen_str, val_rtx, dest_align,
3764 true, 0);
3765 }
3766 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3767 dest_align, expected_align,
3768 expected_size, min_size, max_size,
3769 probable_max_size))
3770 goto do_libcall;
3771
3772 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3773 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3774 return dest_mem;
3775 }
3776
3777 if (target_char_cast (val, &c))
3778 goto do_libcall;
3779
3780 if (c)
3781 {
3782 if (tree_fits_uhwi_p (len)
3783 && can_store_by_pieces (tree_to_uhwi (len),
3784 builtin_memset_read_str, &c, dest_align,
3785 true))
3786 store_by_pieces (dest_mem, tree_to_uhwi (len),
3787 builtin_memset_read_str, &c, dest_align, true, 0);
3788 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3789 gen_int_mode (c, val_mode),
3790 dest_align, expected_align,
3791 expected_size, min_size, max_size,
3792 probable_max_size))
3793 goto do_libcall;
3794
3795 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3796 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3797 return dest_mem;
3798 }
3799
3800 set_mem_align (dest_mem, dest_align);
3801 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3802 CALL_EXPR_TAILCALL (orig_exp)
3803 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3804 expected_align, expected_size,
3805 min_size, max_size,
3806 probable_max_size);
3807
3808 if (dest_addr == 0)
3809 {
3810 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3811 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3812 }
3813
3814 return dest_addr;
3815
3816 do_libcall:
3817 fndecl = get_callee_fndecl (orig_exp);
3818 fcode = DECL_FUNCTION_CODE (fndecl);
3819 if (fcode == BUILT_IN_MEMSET
3820 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3821 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3822 dest, val, len);
3823 else if (fcode == BUILT_IN_BZERO)
3824 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3825 dest, len);
3826 else
3827 gcc_unreachable ();
3828 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3829 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3830 return expand_call (fn, target, target == const0_rtx);
3831 }
3832
3833 /* Expand expression EXP, which is a call to the bzero builtin. Return
3834 NULL_RTX if we failed; the caller should emit a normal call. */
3835
3836 static rtx
3837 expand_builtin_bzero (tree exp)
3838 {
3839 tree dest, size;
3840 location_t loc = EXPR_LOCATION (exp);
3841
3842 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3843 return NULL_RTX;
3844
3845 dest = CALL_EXPR_ARG (exp, 0);
3846 size = CALL_EXPR_ARG (exp, 1);
3847
3848 /* New argument list transforming bzero(ptr x, int y) to
3849 memset(ptr x, int 0, size_t y). This is done this way
3850 so that if it isn't expanded inline, we fall back to
3851 calling bzero instead of memset. */
3852
3853 return expand_builtin_memset_args (dest, integer_zero_node,
3854 fold_convert_loc (loc,
3855 size_type_node, size),
3856 const0_rtx, VOIDmode, exp);
3857 }
3858
3859 /* Try to expand cmpstr operation ICODE with the given operands.
3860 Return the result rtx on success, otherwise return null. */
3861
3862 static rtx
3863 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3864 HOST_WIDE_INT align)
3865 {
3866 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3867
3868 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3869 target = NULL_RTX;
3870
3871 struct expand_operand ops[4];
3872 create_output_operand (&ops[0], target, insn_mode);
3873 create_fixed_operand (&ops[1], arg1_rtx);
3874 create_fixed_operand (&ops[2], arg2_rtx);
3875 create_integer_operand (&ops[3], align);
3876 if (maybe_expand_insn (icode, 4, ops))
3877 return ops[0].value;
3878 return NULL_RTX;
3879 }
3880
3881 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3882 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3883 otherwise return null. */
3884
3885 static rtx
3886 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3887 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3888 HOST_WIDE_INT align)
3889 {
3890 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3891
3892 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3893 target = NULL_RTX;
3894
3895 struct expand_operand ops[5];
3896 create_output_operand (&ops[0], target, insn_mode);
3897 create_fixed_operand (&ops[1], arg1_rtx);
3898 create_fixed_operand (&ops[2], arg2_rtx);
3899 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3900 TYPE_UNSIGNED (arg3_type));
3901 create_integer_operand (&ops[4], align);
3902 if (maybe_expand_insn (icode, 5, ops))
3903 return ops[0].value;
3904 return NULL_RTX;
3905 }
3906
3907 /* Expand expression EXP, which is a call to the memcmp built-in function.
3908 Return NULL_RTX if we failed and the caller should emit a normal call,
3909 otherwise try to get the result in TARGET, if convenient. */
3910
3911 static rtx
3912 expand_builtin_memcmp (tree exp, rtx target)
3913 {
3914 if (!validate_arglist (exp,
3915 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3916 return NULL_RTX;
3917
3918 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3919 implementing memcmp because it will stop if it encounters two
3920 zero bytes. */
3921 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3922 if (icode == CODE_FOR_nothing)
3923 return NULL_RTX;
3924
3925 tree arg1 = CALL_EXPR_ARG (exp, 0);
3926 tree arg2 = CALL_EXPR_ARG (exp, 1);
3927 tree len = CALL_EXPR_ARG (exp, 2);
3928
3929 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3930 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3931
3932 /* If either argument is not a pointer type, just call the function. */
3933 if (arg1_align == 0 || arg2_align == 0)
3934 return NULL_RTX;
3935
3936 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3937 location_t loc = EXPR_LOCATION (exp);
3938 rtx arg1_rtx = get_memory_rtx (arg1, len);
3939 rtx arg2_rtx = get_memory_rtx (arg2, len);
3940 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3941
3942 /* Set MEM_SIZE as appropriate. */
3943 if (CONST_INT_P (arg3_rtx))
3944 {
3945 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3946 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3947 }
3948
3949 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3950 TREE_TYPE (len), arg3_rtx,
3951 MIN (arg1_align, arg2_align));
3952 if (result)
3953 {
3954 /* Return the value in the proper mode for this function. */
3955 if (GET_MODE (result) == mode)
3956 return result;
3957
3958 if (target != 0)
3959 {
3960 convert_move (target, result, 0);
3961 return target;
3962 }
3963
3964 return convert_to_mode (mode, result, 0);
3965 }
3966
3967 result = target;
3968 if (! (result != 0
3969 && REG_P (result) && GET_MODE (result) == mode
3970 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3971 result = gen_reg_rtx (mode);
3972
3973 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3974 TYPE_MODE (integer_type_node), 3,
3975 XEXP (arg1_rtx, 0), Pmode,
3976 XEXP (arg2_rtx, 0), Pmode,
3977 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3978 TYPE_UNSIGNED (sizetype)),
3979 TYPE_MODE (sizetype));
3980 return result;
3981 }
3982
3983 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3984 if we failed; the caller should emit a normal call, otherwise try to get
3985 the result in TARGET, if convenient. */
3986
3987 static rtx
3988 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3989 {
3990 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3991 return NULL_RTX;
3992
3993 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3994 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3995 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3996 {
3997 rtx arg1_rtx, arg2_rtx;
3998 tree fndecl, fn;
3999 tree arg1 = CALL_EXPR_ARG (exp, 0);
4000 tree arg2 = CALL_EXPR_ARG (exp, 1);
4001 rtx result = NULL_RTX;
4002
4003 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4004 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4005
4006 /* If either argument is not a pointer type, just call the function. */
4007 if (arg1_align == 0 || arg2_align == 0)
4008 return NULL_RTX;
4009
4010 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4011 arg1 = builtin_save_expr (arg1);
4012 arg2 = builtin_save_expr (arg2);
4013
4014 arg1_rtx = get_memory_rtx (arg1, NULL);
4015 arg2_rtx = get_memory_rtx (arg2, NULL);
4016
4017 /* Try to call cmpstrsi. */
4018 if (cmpstr_icode != CODE_FOR_nothing)
4019 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4020 MIN (arg1_align, arg2_align));
4021
4022 /* Try to determine at least one length and call cmpstrnsi. */
4023 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4024 {
4025 tree len;
4026 rtx arg3_rtx;
4027
4028 tree len1 = c_strlen (arg1, 1);
4029 tree len2 = c_strlen (arg2, 1);
4030
4031 if (len1)
4032 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4033 if (len2)
4034 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4035
4036 /* If we don't have a constant length for the first, use the length
4037 of the second, if we know it. We don't require a constant for
4038 this case; some cost analysis could be done if both are available
4039 but neither is constant. For now, assume they're equally cheap,
4040 unless one has side effects. If both strings have constant lengths,
4041 use the smaller. */
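/* For example (illustrative): for strcmp (s, "hi") with an unknown S,
   len1 is unknown and len2 becomes 3 (strlen ("hi") + 1), so LEN is 3
   and cmpstrnsi compares at most 3 bytes.  */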
4042
4043 if (!len1)
4044 len = len2;
4045 else if (!len2)
4046 len = len1;
4047 else if (TREE_SIDE_EFFECTS (len1))
4048 len = len2;
4049 else if (TREE_SIDE_EFFECTS (len2))
4050 len = len1;
4051 else if (TREE_CODE (len1) != INTEGER_CST)
4052 len = len2;
4053 else if (TREE_CODE (len2) != INTEGER_CST)
4054 len = len1;
4055 else if (tree_int_cst_lt (len1, len2))
4056 len = len1;
4057 else
4058 len = len2;
4059
4060 /* If both arguments have side effects, we cannot optimize. */
4061 if (len && !TREE_SIDE_EFFECTS (len))
4062 {
4063 arg3_rtx = expand_normal (len);
4064 result = expand_cmpstrn_or_cmpmem
4065 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4066 arg3_rtx, MIN (arg1_align, arg2_align));
4067 }
4068 }
4069
4070 if (result)
4071 {
4072 /* Return the value in the proper mode for this function. */
4073 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4074 if (GET_MODE (result) == mode)
4075 return result;
4076 if (target == 0)
4077 return convert_to_mode (mode, result, 0);
4078 convert_move (target, result, 0);
4079 return target;
4080 }
4081
4082 /* Expand the library call ourselves using a stabilized argument
4083 list to avoid re-evaluating the function's arguments twice. */
4084 fndecl = get_callee_fndecl (exp);
4085 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4086 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4087 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4088 return expand_call (fn, target, target == const0_rtx);
4089 }
4090 return NULL_RTX;
4091 }
4092
4093 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4094 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4095 the result in TARGET, if convenient. */
4096
4097 static rtx
4098 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4099 ATTRIBUTE_UNUSED machine_mode mode)
4100 {
4101 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4102
4103 if (!validate_arglist (exp,
4104 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4105 return NULL_RTX;
4106
4107 /* If c_strlen can determine an expression for one of the string
4108 lengths, and it doesn't have side effects, then emit cmpstrnsi
4109 using length MIN(strlen(string)+1, arg3). */
4110 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4111 if (cmpstrn_icode != CODE_FOR_nothing)
4112 {
4113 tree len, len1, len2;
4114 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4115 rtx result;
4116 tree fndecl, fn;
4117 tree arg1 = CALL_EXPR_ARG (exp, 0);
4118 tree arg2 = CALL_EXPR_ARG (exp, 1);
4119 tree arg3 = CALL_EXPR_ARG (exp, 2);
4120
4121 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4122 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4123
4124 len1 = c_strlen (arg1, 1);
4125 len2 = c_strlen (arg2, 1);
4126
4127 if (len1)
4128 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4129 if (len2)
4130 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4131
4132 /* If we don't have a constant length for the first, use the length
4133 of the second, if we know it. We don't require a constant for
4134 this case; some cost analysis could be done if both are available
4135 but neither is constant. For now, assume they're equally cheap,
4136 unless one has side effects. If both strings have constant lengths,
4137 use the smaller. */
4138
4139 if (!len1)
4140 len = len2;
4141 else if (!len2)
4142 len = len1;
4143 else if (TREE_SIDE_EFFECTS (len1))
4144 len = len2;
4145 else if (TREE_SIDE_EFFECTS (len2))
4146 len = len1;
4147 else if (TREE_CODE (len1) != INTEGER_CST)
4148 len = len2;
4149 else if (TREE_CODE (len2) != INTEGER_CST)
4150 len = len1;
4151 else if (tree_int_cst_lt (len1, len2))
4152 len = len1;
4153 else
4154 len = len2;
4155
4156 /* If both arguments have side effects, we cannot optimize. */
4157 if (!len || TREE_SIDE_EFFECTS (len))
4158 return NULL_RTX;
4159
4160 /* The actual new length parameter is MIN(len,arg3). */
4161 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4162 fold_convert_loc (loc, TREE_TYPE (len), arg3));
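/* For example (illustrative): strncmp (s, "hi", n) gives len2 == 3 above,
   so the length passed to cmpstrnsi is MIN (3, n).  */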
4163
4164 /* If either argument is not a pointer type, just call the function. */
4165 if (arg1_align == 0 || arg2_align == 0)
4166 return NULL_RTX;
4167
4168 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4169 arg1 = builtin_save_expr (arg1);
4170 arg2 = builtin_save_expr (arg2);
4171 len = builtin_save_expr (len);
4172
4173 arg1_rtx = get_memory_rtx (arg1, len);
4174 arg2_rtx = get_memory_rtx (arg2, len);
4175 arg3_rtx = expand_normal (len);
4176 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4177 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4178 MIN (arg1_align, arg2_align));
4179 if (result)
4180 {
4181 /* Return the value in the proper mode for this function. */
4182 mode = TYPE_MODE (TREE_TYPE (exp));
4183 if (GET_MODE (result) == mode)
4184 return result;
4185 if (target == 0)
4186 return convert_to_mode (mode, result, 0);
4187 convert_move (target, result, 0);
4188 return target;
4189 }
4190
4191 /* Expand the library call ourselves using a stabilized argument
4192 list to avoid re-evaluating the function's arguments twice. */
4193 fndecl = get_callee_fndecl (exp);
4194 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4195 arg1, arg2, len);
4196 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4197 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4198 return expand_call (fn, target, target == const0_rtx);
4199 }
4200 return NULL_RTX;
4201 }
4202
4203 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4204 if that's convenient. */
4205
4206 rtx
4207 expand_builtin_saveregs (void)
4208 {
4209 rtx val;
4210 rtx_insn *seq;
4211
4212 /* Don't do __builtin_saveregs more than once in a function.
4213 Save the result of the first call and reuse it. */
4214 if (saveregs_value != 0)
4215 return saveregs_value;
4216
4217 /* When this function is called, it means that registers must be
4218 saved on entry to this function. So we migrate the call to the
4219 first insn of this function. */
4220
4221 start_sequence ();
4222
4223 /* Do whatever the machine needs done in this case. */
4224 val = targetm.calls.expand_builtin_saveregs ();
4225
4226 seq = get_insns ();
4227 end_sequence ();
4228
4229 saveregs_value = val;
4230
4231 /* Put the insns after the NOTE that starts the function. If this
4232 is inside a start_sequence, make the outer-level insn chain current, so
4233 the code is placed at the start of the function. */
4234 push_topmost_sequence ();
4235 emit_insn_after (seq, entry_of_function ());
4236 pop_topmost_sequence ();
4237
4238 return val;
4239 }
4240
4241 /* Expand a call to __builtin_next_arg. */
4242
4243 static rtx
4244 expand_builtin_next_arg (void)
4245 {
4246 /* Argument checking is already done in fold_builtin_next_arg,
4247 which must be called before this function. */
4248 return expand_binop (ptr_mode, add_optab,
4249 crtl->args.internal_arg_pointer,
4250 crtl->args.arg_offset_rtx,
4251 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4252 }
4253
4254 /* Make it easier for the backends by protecting the valist argument
4255 from multiple evaluations. */
4256
4257 static tree
4258 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4259 {
4260 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4261
4262 /* The current way of determining the type of valist is completely
4263 bogus. We should have the information on the va builtin instead. */
4264 if (!vatype)
4265 vatype = targetm.fn_abi_va_list (cfun->decl);
4266
4267 if (TREE_CODE (vatype) == ARRAY_TYPE)
4268 {
4269 if (TREE_SIDE_EFFECTS (valist))
4270 valist = save_expr (valist);
4271
4272 /* For this case, the backends will be expecting a pointer to
4273 vatype, but it's possible we've actually been given an array
4274 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4275 So fix it. */
4276 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4277 {
4278 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4279 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4280 }
4281 }
4282 else
4283 {
4284 tree pt = build_pointer_type (vatype);
4285
4286 if (! needs_lvalue)
4287 {
4288 if (! TREE_SIDE_EFFECTS (valist))
4289 return valist;
4290
4291 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4292 TREE_SIDE_EFFECTS (valist) = 1;
4293 }
4294
4295 if (TREE_SIDE_EFFECTS (valist))
4296 valist = save_expr (valist);
4297 valist = fold_build2_loc (loc, MEM_REF,
4298 vatype, valist, build_int_cst (pt, 0));
4299 }
4300
4301 return valist;
4302 }
4303
4304 /* The "standard" definition of va_list is void*. */
4305
4306 tree
4307 std_build_builtin_va_list (void)
4308 {
4309 return ptr_type_node;
4310 }
4311
4312 /* The "standard" abi va_list is va_list_type_node. */
4313
4314 tree
4315 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4316 {
4317 return va_list_type_node;
4318 }
4319
4320 /* The "standard" type of va_list is va_list_type_node. */
4321
4322 tree
4323 std_canonical_va_list_type (tree type)
4324 {
4325 tree wtype, htype;
4326
4327 if (INDIRECT_REF_P (type))
4328 type = TREE_TYPE (type);
4329 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4330 type = TREE_TYPE (type);
4331 wtype = va_list_type_node;
4332 htype = type;
4333 /* Treat structure va_list types. */
4334 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4335 htype = TREE_TYPE (htype);
4336 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4337 {
4338 /* If va_list is an array type, the argument may have decayed
4339 to a pointer type, e.g. by being passed to another function.
4340 In that case, unwrap both types so that we can compare the
4341 underlying records. */
4342 if (TREE_CODE (htype) == ARRAY_TYPE
4343 || POINTER_TYPE_P (htype))
4344 {
4345 wtype = TREE_TYPE (wtype);
4346 htype = TREE_TYPE (htype);
4347 }
4348 }
4349 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4350 return va_list_type_node;
4351
4352 return NULL_TREE;
4353 }
4354
4355 /* The "standard" implementation of va_start: just assign `nextarg' to
4356 the variable. */
4357
4358 void
4359 std_expand_builtin_va_start (tree valist, rtx nextarg)
4360 {
4361 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4362 convert_move (va_r, nextarg, 0);
4363
4364 /* We do not have any valid bounds for the pointer, so
4365 just store zero bounds for it. */
4366 if (chkp_function_instrumented_p (current_function_decl))
4367 chkp_expand_bounds_reset_for_mem (valist,
4368 make_tree (TREE_TYPE (valist),
4369 nextarg));
4370 }
4371
4372 /* Expand EXP, a call to __builtin_va_start. */
4373
4374 static rtx
4375 expand_builtin_va_start (tree exp)
4376 {
4377 rtx nextarg;
4378 tree valist;
4379 location_t loc = EXPR_LOCATION (exp);
4380
4381 if (call_expr_nargs (exp) < 2)
4382 {
4383 error_at (loc, "too few arguments to function %<va_start%>");
4384 return const0_rtx;
4385 }
4386
4387 if (fold_builtin_next_arg (exp, true))
4388 return const0_rtx;
4389
4390 nextarg = expand_builtin_next_arg ();
4391 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4392
4393 if (targetm.expand_builtin_va_start)
4394 targetm.expand_builtin_va_start (valist, nextarg);
4395 else
4396 std_expand_builtin_va_start (valist, nextarg);
4397
4398 return const0_rtx;
4399 }
4400
4401 /* Expand EXP, a call to __builtin_va_end. */
4402
4403 static rtx
4404 expand_builtin_va_end (tree exp)
4405 {
4406 tree valist = CALL_EXPR_ARG (exp, 0);
4407
4408 /* Evaluate for side effects, if needed. I hate macros that don't
4409 do that. */
4410 if (TREE_SIDE_EFFECTS (valist))
4411 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4412
4413 return const0_rtx;
4414 }
4415
4416 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4417 builtin rather than just as an assignment in stdarg.h because of the
4418 nastiness of array-type va_list types. */
4419
4420 static rtx
4421 expand_builtin_va_copy (tree exp)
4422 {
4423 tree dst, src, t;
4424 location_t loc = EXPR_LOCATION (exp);
4425
4426 dst = CALL_EXPR_ARG (exp, 0);
4427 src = CALL_EXPR_ARG (exp, 1);
4428
4429 dst = stabilize_va_list_loc (loc, dst, 1);
4430 src = stabilize_va_list_loc (loc, src, 0);
4431
4432 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4433
4434 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4435 {
4436 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4437 TREE_SIDE_EFFECTS (t) = 1;
4438 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4439 }
4440 else
4441 {
4442 rtx dstb, srcb, size;
4443
4444 /* Evaluate to pointers. */
4445 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4446 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4447 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4448 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4449
4450 dstb = convert_memory_address (Pmode, dstb);
4451 srcb = convert_memory_address (Pmode, srcb);
4452
4453 /* "Dereference" to BLKmode memories. */
4454 dstb = gen_rtx_MEM (BLKmode, dstb);
4455 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4456 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4457 srcb = gen_rtx_MEM (BLKmode, srcb);
4458 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4459 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4460
4461 /* Copy. */
4462 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4463 }
4464
4465 return const0_rtx;
4466 }
4467
4468 /* Expand a call to one of the builtin functions __builtin_frame_address or
4469 __builtin_return_address. */
4470
4471 static rtx
4472 expand_builtin_frame_address (tree fndecl, tree exp)
4473 {
4474 /* The argument must be a nonnegative integer constant.
4475 It counts the number of frames to scan up the stack.
4476 The value is either the frame pointer value or the return
4477 address saved in that frame. */
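/* For example (illustrative): __builtin_return_address (0) returns the
   address the current function will return to, while a COUNT of 1 walks
   one frame further up; that is why a nonzero COUNT is diagnosed as
   unsafe below.  */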
4478 if (call_expr_nargs (exp) == 0)
4479 /* Warning about missing arg was already issued. */
4480 return const0_rtx;
4481 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4482 {
4483 error ("invalid argument to %qD", fndecl);
4484 return const0_rtx;
4485 }
4486 else
4487 {
4488 /* Number of frames to scan up the stack. */
4489 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4490
4491 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4492
4493 /* Some ports cannot access arbitrary stack frames. */
4494 if (tem == NULL)
4495 {
4496 warning (0, "unsupported argument to %qD", fndecl);
4497 return const0_rtx;
4498 }
4499
4500 if (count)
4501 {
4502 /* Warn since no effort is made to ensure that any frame
4503 beyond the current one exists or can be safely reached. */
4504 warning (OPT_Wframe_address, "calling %qD with "
4505 "a nonzero argument is unsafe", fndecl);
4506 }
4507
4508 /* For __builtin_frame_address, return what we've got. */
4509 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4510 return tem;
4511
4512 if (!REG_P (tem)
4513 && ! CONSTANT_P (tem))
4514 tem = copy_addr_to_reg (tem);
4515 return tem;
4516 }
4517 }
4518
4519 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4520 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4521 is the same as for allocate_dynamic_stack_space. */
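/* For example (illustrative): __builtin_alloca_with_align (n, 128)
   requests N bytes aligned to 128 bits (the second argument is a bit
   alignment), whereas plain __builtin_alloca (n) uses BIGGEST_ALIGNMENT
   below.  */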
4522
4523 static rtx
4524 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4525 {
4526 rtx op0;
4527 rtx result;
4528 bool valid_arglist;
4529 unsigned int align;
4530 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4531 == BUILT_IN_ALLOCA_WITH_ALIGN);
4532
4533 valid_arglist
4534 = (alloca_with_align
4535 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4536 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4537
4538 if (!valid_arglist)
4539 return NULL_RTX;
4540
4541 /* Compute the argument. */
4542 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4543
4544 /* Compute the alignment. */
4545 align = (alloca_with_align
4546 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4547 : BIGGEST_ALIGNMENT);
4548
4549 /* Allocate the desired space. */
4550 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4551 result = convert_memory_address (ptr_mode, result);
4552
4553 return result;
4554 }
4555
4556 /* Expand a call to bswap builtin in EXP.
4557 Return NULL_RTX if a normal call should be emitted rather than expanding the
4558 function in-line. If convenient, the result should be placed in TARGET.
4559 SUBTARGET may be used as the target for computing one of EXP's operands. */
4560
4561 static rtx
4562 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4563 rtx subtarget)
4564 {
4565 tree arg;
4566 rtx op0;
4567
4568 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4569 return NULL_RTX;
4570
4571 arg = CALL_EXPR_ARG (exp, 0);
4572 op0 = expand_expr (arg,
4573 subtarget && GET_MODE (subtarget) == target_mode
4574 ? subtarget : NULL_RTX,
4575 target_mode, EXPAND_NORMAL);
4576 if (GET_MODE (op0) != target_mode)
4577 op0 = convert_to_mode (target_mode, op0, 1);
4578
4579 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4580
4581 gcc_assert (target);
4582
4583 return convert_to_mode (target_mode, target, 1);
4584 }
4585
4586 /* Expand a call to a unary builtin in EXP.
4587 Return NULL_RTX if a normal call should be emitted rather than expanding the
4588 function in-line. If convenient, the result should be placed in TARGET.
4589 SUBTARGET may be used as the target for computing one of EXP's operands. */
4590
4591 static rtx
4592 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4593 rtx subtarget, optab op_optab)
4594 {
4595 rtx op0;
4596
4597 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4598 return NULL_RTX;
4599
4600 /* Compute the argument. */
4601 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4602 (subtarget
4603 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4604 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4605 VOIDmode, EXPAND_NORMAL);
4606 /* Compute op, into TARGET if possible.
4607 Set TARGET to wherever the result comes back. */
4608 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4609 op_optab, op0, target, op_optab != clrsb_optab);
4610 gcc_assert (target);
4611
4612 return convert_to_mode (target_mode, target, 0);
4613 }
4614
4615 /* Expand a call to __builtin_expect. We just return our argument
4616 because the builtin_expect semantics should already have been handled
4617 by the tree branch prediction pass. */
4618
4619 static rtx
4620 expand_builtin_expect (tree exp, rtx target)
4621 {
4622 tree arg;
4623
4624 if (call_expr_nargs (exp) < 2)
4625 return const0_rtx;
4626 arg = CALL_EXPR_ARG (exp, 0);
4627
4628 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4629 /* When guessing was done, the hints should already have been stripped away. */
4630 gcc_assert (!flag_guess_branch_prob
4631 || optimize == 0 || seen_error ());
4632 return target;
4633 }
4634
4635 /* Expand a call to __builtin_assume_aligned. We just return our first
4636 argument because the builtin_assume_aligned semantics should already
4637 have been handled by CCP. */
4638
4639 static rtx
4640 expand_builtin_assume_aligned (tree exp, rtx target)
4641 {
4642 if (call_expr_nargs (exp) < 2)
4643 return const0_rtx;
4644 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4645 EXPAND_NORMAL);
4646 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4647 && (call_expr_nargs (exp) < 3
4648 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4649 return target;
4650 }
4651
4652 void
4653 expand_builtin_trap (void)
4654 {
4655 if (targetm.have_trap ())
4656 {
4657 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4658 /* For trap insns when not accumulating outgoing args force
4659 REG_ARGS_SIZE note to prevent crossjumping of calls with
4660 different args sizes. */
4661 if (!ACCUMULATE_OUTGOING_ARGS)
4662 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4663 }
4664 else
4665 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4666 emit_barrier ();
4667 }
4668
4669 /* Expand a call to __builtin_unreachable. We do nothing except emit
4670 a barrier saying that control flow will not pass here.
4671
4672 It is the responsibility of the program being compiled to ensure
4673 that control flow never reaches __builtin_unreachable. */
4674 static void
4675 expand_builtin_unreachable (void)
4676 {
4677 emit_barrier ();
4678 }
4679
4680 /* Expand EXP, a call to fabs, fabsf or fabsl.
4681 Return NULL_RTX if a normal call should be emitted rather than expanding
4682 the function inline. If convenient, the result should be placed
4683 in TARGET. SUBTARGET may be used as the target for computing
4684 the operand. */
4685
4686 static rtx
4687 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4688 {
4689 machine_mode mode;
4690 tree arg;
4691 rtx op0;
4692
4693 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4694 return NULL_RTX;
4695
4696 arg = CALL_EXPR_ARG (exp, 0);
4697 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4698 mode = TYPE_MODE (TREE_TYPE (arg));
4699 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4700 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4701 }
4702
4703 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4704 Return NULL if a normal call should be emitted rather than expanding the
4705 function inline. If convenient, the result should be placed in TARGET.
4706 SUBTARGET may be used as the target for computing the operand. */
4707
4708 static rtx
4709 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4710 {
4711 rtx op0, op1;
4712 tree arg;
4713
4714 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4715 return NULL_RTX;
4716
4717 arg = CALL_EXPR_ARG (exp, 0);
4718 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4719
4720 arg = CALL_EXPR_ARG (exp, 1);
4721 op1 = expand_normal (arg);
4722
4723 return expand_copysign (op0, op1, target);
4724 }
4725
4726 /* Expand a call to __builtin___clear_cache. */
4727
4728 static rtx
4729 expand_builtin___clear_cache (tree exp)
4730 {
4731 if (!targetm.code_for_clear_cache)
4732 {
4733 #ifdef CLEAR_INSN_CACHE
4734 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4735 does something. Just do the default expansion to a call to
4736 __clear_cache(). */
4737 return NULL_RTX;
4738 #else
4739 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4740 does nothing. There is no need to call it. Do nothing. */
4741 return const0_rtx;
4742 #endif /* CLEAR_INSN_CACHE */
4743 }
4744
4745 /* We have a "clear_cache" insn, and it will handle everything. */
4746 tree begin, end;
4747 rtx begin_rtx, end_rtx;
4748
4749 /* We must not expand to a library call. If we did, any
4750 fallback library function in libgcc that might contain a call to
4751 __builtin___clear_cache() would recurse infinitely. */
4752 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4753 {
4754 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4755 return const0_rtx;
4756 }
4757
4758 if (targetm.have_clear_cache ())
4759 {
4760 struct expand_operand ops[2];
4761
4762 begin = CALL_EXPR_ARG (exp, 0);
4763 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4764
4765 end = CALL_EXPR_ARG (exp, 1);
4766 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4767
4768 create_address_operand (&ops[0], begin_rtx);
4769 create_address_operand (&ops[1], end_rtx);
4770 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4771 return const0_rtx;
4772 }
4773 return const0_rtx;
4774 }
4775
4776 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
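/* For example (illustrative): with a TRAMPOLINE_ALIGNMENT of 64 bits the
   code below computes (tramp + 7) & -8 in RTL, the usual
   round-up-to-alignment idiom.  */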
4777
4778 static rtx
4779 round_trampoline_addr (rtx tramp)
4780 {
4781 rtx temp, addend, mask;
4782
4783 /* If we don't need too much alignment, we'll have been guaranteed
4784 proper alignment by get_trampoline_type. */
4785 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4786 return tramp;
4787
4788 /* Round address up to desired boundary. */
4789 temp = gen_reg_rtx (Pmode);
4790 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4791 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4792
4793 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4794 temp, 0, OPTAB_LIB_WIDEN);
4795 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4796 temp, 0, OPTAB_LIB_WIDEN);
4797
4798 return tramp;
4799 }
4800
4801 static rtx
4802 expand_builtin_init_trampoline (tree exp, bool onstack)
4803 {
4804 tree t_tramp, t_func, t_chain;
4805 rtx m_tramp, r_tramp, r_chain, tmp;
4806
4807 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4808 POINTER_TYPE, VOID_TYPE))
4809 return NULL_RTX;
4810
4811 t_tramp = CALL_EXPR_ARG (exp, 0);
4812 t_func = CALL_EXPR_ARG (exp, 1);
4813 t_chain = CALL_EXPR_ARG (exp, 2);
4814
4815 r_tramp = expand_normal (t_tramp);
4816 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4817 MEM_NOTRAP_P (m_tramp) = 1;
4818
4819 /* If ONSTACK, the TRAMP argument should be the address of a field
4820 within the local function's FRAME decl. Either way, let's see if
4821 we can fill in the MEM_ATTRs for this memory. */
4822 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4823 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4824
4825 /* Creator of a heap trampoline is responsible for making sure the
4826 address is aligned to at least STACK_BOUNDARY. Normally malloc
4827 will ensure this anyhow. */
4828 tmp = round_trampoline_addr (r_tramp);
4829 if (tmp != r_tramp)
4830 {
4831 m_tramp = change_address (m_tramp, BLKmode, tmp);
4832 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4833 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4834 }
4835
4836 /* The FUNC argument should be the address of the nested function.
4837 Extract the actual function decl to pass to the hook. */
4838 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4839 t_func = TREE_OPERAND (t_func, 0);
4840 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4841
4842 r_chain = expand_normal (t_chain);
4843
4844 /* Generate insns to initialize the trampoline. */
4845 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4846
4847 if (onstack)
4848 {
4849 trampolines_created = 1;
4850
4851 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4852 "trampoline generated for nested function %qD", t_func);
4853 }
4854
4855 return const0_rtx;
4856 }
4857
4858 static rtx
4859 expand_builtin_adjust_trampoline (tree exp)
4860 {
4861 rtx tramp;
4862
4863 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4864 return NULL_RTX;
4865
4866 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4867 tramp = round_trampoline_addr (tramp);
4868 if (targetm.calls.trampoline_adjust_address)
4869 tramp = targetm.calls.trampoline_adjust_address (tramp);
4870
4871 return tramp;
4872 }
4873
4874 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4875 function. The function first checks whether the back end provides
4876 an insn to implement signbit for the respective mode. If not, it
4877 checks whether the floating point format of the value is such that
4878 the sign bit can be extracted. If that is not the case, error out.
4879 EXP is the expression that is a call to the builtin function; if
4880 convenient, the result should be placed in TARGET. */
4881 static rtx
4882 expand_builtin_signbit (tree exp, rtx target)
4883 {
4884 const struct real_format *fmt;
4885 machine_mode fmode, imode, rmode;
4886 tree arg;
4887 int word, bitpos;
4888 enum insn_code icode;
4889 rtx temp;
4890 location_t loc = EXPR_LOCATION (exp);
4891
4892 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4893 return NULL_RTX;
4894
4895 arg = CALL_EXPR_ARG (exp, 0);
4896 fmode = TYPE_MODE (TREE_TYPE (arg));
4897 rmode = TYPE_MODE (TREE_TYPE (exp));
4898 fmt = REAL_MODE_FORMAT (fmode);
4899
4900 arg = builtin_save_expr (arg);
4901
4902 /* Expand the argument yielding a RTX expression. */
4903 temp = expand_normal (arg);
4904
4905 /* Check if the back end provides an insn that handles signbit for the
4906 argument's mode. */
4907 icode = optab_handler (signbit_optab, fmode);
4908 if (icode != CODE_FOR_nothing)
4909 {
4910 rtx_insn *last = get_last_insn ();
4911 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4912 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4913 return target;
4914 delete_insns_since (last);
4915 }
4916
4917 /* For floating point formats without a sign bit, implement signbit
4918 as "ARG < 0.0". */
4919 bitpos = fmt->signbit_ro;
4920 if (bitpos < 0)
4921 {
4922 /* But we can't do this if the format supports signed zero. */
4923 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4924
4925 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4926 build_real (TREE_TYPE (arg), dconst0));
4927 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4928 }
4929
4930 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4931 {
4932 imode = int_mode_for_mode (fmode);
4933 gcc_assert (imode != BLKmode);
4934 temp = gen_lowpart (imode, temp);
4935 }
4936 else
4937 {
4938 imode = word_mode;
4939 /* Handle targets with different FP word orders. */
4940 if (FLOAT_WORDS_BIG_ENDIAN)
4941 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4942 else
4943 word = bitpos / BITS_PER_WORD;
4944 temp = operand_subword_force (temp, word, fmode);
4945 bitpos = bitpos % BITS_PER_WORD;
4946 }
4947
4948 /* Force the intermediate word_mode (or narrower) result into a
4949 register. This avoids attempting to create paradoxical SUBREGs
4950 of floating point modes below. */
4951 temp = force_reg (imode, temp);
4952
4953 /* If the bitpos is within the "result mode" lowpart, the operation
4954 can be implemented with a single bitwise AND. Otherwise, we need
4955 a right shift and an AND. */
4956
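/* For example (illustrative): for IEEE single precision BITPOS is 31, so
   with a 32-bit RMODE the sign is isolated as TEMP & 0x80000000; if RMODE
   were only 16 bits wide we would instead shift right by 31 and AND
   with 1.  */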
4957 if (bitpos < GET_MODE_BITSIZE (rmode))
4958 {
4959 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4960
4961 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4962 temp = gen_lowpart (rmode, temp);
4963 temp = expand_binop (rmode, and_optab, temp,
4964 immed_wide_int_const (mask, rmode),
4965 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4966 }
4967 else
4968 {
4969 /* Perform a logical right shift to place the signbit in the least
4970 significant bit, then truncate the result to the desired mode
4971 and mask just this bit. */
4972 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4973 temp = gen_lowpart (rmode, temp);
4974 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4975 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4976 }
4977
4978 return temp;
4979 }
4980
4981 /* Expand fork or exec calls. TARGET is the desired target of the
4982 call. EXP is the call. FN is the
4983 identifier of the actual function. IGNORE is nonzero if the
4984 value is to be ignored. */
4985
4986 static rtx
4987 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4988 {
4989 tree id, decl;
4990 tree call;
4991
4992 /* If we are not profiling, just call the function. */
4993 if (!profile_arc_flag)
4994 return NULL_RTX;
4995
4996 /* Otherwise call the wrapper. This should be equivalent for the rest of
4997 the compiler, so the code does not diverge, and the wrapper may run the
4998 code necessary for keeping the profiling sane. */
4999
5000 switch (DECL_FUNCTION_CODE (fn))
5001 {
5002 case BUILT_IN_FORK:
5003 id = get_identifier ("__gcov_fork");
5004 break;
5005
5006 case BUILT_IN_EXECL:
5007 id = get_identifier ("__gcov_execl");
5008 break;
5009
5010 case BUILT_IN_EXECV:
5011 id = get_identifier ("__gcov_execv");
5012 break;
5013
5014 case BUILT_IN_EXECLP:
5015 id = get_identifier ("__gcov_execlp");
5016 break;
5017
5018 case BUILT_IN_EXECLE:
5019 id = get_identifier ("__gcov_execle");
5020 break;
5021
5022 case BUILT_IN_EXECVP:
5023 id = get_identifier ("__gcov_execvp");
5024 break;
5025
5026 case BUILT_IN_EXECVE:
5027 id = get_identifier ("__gcov_execve");
5028 break;
5029
5030 default:
5031 gcc_unreachable ();
5032 }
5033
5034 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5035 FUNCTION_DECL, id, TREE_TYPE (fn));
5036 DECL_EXTERNAL (decl) = 1;
5037 TREE_PUBLIC (decl) = 1;
5038 DECL_ARTIFICIAL (decl) = 1;
5039 TREE_NOTHROW (decl) = 1;
5040 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5041 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5042 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5043 return expand_call (call, target, ignore);
5044 }
5045
5046
5047 \f
5048 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5049 the pointer in these functions is void*, the tree optimizers may remove
5050 casts. The mode computed in expand_builtin isn't reliable either, due
5051 to __sync_bool_compare_and_swap.
5052
5053 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5054 group of builtins. This gives us log2 of the mode size. */
5055
5056 static inline machine_mode
5057 get_builtin_sync_mode (int fcode_diff)
5058 {
5059 /* The size is not negotiable, so ask not to get BLKmode in return
5060 if the target indicates that a smaller size would be better. */
5061 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5062 }
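
/* Worked example (values assume the usual BITS_PER_UNIT of 8): for
   __sync_fetch_and_add_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   so the requested size is BITS_PER_UNIT << 2 == 32 bits, which yields
   SImode on typical targets.  */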
5063
5064 /* Expand the memory expression LOC and return the appropriate memory operand
5065 for the builtin_sync operations. */
5066
5067 static rtx
5068 get_builtin_sync_mem (tree loc, machine_mode mode)
5069 {
5070 rtx addr, mem;
5071
5072 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5073 addr = convert_memory_address (Pmode, addr);
5074
5075 /* Note that we explicitly do not want any alias information for this
5076 memory, so that we kill all other live memories. Otherwise we don't
5077 satisfy the full barrier semantics of the intrinsic. */
5078 mem = validize_mem (gen_rtx_MEM (mode, addr));
5079
5080 /* The alignment needs to be at least that of the mode. */
5081 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5082 get_pointer_alignment (loc)));
5083 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5084 MEM_VOLATILE_P (mem) = 1;
5085
5086 return mem;
5087 }
5088
5089 /* Make sure an argument is in the right mode.
5090 EXP is the tree argument.
5091 MODE is the mode it should be in. */
5092
5093 static rtx
5094 expand_expr_force_mode (tree exp, machine_mode mode)
5095 {
5096 rtx val;
5097 machine_mode old_mode;
5098
5099 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5100 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5101 of CONST_INTs, where we know the old_mode only from the call argument. */
5102
5103 old_mode = GET_MODE (val);
5104 if (old_mode == VOIDmode)
5105 old_mode = TYPE_MODE (TREE_TYPE (exp));
5106 val = convert_modes (mode, old_mode, val, 1);
5107 return val;
5108 }
5109
5110
5111 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5112 EXP is the CALL_EXPR. CODE is the rtx code
5113 that corresponds to the arithmetic or logical operation from the name;
5114 an exception here is that NOT actually means NAND. TARGET is an optional
5115 place for us to store the results; AFTER is true if this is the
5116 fetch_and_xxx form. */
5117
5118 static rtx
5119 expand_builtin_sync_operation (machine_mode mode, tree exp,
5120 enum rtx_code code, bool after,
5121 rtx target)
5122 {
5123 rtx val, mem;
5124 location_t loc = EXPR_LOCATION (exp);
5125
5126 if (code == NOT && warn_sync_nand)
5127 {
5128 tree fndecl = get_callee_fndecl (exp);
5129 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5130
5131 static bool warned_f_a_n, warned_n_a_f;
5132
5133 switch (fcode)
5134 {
5135 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5136 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5137 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5138 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5139 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5140 if (warned_f_a_n)
5141 break;
5142
5143 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5144 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5145 warned_f_a_n = true;
5146 break;
5147
5148 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5149 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5150 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5151 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5152 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5153 if (warned_n_a_f)
5154 break;
5155
5156 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5157 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5158 warned_n_a_f = true;
5159 break;
5160
5161 default:
5162 gcc_unreachable ();
5163 }
5164 }
5165
5166 /* Expand the operands. */
5167 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5168 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5169
5170 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5171 after);
5172 }
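
/* Illustrative sketch (not part of the original file): the GCC 4.4 semantic
   change that the warning above refers to.  Since GCC 4.4,
   __sync_fetch_and_nand computes ~(old & val); before 4.4 it computed
   ~old & val.  The function name is hypothetical and the snippet is kept
   disabled.  */
#if 0
static unsigned int
example_fetch_and_nand (unsigned int *p, unsigned int val)
{
  /* Atomically: old = *p; *p = ~(old & val); return old;  (GCC >= 4.4).  */
  return __sync_fetch_and_nand (p, val);
}
#endif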
5173
5174 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5175 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5176 true if this is the boolean form. TARGET is a place for us to store the
5177 results; this is NOT optional if IS_BOOL is true. */
5178
5179 static rtx
5180 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5181 bool is_bool, rtx target)
5182 {
5183 rtx old_val, new_val, mem;
5184 rtx *pbool, *poval;
5185
5186 /* Expand the operands. */
5187 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5188 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5189 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5190
5191 pbool = poval = NULL;
5192 if (target != const0_rtx)
5193 {
5194 if (is_bool)
5195 pbool = &target;
5196 else
5197 poval = &target;
5198 }
5199 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5200 false, MEMMODEL_SYNC_SEQ_CST,
5201 MEMMODEL_SYNC_SEQ_CST))
5202 return NULL_RTX;
5203
5204 return target;
5205 }
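
/* Illustrative sketch (not part of the original file): the two user-visible
   forms handled above.  The bool form reports whether the swap happened;
   the val form returns the prior contents regardless.  The function name is
   hypothetical and the snippet is kept disabled.  */
#if 0
static void
example_sync_cas (int *p)
{
  int swapped = __sync_bool_compare_and_swap (p, 0, 1);
  int previous = __sync_val_compare_and_swap (p, 1, 2);
  (void) swapped;
  (void) previous;
}
#endif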
5206
5207 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5208 general form is actually an atomic exchange, and some targets only
5209 support a reduced form with the second argument being a constant 1.
5210 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5211 the results. */
5212
5213 static rtx
5214 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5215 rtx target)
5216 {
5217 rtx val, mem;
5218
5219 /* Expand the operands. */
5220 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5221 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5222
5223 return expand_sync_lock_test_and_set (target, mem, val);
5224 }
5225
5226 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5227
5228 static void
5229 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5230 {
5231 rtx mem;
5232
5233 /* Expand the operands. */
5234 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5235
5236 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5237 }
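
/* Illustrative sketch (not part of the original file): the classic spinlock
   built from the two primitives expanded above.  __sync_lock_test_and_set
   is an acquire barrier (and on some targets only supports storing the
   constant 1); __sync_lock_release is a release barrier that stores 0.
   The function name is hypothetical and the snippet is kept disabled.  */
#if 0
static void
example_spinlock (volatile int *lock)
{
  while (__sync_lock_test_and_set (lock, 1))
    ;				/* Spin until the previous value was 0.  */
  /* ...critical section...  */
  __sync_lock_release (lock);	/* Store 0 with release semantics.  */
}
#endif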
5238
5239 /* Given an integer representing an ``enum memmodel'', verify its
5240 correctness and return the memory model enum. */
5241
5242 static enum memmodel
5243 get_memmodel (tree exp)
5244 {
5245 rtx op;
5246 unsigned HOST_WIDE_INT val;
5247
5248 /* If the parameter is not a constant, it's a run time value so we'll just
5249 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5250 if (TREE_CODE (exp) != INTEGER_CST)
5251 return MEMMODEL_SEQ_CST;
5252
5253 op = expand_normal (exp);
5254
5255 val = INTVAL (op);
5256 if (targetm.memmodel_check)
5257 val = targetm.memmodel_check (val);
5258 else if (val & ~MEMMODEL_MASK)
5259 {
5260 warning (OPT_Winvalid_memory_model,
5261 "Unknown architecture specifier in memory model to builtin.");
5262 return MEMMODEL_SEQ_CST;
5263 }
5264
5265 /* Should never see a user explicit SYNC memmodel, so >= LAST works. */
5266 if (memmodel_base (val) >= MEMMODEL_LAST)
5267 {
5268 warning (OPT_Winvalid_memory_model,
5269 "invalid memory model argument to builtin");
5270 return MEMMODEL_SEQ_CST;
5271 }
5272
5273 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5274 be conservative and promote consume to acquire. */
5275 if (val == MEMMODEL_CONSUME)
5276 val = MEMMODEL_ACQUIRE;
5277
5278 return (enum memmodel) val;
5279 }
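
/* Illustrative sketch (not part of the original file): how the checks above
   map user arguments.  A memory model argument that is not a compile-time
   constant is treated as __ATOMIC_SEQ_CST, and __ATOMIC_CONSUME is promoted
   to __ATOMIC_ACQUIRE (the PR 59448 workaround).  The function name is
   hypothetical and the snippet is kept disabled.  */
#if 0
static int
example_memmodel_args (int *p, int runtime_model)
{
  int a = __atomic_load_n (p, __ATOMIC_CONSUME);  /* Expanded as acquire.  */
  int b = __atomic_load_n (p, runtime_model);	  /* Expanded as seq_cst.  */
  return a + b;
}
#endif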
5280
5281 /* Expand the __atomic_exchange intrinsic:
5282 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5283 EXP is the CALL_EXPR.
5284 TARGET is an optional place for us to store the results. */
5285
5286 static rtx
5287 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5288 {
5289 rtx val, mem;
5290 enum memmodel model;
5291
5292 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5293
5294 if (!flag_inline_atomics)
5295 return NULL_RTX;
5296
5297 /* Expand the operands. */
5298 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5299 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5300
5301 return expand_atomic_exchange (target, mem, val, model);
5302 }
5303
5304 /* Expand the __atomic_compare_exchange intrinsic:
5305 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5306 TYPE desired, BOOL weak,
5307 enum memmodel success,
5308 enum memmodel failure)
5309 EXP is the CALL_EXPR.
5310 TARGET is an optional place for us to store the results. */
5311
5312 static rtx
5313 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5314 rtx target)
5315 {
5316 rtx expect, desired, mem, oldval;
5317 rtx_code_label *label;
5318 enum memmodel success, failure;
5319 tree weak;
5320 bool is_weak;
5321
5322 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5323 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5324
5325 if (failure > success)
5326 {
5327 warning (OPT_Winvalid_memory_model,
5328 "failure memory model cannot be stronger than success memory "
5329 "model for %<__atomic_compare_exchange%>");
5330 success = MEMMODEL_SEQ_CST;
5331 }
5332
5333 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5334 {
5335 warning (OPT_Winvalid_memory_model,
5336 "invalid failure memory model for "
5337 "%<__atomic_compare_exchange%>");
5338 failure = MEMMODEL_SEQ_CST;
5339 success = MEMMODEL_SEQ_CST;
5340 }
5341
5342
5343 if (!flag_inline_atomics)
5344 return NULL_RTX;
5345
5346 /* Expand the operands. */
5347 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5348
5349 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5350 expect = convert_memory_address (Pmode, expect);
5351 expect = gen_rtx_MEM (mode, expect);
5352 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5353
5354 weak = CALL_EXPR_ARG (exp, 3);
5355 is_weak = false;
5356 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5357 is_weak = true;
5358
5359 if (target == const0_rtx)
5360 target = NULL;
5361
5362 /* Lest the rtl backend create a race condition with an improper store
5363 to memory, always create a new pseudo for OLDVAL. */
5364 oldval = NULL;
5365
5366 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5367 is_weak, success, failure))
5368 return NULL_RTX;
5369
5370 /* Conditionally store back to EXPECT, lest we create a race condition
5371 with an improper store to memory. */
5372 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5373 the normal case where EXPECT is totally private, i.e. a register. At
5374 which point the store can be unconditional. */
5375 label = gen_label_rtx ();
5376 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5377 GET_MODE (target), 1, label);
5378 emit_move_insn (expect, oldval);
5379 emit_label (label);
5380
5381 return target;
5382 }
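
/* Illustrative sketch (not part of the original file): the user-level
   contract that the conditional store above implements.  On failure the
   value read from *OBJECT is written back into *EXPECTED.  The function
   name is hypothetical and the snippet is kept disabled.  */
#if 0
static int
example_atomic_cas (int *object, int *expected, int desired)
{
  return __atomic_compare_exchange_n (object, expected, desired,
				      0 /* weak */,
				      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
#endif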
5383
5384 /* Expand the __atomic_load intrinsic:
5385 TYPE __atomic_load (TYPE *object, enum memmodel)
5386 EXP is the CALL_EXPR.
5387 TARGET is an optional place for us to store the results. */
5388
5389 static rtx
5390 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5391 {
5392 rtx mem;
5393 enum memmodel model;
5394
5395 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5396 if (is_mm_release (model) || is_mm_acq_rel (model))
5397 {
5398 warning (OPT_Winvalid_memory_model,
5399 "invalid memory model for %<__atomic_load%>");
5400 model = MEMMODEL_SEQ_CST;
5401 }
5402
5403 if (!flag_inline_atomics)
5404 return NULL_RTX;
5405
5406 /* Expand the operand. */
5407 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5408
5409 return expand_atomic_load (target, mem, model);
5410 }
5411
5412
5413 /* Expand the __atomic_store intrinsic:
5414 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5415 EXP is the CALL_EXPR. */
5417
5418 static rtx
5419 expand_builtin_atomic_store (machine_mode mode, tree exp)
5420 {
5421 rtx mem, val;
5422 enum memmodel model;
5423
5424 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5425 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5426 || is_mm_release (model)))
5427 {
5428 warning (OPT_Winvalid_memory_model,
5429 "invalid memory model for %<__atomic_store%>");
5430 model = MEMMODEL_SEQ_CST;
5431 }
5432
5433 if (!flag_inline_atomics)
5434 return NULL_RTX;
5435
5436 /* Expand the operands. */
5437 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5438 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5439
5440 return expand_atomic_store (mem, val, model, false);
5441 }
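
/* Illustrative sketch (hypothetical user code, kept disabled): the model
   restrictions diagnosed above.  __atomic_load accepts relaxed, consume,
   acquire and seq_cst; __atomic_store accepts only relaxed, release and
   seq_cst.  Anything else is warned about and demoted to seq_cst.  */
#if 0
static void
example_atomic_load_store (int *p, int v)
{
  int x = __atomic_load_n (p, __ATOMIC_ACQUIRE);  /* Valid model.  */
  __atomic_store_n (p, v, __ATOMIC_RELEASE);	  /* Valid model.  */
  __atomic_store_n (p, x, __ATOMIC_ACQUIRE);	  /* Warned; becomes seq_cst.  */
}
#endif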
5442
5443 /* Expand the __atomic_fetch_XXX intrinsic:
5444 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5445 EXP is the CALL_EXPR.
5446 TARGET is an optional place for us to store the results.
5447 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5448 FETCH_AFTER is true if returning the result of the operation.
5449 FETCH_AFTER is false if returning the value before the operation.
5450 IGNORE is true if the result is not used.
5451 EXT_CALL is the correct builtin for an external call if this cannot be
5452 resolved to an instruction sequence. */
5453
5454 static rtx
5455 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5456 enum rtx_code code, bool fetch_after,
5457 bool ignore, enum built_in_function ext_call)
5458 {
5459 rtx val, mem, ret;
5460 enum memmodel model;
5461 tree fndecl;
5462 tree addr;
5463
5464 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5465
5466 /* Expand the operands. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5469
5470 /* Only try generating instructions if inlining is turned on. */
5471 if (flag_inline_atomics)
5472 {
5473 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5474 if (ret)
5475 return ret;
5476 }
5477
5478 /* Return if a different routine isn't needed for the library call. */
5479 if (ext_call == BUILT_IN_NONE)
5480 return NULL_RTX;
5481
5482 /* Change the call to the specified function. */
5483 fndecl = get_callee_fndecl (exp);
5484 addr = CALL_EXPR_FN (exp);
5485 STRIP_NOPS (addr);
5486
5487 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5488 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5489
5490 /* Expand the call here so we can emit trailing code. */
5491 ret = expand_call (exp, target, ignore);
5492
5493 /* Replace the original function just in case it matters. */
5494 TREE_OPERAND (addr, 0) = fndecl;
5495
5496 /* Then issue the arithmetic correction to return the right result. */
5497 if (!ignore)
5498 {
5499 if (code == NOT)
5500 {
5501 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5502 OPTAB_LIB_WIDEN);
5503 ret = expand_simple_unop (mode, NOT, ret, target, true);
5504 }
5505 else
5506 ret = expand_simple_binop (mode, code, ret, val, target, true,
5507 OPTAB_LIB_WIDEN);
5508 }
5509 return ret;
5510 }
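
/* Worked example of the correction above (library-call fallback): the
   __atomic_fetch_XXX routine returns the old value, so the OP_fetch forms
   are fixed up locally, e.g.
     __atomic_add_fetch (p, v, m)  ==  __atomic_fetch_add (p, v, m) + v
   and, because NOT here means NAND,
     __atomic_nand_fetch (p, v, m) ==  ~(__atomic_fetch_nand (p, v, m) & v).  */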
5511
5512 /* Expand an atomic clear operation.
5513 void _atomic_clear (BOOL *obj, enum memmodel)
5514 EXP is the call expression. */
5515
5516 static rtx
5517 expand_builtin_atomic_clear (tree exp)
5518 {
5519 machine_mode mode;
5520 rtx mem, ret;
5521 enum memmodel model;
5522
5523 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5524 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5525 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5526
5527 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5528 {
5529 warning (OPT_Winvalid_memory_model,
5530 "invalid memory model for %<__atomic_store%>");
5531 model = MEMMODEL_SEQ_CST;
5532 }
5533
5534 /* Try issuing an __atomic_store, allowing a fallback to the
5535 __sync_lock_release pattern. The only way both can fail is if the bool
5536 type is larger than a word size. Unlikely, but handle it anyway for
5537 completeness: emit a plain store and assume a single-threaded model, since
5538 there is no atomic support in this case and no barriers are required. */
5539 ret = expand_atomic_store (mem, const0_rtx, model, true);
5540 if (!ret)
5541 emit_move_insn (mem, const0_rtx);
5542 return const0_rtx;
5543 }
5544
5545 /* Expand an atomic test_and_set operation.
5546 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5547 EXP is the call expression. */
5548
5549 static rtx
5550 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5551 {
5552 rtx mem;
5553 enum memmodel model;
5554 machine_mode mode;
5555
5556 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5557 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5558 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5559
5560 return expand_atomic_test_and_set (target, mem, model);
5561 }
5562
5563
5564 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5565 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5566
5567 static tree
5568 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5569 {
5570 int size;
5571 machine_mode mode;
5572 unsigned int mode_align, type_align;
5573
5574 if (TREE_CODE (arg0) != INTEGER_CST)
5575 return NULL_TREE;
5576
5577 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5578 mode = mode_for_size (size, MODE_INT, 0);
5579 mode_align = GET_MODE_ALIGNMENT (mode);
5580
5581 if (TREE_CODE (arg1) == INTEGER_CST)
5582 {
5583 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5584
5585 /* Either this argument is null, or it's a fake pointer encoding
5586 the alignment of the object. */
5587 val = val & -val;
5588 val *= BITS_PER_UNIT;
5589
5590 if (val == 0 || mode_align < val)
5591 type_align = mode_align;
5592 else
5593 type_align = val;
5594 }
5595 else
5596 {
5597 tree ttype = TREE_TYPE (arg1);
5598
5599 /* This function is usually invoked and folded immediately by the front
5600 end before anything else has a chance to look at it. The pointer
5601 parameter at this point is usually cast to a void *, so check for that
5602 and look past the cast. */
5603 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5604 && VOID_TYPE_P (TREE_TYPE (ttype)))
5605 arg1 = TREE_OPERAND (arg1, 0);
5606
5607 ttype = TREE_TYPE (arg1);
5608 gcc_assert (POINTER_TYPE_P (ttype));
5609
5610 /* Get the underlying type of the object. */
5611 ttype = TREE_TYPE (ttype);
5612 type_align = TYPE_ALIGN (ttype);
5613 }
5614
5615 /* If the object has smaller alignment, the lock free routines cannot
5616 be used. */
5617 if (type_align < mode_align)
5618 return boolean_false_node;
5619
5620 /* Check if a compare_and_swap pattern exists for the mode which represents
5621 the required size. The pattern is not allowed to fail, so the existence
5622 of the pattern indicates support is present. */
5623 if (can_compare_and_swap_p (mode, true))
5624 return boolean_true_node;
5625 else
5626 return boolean_false_node;
5627 }
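
/* Illustrative sketch (hypothetical user code, kept disabled): typical
   calls folded above.  With a null object pointer the answer is based on
   the typical alignment for the size; with a real pointer the alignment of
   the pointed-to type is compared against the mode's alignment.  */
#if 0
static int
example_always_lock_free (int *p)
{
  int a = __atomic_always_lock_free (sizeof (int), 0);	 /* Typical alignment.  */
  int b = __atomic_always_lock_free (sizeof (*p), p);	 /* Type alignment of *p.  */
  return a && b;
}
#endif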
5628
5629 /* Return true if the parameters to call EXP represent an object which will
5630 always generate lock free instructions. The first argument represents the
5631 size of the object, and the second parameter is a pointer to the object
5632 itself. If NULL is passed for the object, then the result is based on
5633 typical alignment for an object of the specified size. Otherwise return
5634 false. */
5635
5636 static rtx
5637 expand_builtin_atomic_always_lock_free (tree exp)
5638 {
5639 tree size;
5640 tree arg0 = CALL_EXPR_ARG (exp, 0);
5641 tree arg1 = CALL_EXPR_ARG (exp, 1);
5642
5643 if (TREE_CODE (arg0) != INTEGER_CST)
5644 {
5645 error ("non-constant argument 1 to __atomic_always_lock_free");
5646 return const0_rtx;
5647 }
5648
5649 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5650 if (size == boolean_true_node)
5651 return const1_rtx;
5652 return const0_rtx;
5653 }
5654
5655 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5656 is lock free on this architecture. */
5657
5658 static tree
5659 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5660 {
5661 if (!flag_inline_atomics)
5662 return NULL_TREE;
5663
5664 /* If it isn't always lock free, don't generate a result. */
5665 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5666 return boolean_true_node;
5667
5668 return NULL_TREE;
5669 }
5670
5671 /* Return true if the parameters to call EXP represent an object which will
5672 always generate lock free instructions. The first argument represents the
5673 size of the object, and the second parameter is a pointer to the object
5674 itself. If NULL is passed for the object, then the result is based on
5675 typical alignment for an object of the specified size. Otherwise return
5676 NULL. */
5677
5678 static rtx
5679 expand_builtin_atomic_is_lock_free (tree exp)
5680 {
5681 tree size;
5682 tree arg0 = CALL_EXPR_ARG (exp, 0);
5683 tree arg1 = CALL_EXPR_ARG (exp, 1);
5684
5685 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5686 {
5687 error ("non-integer argument 1 to __atomic_is_lock_free");
5688 return NULL_RTX;
5689 }
5690
5691 if (!flag_inline_atomics)
5692 return NULL_RTX;
5693
5694 /* If the value is known at compile time, return the RTX for it. */
5695 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5696 if (size == boolean_true_node)
5697 return const1_rtx;
5698
5699 return NULL_RTX;
5700 }
5701
5702 /* Expand the __atomic_thread_fence intrinsic:
5703 void __atomic_thread_fence (enum memmodel)
5704 EXP is the CALL_EXPR. */
5705
5706 static void
5707 expand_builtin_atomic_thread_fence (tree exp)
5708 {
5709 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5710 expand_mem_thread_fence (model);
5711 }
5712
5713 /* Expand the __atomic_signal_fence intrinsic:
5714 void __atomic_signal_fence (enum memmodel)
5715 EXP is the CALL_EXPR. */
5716
5717 static void
5718 expand_builtin_atomic_signal_fence (tree exp)
5719 {
5720 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5721 expand_mem_signal_fence (model);
5722 }
5723
5724 /* Expand the __sync_synchronize intrinsic. */
5725
5726 static void
5727 expand_builtin_sync_synchronize (void)
5728 {
5729 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5730 }
5731
5732 static rtx
5733 expand_builtin_thread_pointer (tree exp, rtx target)
5734 {
5735 enum insn_code icode;
5736 if (!validate_arglist (exp, VOID_TYPE))
5737 return const0_rtx;
5738 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5739 if (icode != CODE_FOR_nothing)
5740 {
5741 struct expand_operand op;
5742 /* If the target is not suitable then create a new target. */
5743 if (target == NULL_RTX
5744 || !REG_P (target)
5745 || GET_MODE (target) != Pmode)
5746 target = gen_reg_rtx (Pmode);
5747 create_output_operand (&op, target, Pmode);
5748 expand_insn (icode, 1, &op);
5749 return target;
5750 }
5751 error ("__builtin_thread_pointer is not supported on this target");
5752 return const0_rtx;
5753 }
5754
5755 static void
5756 expand_builtin_set_thread_pointer (tree exp)
5757 {
5758 enum insn_code icode;
5759 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5760 return;
5761 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5762 if (icode != CODE_FOR_nothing)
5763 {
5764 struct expand_operand op;
5765 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5766 Pmode, EXPAND_NORMAL);
5767 create_input_operand (&op, val, Pmode);
5768 expand_insn (icode, 1, &op);
5769 return;
5770 }
5771 error ("__builtin_set_thread_pointer is not supported on this target");
5772 }
5773
5774 \f
5775 /* Emit code to restore the current value of stack. */
5776
5777 static void
5778 expand_stack_restore (tree var)
5779 {
5780 rtx_insn *prev;
5781 rtx sa = expand_normal (var);
5782
5783 sa = convert_memory_address (Pmode, sa);
5784
5785 prev = get_last_insn ();
5786 emit_stack_restore (SAVE_BLOCK, sa);
5787
5788 record_new_stack_level ();
5789
5790 fixup_args_size_notes (prev, get_last_insn (), 0);
5791 }
5792
5793 /* Emit code to save the current value of stack. */
5794
5795 static rtx
5796 expand_stack_save (void)
5797 {
5798 rtx ret = NULL_RTX;
5799
5800 emit_stack_save (SAVE_BLOCK, &ret);
5801 return ret;
5802 }
5803
5804
5805 /* Expand an expression EXP that calls a built-in function,
5806 with result going to TARGET if that's convenient
5807 (and in mode MODE if that's convenient).
5808 SUBTARGET may be used as the target for computing one of EXP's operands.
5809 IGNORE is nonzero if the value is to be ignored. */
5810
5811 rtx
5812 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5813 int ignore)
5814 {
5815 tree fndecl = get_callee_fndecl (exp);
5816 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5817 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5818 int flags;
5819
5820 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5821 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5822
5823 /* When ASan is enabled, we don't want to expand some memory/string
5824 builtins and rely on libsanitizer's hooks. This allows us to avoid
5825 redundant checks and be sure that possible overflow will be detected
5826 by ASan. */
5827
5828 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5829 return expand_call (exp, target, ignore);
5830
5831 /* When not optimizing, generate calls to library functions for a certain
5832 set of builtins. */
5833 if (!optimize
5834 && !called_as_built_in (fndecl)
5835 && fcode != BUILT_IN_FORK
5836 && fcode != BUILT_IN_EXECL
5837 && fcode != BUILT_IN_EXECV
5838 && fcode != BUILT_IN_EXECLP
5839 && fcode != BUILT_IN_EXECLE
5840 && fcode != BUILT_IN_EXECVP
5841 && fcode != BUILT_IN_EXECVE
5842 && fcode != BUILT_IN_ALLOCA
5843 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5844 && fcode != BUILT_IN_FREE
5845 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5846 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5847 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5848 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5849 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5850 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5851 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5852 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5853 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5854 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5855 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5856 && fcode != BUILT_IN_CHKP_BNDRET)
5857 return expand_call (exp, target, ignore);
5858
5859 /* The built-in function expanders test for target == const0_rtx
5860 to determine whether the function's result will be ignored. */
5861 if (ignore)
5862 target = const0_rtx;
5863
5864 /* If the result of a pure or const built-in function is ignored, and
5865 none of its arguments are volatile, we can avoid expanding the
5866 built-in call and just evaluate the arguments for side-effects. */
5867 if (target == const0_rtx
5868 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5869 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5870 {
5871 bool volatilep = false;
5872 tree arg;
5873 call_expr_arg_iterator iter;
5874
5875 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5876 if (TREE_THIS_VOLATILE (arg))
5877 {
5878 volatilep = true;
5879 break;
5880 }
5881
5882 if (! volatilep)
5883 {
5884 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5885 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5886 return const0_rtx;
5887 }
5888 }
5889
5890 /* expand_builtin_with_bounds is supposed to be used for
5891 instrumented builtin calls. */
5892 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5893
5894 switch (fcode)
5895 {
5896 CASE_FLT_FN (BUILT_IN_FABS):
5897 case BUILT_IN_FABSD32:
5898 case BUILT_IN_FABSD64:
5899 case BUILT_IN_FABSD128:
5900 target = expand_builtin_fabs (exp, target, subtarget);
5901 if (target)
5902 return target;
5903 break;
5904
5905 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5906 target = expand_builtin_copysign (exp, target, subtarget);
5907 if (target)
5908 return target;
5909 break;
5910
5911 /* Just do a normal library call if we were unable to fold
5912 the values. */
5913 CASE_FLT_FN (BUILT_IN_CABS):
5914 break;
5915
5916 CASE_FLT_FN (BUILT_IN_EXP):
5917 CASE_FLT_FN (BUILT_IN_EXP10):
5918 CASE_FLT_FN (BUILT_IN_POW10):
5919 CASE_FLT_FN (BUILT_IN_EXP2):
5920 CASE_FLT_FN (BUILT_IN_EXPM1):
5921 CASE_FLT_FN (BUILT_IN_LOGB):
5922 CASE_FLT_FN (BUILT_IN_LOG):
5923 CASE_FLT_FN (BUILT_IN_LOG10):
5924 CASE_FLT_FN (BUILT_IN_LOG2):
5925 CASE_FLT_FN (BUILT_IN_LOG1P):
5926 CASE_FLT_FN (BUILT_IN_TAN):
5927 CASE_FLT_FN (BUILT_IN_ASIN):
5928 CASE_FLT_FN (BUILT_IN_ACOS):
5929 CASE_FLT_FN (BUILT_IN_ATAN):
5930 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5931 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5932 because of possible accuracy problems. */
5933 if (! flag_unsafe_math_optimizations)
5934 break;
5935 CASE_FLT_FN (BUILT_IN_SQRT):
5936 CASE_FLT_FN (BUILT_IN_FLOOR):
5937 CASE_FLT_FN (BUILT_IN_CEIL):
5938 CASE_FLT_FN (BUILT_IN_TRUNC):
5939 CASE_FLT_FN (BUILT_IN_ROUND):
5940 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5941 CASE_FLT_FN (BUILT_IN_RINT):
5942 target = expand_builtin_mathfn (exp, target, subtarget);
5943 if (target)
5944 return target;
5945 break;
5946
5947 CASE_FLT_FN (BUILT_IN_FMA):
5948 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5949 if (target)
5950 return target;
5951 break;
5952
5953 CASE_FLT_FN (BUILT_IN_ILOGB):
5954 if (! flag_unsafe_math_optimizations)
5955 break;
5956 CASE_FLT_FN (BUILT_IN_ISINF):
5957 CASE_FLT_FN (BUILT_IN_FINITE):
5958 case BUILT_IN_ISFINITE:
5959 case BUILT_IN_ISNORMAL:
5960 target = expand_builtin_interclass_mathfn (exp, target);
5961 if (target)
5962 return target;
5963 break;
5964
5965 CASE_FLT_FN (BUILT_IN_ICEIL):
5966 CASE_FLT_FN (BUILT_IN_LCEIL):
5967 CASE_FLT_FN (BUILT_IN_LLCEIL):
5968 CASE_FLT_FN (BUILT_IN_LFLOOR):
5969 CASE_FLT_FN (BUILT_IN_IFLOOR):
5970 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5971 target = expand_builtin_int_roundingfn (exp, target);
5972 if (target)
5973 return target;
5974 break;
5975
5976 CASE_FLT_FN (BUILT_IN_IRINT):
5977 CASE_FLT_FN (BUILT_IN_LRINT):
5978 CASE_FLT_FN (BUILT_IN_LLRINT):
5979 CASE_FLT_FN (BUILT_IN_IROUND):
5980 CASE_FLT_FN (BUILT_IN_LROUND):
5981 CASE_FLT_FN (BUILT_IN_LLROUND):
5982 target = expand_builtin_int_roundingfn_2 (exp, target);
5983 if (target)
5984 return target;
5985 break;
5986
5987 CASE_FLT_FN (BUILT_IN_POWI):
5988 target = expand_builtin_powi (exp, target);
5989 if (target)
5990 return target;
5991 break;
5992
5993 CASE_FLT_FN (BUILT_IN_ATAN2):
5994 CASE_FLT_FN (BUILT_IN_LDEXP):
5995 CASE_FLT_FN (BUILT_IN_SCALB):
5996 CASE_FLT_FN (BUILT_IN_SCALBN):
5997 CASE_FLT_FN (BUILT_IN_SCALBLN):
5998 if (! flag_unsafe_math_optimizations)
5999 break;
6000
6001 CASE_FLT_FN (BUILT_IN_FMOD):
6002 CASE_FLT_FN (BUILT_IN_REMAINDER):
6003 CASE_FLT_FN (BUILT_IN_DREM):
6004 CASE_FLT_FN (BUILT_IN_POW):
6005 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6006 if (target)
6007 return target;
6008 break;
6009
6010 CASE_FLT_FN (BUILT_IN_CEXPI):
6011 target = expand_builtin_cexpi (exp, target);
6012 gcc_assert (target);
6013 return target;
6014
6015 CASE_FLT_FN (BUILT_IN_SIN):
6016 CASE_FLT_FN (BUILT_IN_COS):
6017 if (! flag_unsafe_math_optimizations)
6018 break;
6019 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6020 if (target)
6021 return target;
6022 break;
6023
6024 CASE_FLT_FN (BUILT_IN_SINCOS):
6025 if (! flag_unsafe_math_optimizations)
6026 break;
6027 target = expand_builtin_sincos (exp);
6028 if (target)
6029 return target;
6030 break;
6031
6032 case BUILT_IN_APPLY_ARGS:
6033 return expand_builtin_apply_args ();
6034
6035 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6036 FUNCTION with a copy of the parameters described by
6037 ARGUMENTS, and ARGSIZE. It returns a block of memory
6038 allocated on the stack into which is stored all the registers
6039 that might possibly be used for returning the result of a
6040 function. ARGUMENTS is the value returned by
6041 __builtin_apply_args. ARGSIZE is the number of bytes of
6042 arguments that must be copied. ??? How should this value be
6043 computed? We'll also need a safe worst case value for varargs
6044 functions. */
6045 case BUILT_IN_APPLY:
6046 if (!validate_arglist (exp, POINTER_TYPE,
6047 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6048 && !validate_arglist (exp, REFERENCE_TYPE,
6049 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6050 return const0_rtx;
6051 else
6052 {
6053 rtx ops[3];
6054
6055 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6056 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6057 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6058
6059 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6060 }
6061
6062 /* __builtin_return (RESULT) causes the function to return the
6063 value described by RESULT. RESULT is address of the block of
6064 memory returned by __builtin_apply. */
6065 case BUILT_IN_RETURN:
6066 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6067 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6068 return const0_rtx;
6069
6070 case BUILT_IN_SAVEREGS:
6071 return expand_builtin_saveregs ();
6072
6073 case BUILT_IN_VA_ARG_PACK:
6074 /* All valid uses of __builtin_va_arg_pack () are removed during
6075 inlining. */
6076 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6077 return const0_rtx;
6078
6079 case BUILT_IN_VA_ARG_PACK_LEN:
6080 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6081 inlining. */
6082 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6083 return const0_rtx;
6084
6085 /* Return the address of the first anonymous stack arg. */
6086 case BUILT_IN_NEXT_ARG:
6087 if (fold_builtin_next_arg (exp, false))
6088 return const0_rtx;
6089 return expand_builtin_next_arg ();
6090
6091 case BUILT_IN_CLEAR_CACHE:
6092 target = expand_builtin___clear_cache (exp);
6093 if (target)
6094 return target;
6095 break;
6096
6097 case BUILT_IN_CLASSIFY_TYPE:
6098 return expand_builtin_classify_type (exp);
6099
6100 case BUILT_IN_CONSTANT_P:
6101 return const0_rtx;
6102
6103 case BUILT_IN_FRAME_ADDRESS:
6104 case BUILT_IN_RETURN_ADDRESS:
6105 return expand_builtin_frame_address (fndecl, exp);
6106
6107 /* Returns the address of the area where the structure is returned.
6108 0 otherwise. */
6109 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6110 if (call_expr_nargs (exp) != 0
6111 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6112 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6113 return const0_rtx;
6114 else
6115 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6116
6117 case BUILT_IN_ALLOCA:
6118 case BUILT_IN_ALLOCA_WITH_ALIGN:
6119 /* If the allocation stems from the declaration of a variable-sized
6120 object, it cannot accumulate. */
6121 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6122 if (target)
6123 return target;
6124 break;
6125
6126 case BUILT_IN_STACK_SAVE:
6127 return expand_stack_save ();
6128
6129 case BUILT_IN_STACK_RESTORE:
6130 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6131 return const0_rtx;
6132
6133 case BUILT_IN_BSWAP16:
6134 case BUILT_IN_BSWAP32:
6135 case BUILT_IN_BSWAP64:
6136 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6137 if (target)
6138 return target;
6139 break;
6140
6141 CASE_INT_FN (BUILT_IN_FFS):
6142 target = expand_builtin_unop (target_mode, exp, target,
6143 subtarget, ffs_optab);
6144 if (target)
6145 return target;
6146 break;
6147
6148 CASE_INT_FN (BUILT_IN_CLZ):
6149 target = expand_builtin_unop (target_mode, exp, target,
6150 subtarget, clz_optab);
6151 if (target)
6152 return target;
6153 break;
6154
6155 CASE_INT_FN (BUILT_IN_CTZ):
6156 target = expand_builtin_unop (target_mode, exp, target,
6157 subtarget, ctz_optab);
6158 if (target)
6159 return target;
6160 break;
6161
6162 CASE_INT_FN (BUILT_IN_CLRSB):
6163 target = expand_builtin_unop (target_mode, exp, target,
6164 subtarget, clrsb_optab);
6165 if (target)
6166 return target;
6167 break;
6168
6169 CASE_INT_FN (BUILT_IN_POPCOUNT):
6170 target = expand_builtin_unop (target_mode, exp, target,
6171 subtarget, popcount_optab);
6172 if (target)
6173 return target;
6174 break;
6175
6176 CASE_INT_FN (BUILT_IN_PARITY):
6177 target = expand_builtin_unop (target_mode, exp, target,
6178 subtarget, parity_optab);
6179 if (target)
6180 return target;
6181 break;
6182
6183 case BUILT_IN_STRLEN:
6184 target = expand_builtin_strlen (exp, target, target_mode);
6185 if (target)
6186 return target;
6187 break;
6188
6189 case BUILT_IN_STRCPY:
6190 target = expand_builtin_strcpy (exp, target);
6191 if (target)
6192 return target;
6193 break;
6194
6195 case BUILT_IN_STRNCPY:
6196 target = expand_builtin_strncpy (exp, target);
6197 if (target)
6198 return target;
6199 break;
6200
6201 case BUILT_IN_STPCPY:
6202 target = expand_builtin_stpcpy (exp, target, mode);
6203 if (target)
6204 return target;
6205 break;
6206
6207 case BUILT_IN_MEMCPY:
6208 target = expand_builtin_memcpy (exp, target);
6209 if (target)
6210 return target;
6211 break;
6212
6213 case BUILT_IN_MEMPCPY:
6214 target = expand_builtin_mempcpy (exp, target, mode);
6215 if (target)
6216 return target;
6217 break;
6218
6219 case BUILT_IN_MEMSET:
6220 target = expand_builtin_memset (exp, target, mode);
6221 if (target)
6222 return target;
6223 break;
6224
6225 case BUILT_IN_BZERO:
6226 target = expand_builtin_bzero (exp);
6227 if (target)
6228 return target;
6229 break;
6230
6231 case BUILT_IN_STRCMP:
6232 target = expand_builtin_strcmp (exp, target);
6233 if (target)
6234 return target;
6235 break;
6236
6237 case BUILT_IN_STRNCMP:
6238 target = expand_builtin_strncmp (exp, target, mode);
6239 if (target)
6240 return target;
6241 break;
6242
6243 case BUILT_IN_BCMP:
6244 case BUILT_IN_MEMCMP:
6245 target = expand_builtin_memcmp (exp, target);
6246 if (target)
6247 return target;
6248 break;
6249
6250 case BUILT_IN_SETJMP:
6251 /* This should have been lowered to the builtins below. */
6252 gcc_unreachable ();
6253
6254 case BUILT_IN_SETJMP_SETUP:
6255 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6256 and the receiver label. */
6257 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6258 {
6259 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6260 VOIDmode, EXPAND_NORMAL);
6261 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6262 rtx_insn *label_r = label_rtx (label);
6263
6264 /* This is copied from the handling of non-local gotos. */
6265 expand_builtin_setjmp_setup (buf_addr, label_r);
6266 nonlocal_goto_handler_labels
6267 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6268 nonlocal_goto_handler_labels);
6269 /* ??? Do not let expand_label treat us as such since we would
6270 not want to be both on the list of non-local labels and on
6271 the list of forced labels. */
6272 FORCED_LABEL (label) = 0;
6273 return const0_rtx;
6274 }
6275 break;
6276
6277 case BUILT_IN_SETJMP_RECEIVER:
6278 /* __builtin_setjmp_receiver is passed the receiver label. */
6279 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6280 {
6281 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6282 rtx_insn *label_r = label_rtx (label);
6283
6284 expand_builtin_setjmp_receiver (label_r);
6285 return const0_rtx;
6286 }
6287 break;
6288
6289 /* __builtin_longjmp is passed a pointer to an array of five words.
6290 It's similar to the C library longjmp function but works with
6291 __builtin_setjmp above. */
6292 case BUILT_IN_LONGJMP:
6293 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6294 {
6295 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6296 VOIDmode, EXPAND_NORMAL);
6297 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6298
6299 if (value != const1_rtx)
6300 {
6301 error ("%<__builtin_longjmp%> second argument must be 1");
6302 return const0_rtx;
6303 }
6304
6305 expand_builtin_longjmp (buf_addr, value);
6306 return const0_rtx;
6307 }
6308 break;
6309
6310 case BUILT_IN_NONLOCAL_GOTO:
6311 target = expand_builtin_nonlocal_goto (exp);
6312 if (target)
6313 return target;
6314 break;
6315
6316 /* This updates the setjmp buffer that is its argument with the value
6317 of the current stack pointer. */
6318 case BUILT_IN_UPDATE_SETJMP_BUF:
6319 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6320 {
6321 rtx buf_addr
6322 = expand_normal (CALL_EXPR_ARG (exp, 0));
6323
6324 expand_builtin_update_setjmp_buf (buf_addr);
6325 return const0_rtx;
6326 }
6327 break;
6328
6329 case BUILT_IN_TRAP:
6330 expand_builtin_trap ();
6331 return const0_rtx;
6332
6333 case BUILT_IN_UNREACHABLE:
6334 expand_builtin_unreachable ();
6335 return const0_rtx;
6336
6337 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6338 case BUILT_IN_SIGNBITD32:
6339 case BUILT_IN_SIGNBITD64:
6340 case BUILT_IN_SIGNBITD128:
6341 target = expand_builtin_signbit (exp, target);
6342 if (target)
6343 return target;
6344 break;
6345
6346 /* Various hooks for the DWARF 2 __throw routine. */
6347 case BUILT_IN_UNWIND_INIT:
6348 expand_builtin_unwind_init ();
6349 return const0_rtx;
6350 case BUILT_IN_DWARF_CFA:
6351 return virtual_cfa_rtx;
6352 #ifdef DWARF2_UNWIND_INFO
6353 case BUILT_IN_DWARF_SP_COLUMN:
6354 return expand_builtin_dwarf_sp_column ();
6355 case BUILT_IN_INIT_DWARF_REG_SIZES:
6356 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6357 return const0_rtx;
6358 #endif
6359 case BUILT_IN_FROB_RETURN_ADDR:
6360 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6361 case BUILT_IN_EXTRACT_RETURN_ADDR:
6362 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6363 case BUILT_IN_EH_RETURN:
6364 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6365 CALL_EXPR_ARG (exp, 1));
6366 return const0_rtx;
6367 case BUILT_IN_EH_RETURN_DATA_REGNO:
6368 return expand_builtin_eh_return_data_regno (exp);
6369 case BUILT_IN_EXTEND_POINTER:
6370 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6371 case BUILT_IN_EH_POINTER:
6372 return expand_builtin_eh_pointer (exp);
6373 case BUILT_IN_EH_FILTER:
6374 return expand_builtin_eh_filter (exp);
6375 case BUILT_IN_EH_COPY_VALUES:
6376 return expand_builtin_eh_copy_values (exp);
6377
6378 case BUILT_IN_VA_START:
6379 return expand_builtin_va_start (exp);
6380 case BUILT_IN_VA_END:
6381 return expand_builtin_va_end (exp);
6382 case BUILT_IN_VA_COPY:
6383 return expand_builtin_va_copy (exp);
6384 case BUILT_IN_EXPECT:
6385 return expand_builtin_expect (exp, target);
6386 case BUILT_IN_ASSUME_ALIGNED:
6387 return expand_builtin_assume_aligned (exp, target);
6388 case BUILT_IN_PREFETCH:
6389 expand_builtin_prefetch (exp);
6390 return const0_rtx;
6391
6392 case BUILT_IN_INIT_TRAMPOLINE:
6393 return expand_builtin_init_trampoline (exp, true);
6394 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6395 return expand_builtin_init_trampoline (exp, false);
6396 case BUILT_IN_ADJUST_TRAMPOLINE:
6397 return expand_builtin_adjust_trampoline (exp);
6398
6399 case BUILT_IN_FORK:
6400 case BUILT_IN_EXECL:
6401 case BUILT_IN_EXECV:
6402 case BUILT_IN_EXECLP:
6403 case BUILT_IN_EXECLE:
6404 case BUILT_IN_EXECVP:
6405 case BUILT_IN_EXECVE:
6406 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6407 if (target)
6408 return target;
6409 break;
6410
6411 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6412 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6413 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6414 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6415 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6417 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6418 if (target)
6419 return target;
6420 break;
6421
6422 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6423 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6424 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6425 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6426 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6428 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6429 if (target)
6430 return target;
6431 break;
6432
6433 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6434 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6435 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6436 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6437 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6439 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6440 if (target)
6441 return target;
6442 break;
6443
6444 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6445 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6446 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6447 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6448 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6450 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6451 if (target)
6452 return target;
6453 break;
6454
6455 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6456 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6457 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6458 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6459 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6461 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6462 if (target)
6463 return target;
6464 break;
6465
6466 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6467 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6468 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6469 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6470 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6472 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6473 if (target)
6474 return target;
6475 break;
6476
6477 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6478 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6479 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6480 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6481 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6483 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6484 if (target)
6485 return target;
6486 break;
6487
6488 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6489 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6490 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6491 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6492 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6493 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6494 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6495 if (target)
6496 return target;
6497 break;
6498
6499 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6500 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6501 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6502 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6503 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6504 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6505 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6506 if (target)
6507 return target;
6508 break;
6509
6510 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6511 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6512 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6513 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6514 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6516 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6517 if (target)
6518 return target;
6519 break;
6520
6521 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6522 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6523 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6524 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6525 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6527 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6528 if (target)
6529 return target;
6530 break;
6531
6532 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6533 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6534 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6535 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6536 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6538 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6539 if (target)
6540 return target;
6541 break;
6542
6543 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6544 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6545 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6546 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6547 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6548 if (mode == VOIDmode)
6549 mode = TYPE_MODE (boolean_type_node);
6550 if (!target || !register_operand (target, mode))
6551 target = gen_reg_rtx (mode);
6552
6553 mode = get_builtin_sync_mode
6554 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6555 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6561 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6562 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6563 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6564 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6565 mode = get_builtin_sync_mode
6566 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6567 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6568 if (target)
6569 return target;
6570 break;
6571
6572 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6573 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6574 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6575 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6576 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6577 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6578 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6579 if (target)
6580 return target;
6581 break;
6582
6583 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6584 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6585 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6586 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6587 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6589 expand_builtin_sync_lock_release (mode, exp);
6590 return const0_rtx;
6591
6592 case BUILT_IN_SYNC_SYNCHRONIZE:
6593 expand_builtin_sync_synchronize ();
6594 return const0_rtx;
6595
6596 case BUILT_IN_ATOMIC_EXCHANGE_1:
6597 case BUILT_IN_ATOMIC_EXCHANGE_2:
6598 case BUILT_IN_ATOMIC_EXCHANGE_4:
6599 case BUILT_IN_ATOMIC_EXCHANGE_8:
6600 case BUILT_IN_ATOMIC_EXCHANGE_16:
6601 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6602 target = expand_builtin_atomic_exchange (mode, exp, target);
6603 if (target)
6604 return target;
6605 break;
6606
6607 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6608 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6609 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6610 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6611 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6612 {
6613 unsigned int nargs, z;
6614 vec<tree, va_gc> *vec;
6615
6616 mode =
6617 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6618 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6619 if (target)
6620 return target;
6621
6622 /* If this is turned into an external library call, the weak parameter
6623 must be dropped to match the expected parameter list. */
6624 nargs = call_expr_nargs (exp);
6625 vec_alloc (vec, nargs - 1);
6626 for (z = 0; z < 3; z++)
6627 vec->quick_push (CALL_EXPR_ARG (exp, z));
6628 /* Skip the boolean weak parameter. */
6629 for (z = 4; z < 6; z++)
6630 vec->quick_push (CALL_EXPR_ARG (exp, z));
6631 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6632 break;
6633 }
6634
6635 case BUILT_IN_ATOMIC_LOAD_1:
6636 case BUILT_IN_ATOMIC_LOAD_2:
6637 case BUILT_IN_ATOMIC_LOAD_4:
6638 case BUILT_IN_ATOMIC_LOAD_8:
6639 case BUILT_IN_ATOMIC_LOAD_16:
6640 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6641 target = expand_builtin_atomic_load (mode, exp, target);
6642 if (target)
6643 return target;
6644 break;
6645
6646 case BUILT_IN_ATOMIC_STORE_1:
6647 case BUILT_IN_ATOMIC_STORE_2:
6648 case BUILT_IN_ATOMIC_STORE_4:
6649 case BUILT_IN_ATOMIC_STORE_8:
6650 case BUILT_IN_ATOMIC_STORE_16:
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6652 target = expand_builtin_atomic_store (mode, exp);
6653 if (target)
6654 return const0_rtx;
6655 break;
6656
6657 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6658 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6659 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6660 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6661 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6662 {
6663 enum built_in_function lib;
6664 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6665 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6666 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6667 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6668 ignore, lib);
6669 if (target)
6670 return target;
6671 break;
6672 }
6673 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6674 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6675 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6676 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6677 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6678 {
6679 enum built_in_function lib;
6680 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6681 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6682 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6683 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6684 ignore, lib);
6685 if (target)
6686 return target;
6687 break;
6688 }
6689 case BUILT_IN_ATOMIC_AND_FETCH_1:
6690 case BUILT_IN_ATOMIC_AND_FETCH_2:
6691 case BUILT_IN_ATOMIC_AND_FETCH_4:
6692 case BUILT_IN_ATOMIC_AND_FETCH_8:
6693 case BUILT_IN_ATOMIC_AND_FETCH_16:
6694 {
6695 enum built_in_function lib;
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6697 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6698 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6699 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6700 ignore, lib);
6701 if (target)
6702 return target;
6703 break;
6704 }
6705 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6706 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6707 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6708 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6709 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6710 {
6711 enum built_in_function lib;
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6713 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6714 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6716 ignore, lib);
6717 if (target)
6718 return target;
6719 break;
6720 }
6721 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6722 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6723 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6724 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6725 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6726 {
6727 enum built_in_function lib;
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6729 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6730 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6732 ignore, lib);
6733 if (target)
6734 return target;
6735 break;
6736 }
6737 case BUILT_IN_ATOMIC_OR_FETCH_1:
6738 case BUILT_IN_ATOMIC_OR_FETCH_2:
6739 case BUILT_IN_ATOMIC_OR_FETCH_4:
6740 case BUILT_IN_ATOMIC_OR_FETCH_8:
6741 case BUILT_IN_ATOMIC_OR_FETCH_16:
6742 {
6743 enum built_in_function lib;
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6745 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6746 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6747 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6748 ignore, lib);
6749 if (target)
6750 return target;
6751 break;
6752 }
6753 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6754 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6755 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6756 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6757 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6758 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6760 ignore, BUILT_IN_NONE);
6761 if (target)
6762 return target;
6763 break;
6764
6765 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6766 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6767 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6768 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6769 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6771 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6772 ignore, BUILT_IN_NONE);
6773 if (target)
6774 return target;
6775 break;
6776
6777 case BUILT_IN_ATOMIC_FETCH_AND_1:
6778 case BUILT_IN_ATOMIC_FETCH_AND_2:
6779 case BUILT_IN_ATOMIC_FETCH_AND_4:
6780 case BUILT_IN_ATOMIC_FETCH_AND_8:
6781 case BUILT_IN_ATOMIC_FETCH_AND_16:
6782 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6783 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6784 ignore, BUILT_IN_NONE);
6785 if (target)
6786 return target;
6787 break;
6788
6789 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6790 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6791 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6792 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6793 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6794 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6795 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6796 ignore, BUILT_IN_NONE);
6797 if (target)
6798 return target;
6799 break;
6800
6801 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6802 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6803 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6804 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6805 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6806 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6808 ignore, BUILT_IN_NONE);
6809 if (target)
6810 return target;
6811 break;
6812
6813 case BUILT_IN_ATOMIC_FETCH_OR_1:
6814 case BUILT_IN_ATOMIC_FETCH_OR_2:
6815 case BUILT_IN_ATOMIC_FETCH_OR_4:
6816 case BUILT_IN_ATOMIC_FETCH_OR_8:
6817 case BUILT_IN_ATOMIC_FETCH_OR_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6820 ignore, BUILT_IN_NONE);
6821 if (target)
6822 return target;
6823 break;
6824
6825 case BUILT_IN_ATOMIC_TEST_AND_SET:
6826 return expand_builtin_atomic_test_and_set (exp, target);
6827
6828 case BUILT_IN_ATOMIC_CLEAR:
6829 return expand_builtin_atomic_clear (exp);
6830
6831 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6832 return expand_builtin_atomic_always_lock_free (exp);
6833
6834 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6835 target = expand_builtin_atomic_is_lock_free (exp);
6836 if (target)
6837 return target;
6838 break;
6839
6840 case BUILT_IN_ATOMIC_THREAD_FENCE:
6841 expand_builtin_atomic_thread_fence (exp);
6842 return const0_rtx;
6843
6844 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6845 expand_builtin_atomic_signal_fence (exp);
6846 return const0_rtx;
6847
6848 case BUILT_IN_OBJECT_SIZE:
6849 return expand_builtin_object_size (exp);
6850
6851 case BUILT_IN_MEMCPY_CHK:
6852 case BUILT_IN_MEMPCPY_CHK:
6853 case BUILT_IN_MEMMOVE_CHK:
6854 case BUILT_IN_MEMSET_CHK:
6855 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6856 if (target)
6857 return target;
6858 break;
6859
6860 case BUILT_IN_STRCPY_CHK:
6861 case BUILT_IN_STPCPY_CHK:
6862 case BUILT_IN_STRNCPY_CHK:
6863 case BUILT_IN_STPNCPY_CHK:
6864 case BUILT_IN_STRCAT_CHK:
6865 case BUILT_IN_STRNCAT_CHK:
6866 case BUILT_IN_SNPRINTF_CHK:
6867 case BUILT_IN_VSNPRINTF_CHK:
6868 maybe_emit_chk_warning (exp, fcode);
6869 break;
6870
6871 case BUILT_IN_SPRINTF_CHK:
6872 case BUILT_IN_VSPRINTF_CHK:
6873 maybe_emit_sprintf_chk_warning (exp, fcode);
6874 break;
6875
6876 case BUILT_IN_FREE:
6877 if (warn_free_nonheap_object)
6878 maybe_emit_free_warning (exp);
6879 break;
6880
6881 case BUILT_IN_THREAD_POINTER:
6882 return expand_builtin_thread_pointer (exp, target);
6883
6884 case BUILT_IN_SET_THREAD_POINTER:
6885 expand_builtin_set_thread_pointer (exp);
6886 return const0_rtx;
6887
6888 case BUILT_IN_CILK_DETACH:
6889 expand_builtin_cilk_detach (exp);
6890 return const0_rtx;
6891
6892 case BUILT_IN_CILK_POP_FRAME:
6893 expand_builtin_cilk_pop_frame (exp);
6894 return const0_rtx;
6895
6896 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6897 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6898 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6899 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6900 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6901 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6902 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6903 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6904 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6905 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6906 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6907 /* We allow user CHKP builtins if Pointer Bounds
6908 Checker is off. */
6909 if (!chkp_function_instrumented_p (current_function_decl))
6910 {
6911 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6912 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6913 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6914 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6915 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6916 return expand_normal (CALL_EXPR_ARG (exp, 0));
6917 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6918 return expand_normal (size_zero_node);
6919 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6920 return expand_normal (size_int (-1));
6921 else
6922 return const0_rtx;
6923 }
6924 /* FALLTHROUGH */
6925
6926 case BUILT_IN_CHKP_BNDMK:
6927 case BUILT_IN_CHKP_BNDSTX:
6928 case BUILT_IN_CHKP_BNDCL:
6929 case BUILT_IN_CHKP_BNDCU:
6930 case BUILT_IN_CHKP_BNDLDX:
6931 case BUILT_IN_CHKP_BNDRET:
6932 case BUILT_IN_CHKP_INTERSECT:
6933 case BUILT_IN_CHKP_NARROW:
6934 case BUILT_IN_CHKP_EXTRACT_LOWER:
6935 case BUILT_IN_CHKP_EXTRACT_UPPER:
6936 /* Software implementation of Pointer Bounds Checker is NYI.
6937 Target support is required. */
6938 error ("Your target platform does not support -fcheck-pointer-bounds");
6939 break;
6940
6941 case BUILT_IN_ACC_ON_DEVICE:
6942       /* Do a library call if we failed to expand the builtin when
6943 	 folding.  */
6944 break;
6945
6946     default:	/* Just do a library call if the builtin is unknown.  */
6947 break;
6948 }
6949
6950 /* The switch statement above can drop through to cause the function
6951 to be called normally. */
6952 return expand_call (exp, target, ignore);
6953 }
6954
6955 /* Similar to expand_builtin but is used for instrumented calls. */
6956
6957 rtx
6958 expand_builtin_with_bounds (tree exp, rtx target,
6959 rtx subtarget ATTRIBUTE_UNUSED,
6960 machine_mode mode, int ignore)
6961 {
6962 tree fndecl = get_callee_fndecl (exp);
6963 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6964
6965 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6966
6967 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6968 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6969
6970 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6971 && fcode < END_CHKP_BUILTINS);
6972
6973 switch (fcode)
6974 {
6975 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6976 target = expand_builtin_memcpy_with_bounds (exp, target);
6977 if (target)
6978 return target;
6979 break;
6980
6981 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6982 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6983 if (target)
6984 return target;
6985 break;
6986
6987 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6988 target = expand_builtin_memset_with_bounds (exp, target, mode);
6989 if (target)
6990 return target;
6991 break;
6992
6993 default:
6994 break;
6995 }
6996
6997 /* The switch statement above can drop through to cause the function
6998 to be called normally. */
6999 return expand_call (exp, target, ignore);
7000 }
7001
7002 /* Determine whether a tree node represents a call to a built-in
7003 function. If the tree T is a call to a built-in function with
7004 the right number of arguments of the appropriate types, return
7005 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7006 Otherwise the return value is END_BUILTINS. */
7007
7008 enum built_in_function
7009 builtin_mathfn_code (const_tree t)
7010 {
7011 const_tree fndecl, arg, parmlist;
7012 const_tree argtype, parmtype;
7013 const_call_expr_arg_iterator iter;
7014
7015 if (TREE_CODE (t) != CALL_EXPR
7016 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7017 return END_BUILTINS;
7018
7019 fndecl = get_callee_fndecl (t);
7020 if (fndecl == NULL_TREE
7021 || TREE_CODE (fndecl) != FUNCTION_DECL
7022 || ! DECL_BUILT_IN (fndecl)
7023 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7024 return END_BUILTINS;
7025
7026 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7027 init_const_call_expr_arg_iterator (t, &iter);
7028 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7029 {
7030 /* If a function doesn't take a variable number of arguments,
7031 the last element in the list will have type `void'. */
7032 parmtype = TREE_VALUE (parmlist);
7033 if (VOID_TYPE_P (parmtype))
7034 {
7035 if (more_const_call_expr_args_p (&iter))
7036 return END_BUILTINS;
7037 return DECL_FUNCTION_CODE (fndecl);
7038 }
7039
7040 if (! more_const_call_expr_args_p (&iter))
7041 return END_BUILTINS;
7042
7043 arg = next_const_call_expr_arg (&iter);
7044 argtype = TREE_TYPE (arg);
7045
7046 if (SCALAR_FLOAT_TYPE_P (parmtype))
7047 {
7048 if (! SCALAR_FLOAT_TYPE_P (argtype))
7049 return END_BUILTINS;
7050 }
7051 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7052 {
7053 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7054 return END_BUILTINS;
7055 }
7056 else if (POINTER_TYPE_P (parmtype))
7057 {
7058 if (! POINTER_TYPE_P (argtype))
7059 return END_BUILTINS;
7060 }
7061 else if (INTEGRAL_TYPE_P (parmtype))
7062 {
7063 if (! INTEGRAL_TYPE_P (argtype))
7064 return END_BUILTINS;
7065 }
7066 else
7067 return END_BUILTINS;
7068 }
7069
7070 /* Variable-length argument list. */
7071 return DECL_FUNCTION_CODE (fndecl);
7072 }
7073
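/* Purely illustrative sketch (no such helper exists in this file): a caller
   that wants to special-case sqrt calls could use builtin_mathfn_code along
   these lines, treating END_BUILTINS as the "not a recognized math builtin"
   answer described above.

     static bool
     call_is_sqrt_p (const_tree t)
     {
       switch (builtin_mathfn_code (t))
	 {
	 CASE_FLT_FN (BUILT_IN_SQRT):
	   return true;
	 default:
	   return false;
	 }
     }
*/
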
7074 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7075 evaluate to a constant. */
7076
7077 static tree
7078 fold_builtin_constant_p (tree arg)
7079 {
7080 /* We return 1 for a numeric type that's known to be a constant
7081 value at compile-time or for an aggregate type that's a
7082 literal constant. */
7083 STRIP_NOPS (arg);
7084
7085   /* If we know this is a constant, return the constant one.  */
7086 if (CONSTANT_CLASS_P (arg)
7087 || (TREE_CODE (arg) == CONSTRUCTOR
7088 && TREE_CONSTANT (arg)))
7089 return integer_one_node;
7090 if (TREE_CODE (arg) == ADDR_EXPR)
7091 {
7092 tree op = TREE_OPERAND (arg, 0);
7093 if (TREE_CODE (op) == STRING_CST
7094 || (TREE_CODE (op) == ARRAY_REF
7095 && integer_zerop (TREE_OPERAND (op, 1))
7096 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7097 return integer_one_node;
7098 }
7099
7100 /* If this expression has side effects, show we don't know it to be a
7101      constant.  Likewise if it's a pointer or aggregate type, since in
7102      those cases we only want literals, as those are only optimized
7103      when generating RTL, not later.
7104 And finally, if we are compiling an initializer, not code, we
7105 need to return a definite result now; there's not going to be any
7106 more optimization done. */
7107 if (TREE_SIDE_EFFECTS (arg)
7108 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7109 || POINTER_TYPE_P (TREE_TYPE (arg))
7110 || cfun == 0
7111 || folding_initializer
7112 || force_folding_builtin_constant_p)
7113 return integer_zero_node;
7114
7115 return NULL_TREE;
7116 }
7117
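/* Illustrative examples of the cases handled above; PTR_VAR and X stand for
   arbitrary non-constant operands:

     __builtin_constant_p (42)       -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")    -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (ptr_var)  -> 0  (pointer type, literals only)
     __builtin_constant_p (x + 1)    -> NULL_TREE, i.e. deferred, since X
					may still become constant later.  */
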
7118 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7119 return it as a truthvalue. */
7120
7121 static tree
7122 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7123 tree predictor)
7124 {
7125 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7126
7127 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7128 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7129 ret_type = TREE_TYPE (TREE_TYPE (fn));
7130 pred_type = TREE_VALUE (arg_types);
7131 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7132
7133 pred = fold_convert_loc (loc, pred_type, pred);
7134 expected = fold_convert_loc (loc, expected_type, expected);
7135 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7136 predictor);
7137
7138 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7139 build_int_cst (ret_type, 0));
7140 }
7141
7142 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.  Return
7143 NULL_TREE if no simplification is possible. */
7144
7145 tree
7146 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7147 {
7148 tree inner, fndecl, inner_arg0;
7149 enum tree_code code;
7150
7151 /* Distribute the expected value over short-circuiting operators.
7152 See through the cast from truthvalue_type_node to long. */
7153 inner_arg0 = arg0;
7154 while (CONVERT_EXPR_P (inner_arg0)
7155 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7156 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7157 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7158
7159 /* If this is a builtin_expect within a builtin_expect keep the
7160 inner one. See through a comparison against a constant. It
7161    might have been added to create a truthvalue.
7162 inner = inner_arg0;
7163
7164 if (COMPARISON_CLASS_P (inner)
7165 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7166 inner = TREE_OPERAND (inner, 0);
7167
7168 if (TREE_CODE (inner) == CALL_EXPR
7169 && (fndecl = get_callee_fndecl (inner))
7170 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7171 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7172 return arg0;
7173
7174 inner = inner_arg0;
7175 code = TREE_CODE (inner);
7176 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7177 {
7178 tree op0 = TREE_OPERAND (inner, 0);
7179 tree op1 = TREE_OPERAND (inner, 1);
7180
7181 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7182 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7183 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7184
7185 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7186 }
7187
7188 /* If the argument isn't invariant then there's nothing else we can do. */
7189 if (!TREE_CONSTANT (inner_arg0))
7190 return NULL_TREE;
7191
7192 /* If we expect that a comparison against the argument will fold to
7193 a constant return the constant. In practice, this means a true
7194 constant or the address of a non-weak symbol. */
7195 inner = inner_arg0;
7196 STRIP_NOPS (inner);
7197 if (TREE_CODE (inner) == ADDR_EXPR)
7198 {
7199 do
7200 {
7201 inner = TREE_OPERAND (inner, 0);
7202 }
7203 while (TREE_CODE (inner) == COMPONENT_REF
7204 || TREE_CODE (inner) == ARRAY_REF);
7205 if ((TREE_CODE (inner) == VAR_DECL
7206 || TREE_CODE (inner) == FUNCTION_DECL)
7207 && DECL_WEAK (inner))
7208 return NULL_TREE;
7209 }
7210
7211 /* Otherwise, ARG0 already has the proper type for the return value. */
7212 return arg0;
7213 }
7214
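/* Rough source-level picture of the distribution above (illustrative only):

     __builtin_expect (a && b, 1)

   becomes approximately

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries its own expectation.  */
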
7215 /* Fold a call to __builtin_classify_type with argument ARG. */
7216
7217 static tree
7218 fold_builtin_classify_type (tree arg)
7219 {
7220 if (arg == 0)
7221 return build_int_cst (integer_type_node, no_type_class);
7222
7223 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7224 }
7225
7226 /* Fold a call to __builtin_strlen with argument ARG. */
7227
7228 static tree
7229 fold_builtin_strlen (location_t loc, tree type, tree arg)
7230 {
7231 if (!validate_arg (arg, POINTER_TYPE))
7232 return NULL_TREE;
7233 else
7234 {
7235 tree len = c_strlen (arg, 0);
7236
7237 if (len)
7238 return fold_convert_loc (loc, type, len);
7239
7240 return NULL_TREE;
7241 }
7242 }
7243
7244 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7245
7246 static tree
7247 fold_builtin_inf (location_t loc, tree type, int warn)
7248 {
7249 REAL_VALUE_TYPE real;
7250
7251 /* __builtin_inff is intended to be usable to define INFINITY on all
7252 targets. If an infinity is not available, INFINITY expands "to a
7253 positive constant of type float that overflows at translation
7254 time", footnote "In this case, using INFINITY will violate the
7255 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7256 Thus we pedwarn to ensure this constraint violation is
7257 diagnosed. */
7258 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7259 pedwarn (loc, 0, "target format does not support infinity");
7260
7261 real_inf (&real);
7262 return build_real (type, real);
7263 }
7264
7265 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7266 NULL_TREE if no simplification can be made. */
7267
7268 static tree
7269 fold_builtin_sincos (location_t loc,
7270 tree arg0, tree arg1, tree arg2)
7271 {
7272 tree type;
7273 tree fndecl, call = NULL_TREE;
7274
7275 if (!validate_arg (arg0, REAL_TYPE)
7276 || !validate_arg (arg1, POINTER_TYPE)
7277 || !validate_arg (arg2, POINTER_TYPE))
7278 return NULL_TREE;
7279
7280 type = TREE_TYPE (arg0);
7281
7282 /* Calculate the result when the argument is a constant. */
7283 built_in_function fn = mathfn_built_in_2 (type, BUILT_IN_CEXPI);
7284 if (fn == END_BUILTINS)
7285 return NULL_TREE;
7286
7287 /* Canonicalize sincos to cexpi. */
7288 if (TREE_CODE (arg0) == REAL_CST)
7289 {
7290 tree complex_type = build_complex_type (type);
7291 call = fold_const_call (fn, complex_type, arg0);
7292 }
7293 if (!call)
7294 {
7295 if (!targetm.libc_has_function (function_c99_math_complex)
7296 || !builtin_decl_implicit_p (fn))
7297 return NULL_TREE;
7298 fndecl = builtin_decl_explicit (fn);
7299 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7300 call = builtin_save_expr (call);
7301 }
7302
7303 return build2 (COMPOUND_EXPR, void_type_node,
7304 build2 (MODIFY_EXPR, void_type_node,
7305 build_fold_indirect_ref_loc (loc, arg1),
7306 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7307 build2 (MODIFY_EXPR, void_type_node,
7308 build_fold_indirect_ref_loc (loc, arg2),
7309 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7310 }
7311
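/* Illustrative sketch of the canonicalization above, assuming the target
   libc provides C99 complex math so the cexpi builtin is usable:

     sincos (x, &s, &c);

   is rewritten roughly as

     __complex__ double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   and for a constant X the cexpi call itself is folded by fold_const_call.  */
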
7312 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7313 arguments to the call, and TYPE is its return type.
7314 Return NULL_TREE if no simplification can be made. */
7315
7316 static tree
7317 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7318 {
7319 if (!validate_arg (arg1, POINTER_TYPE)
7320 || !validate_arg (arg2, INTEGER_TYPE)
7321 || !validate_arg (len, INTEGER_TYPE))
7322 return NULL_TREE;
7323 else
7324 {
7325 const char *p1;
7326
7327 if (TREE_CODE (arg2) != INTEGER_CST
7328 || !tree_fits_uhwi_p (len))
7329 return NULL_TREE;
7330
7331 p1 = c_getstr (arg1);
7332 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7333 {
7334 char c;
7335 const char *r;
7336 tree tem;
7337
7338 if (target_char_cast (arg2, &c))
7339 return NULL_TREE;
7340
7341 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7342
7343 if (r == NULL)
7344 return build_int_cst (TREE_TYPE (arg1), 0);
7345
7346 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7347 return fold_convert_loc (loc, type, tem);
7348 }
7349 return NULL_TREE;
7350 }
7351 }
7352
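/* Illustrative constant-folding examples for the code above, assuming the
   host and target character sets agree (which target_char_cast checks):

     memchr ("hello", 'l', 5)  ->  "hello" + 2
     memchr ("hello", 'z', 5)  ->  (char *) 0
*/
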
7353 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7354 Return NULL_TREE if no simplification can be made. */
7355
7356 static tree
7357 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7358 {
7359 if (!validate_arg (arg1, POINTER_TYPE)
7360 || !validate_arg (arg2, POINTER_TYPE)
7361 || !validate_arg (len, INTEGER_TYPE))
7362 return NULL_TREE;
7363
7364 /* If the LEN parameter is zero, return zero. */
7365 if (integer_zerop (len))
7366 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7367 arg1, arg2);
7368
7369 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7370 if (operand_equal_p (arg1, arg2, 0))
7371 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7372
7373 /* If len parameter is one, return an expression corresponding to
7374      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7375 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7376 {
7377 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7378 tree cst_uchar_ptr_node
7379 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7380
7381 tree ind1
7382 = fold_convert_loc (loc, integer_type_node,
7383 build1 (INDIRECT_REF, cst_uchar_node,
7384 fold_convert_loc (loc,
7385 cst_uchar_ptr_node,
7386 arg1)));
7387 tree ind2
7388 = fold_convert_loc (loc, integer_type_node,
7389 build1 (INDIRECT_REF, cst_uchar_node,
7390 fold_convert_loc (loc,
7391 cst_uchar_ptr_node,
7392 arg2)));
7393 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7394 }
7395
7396 return NULL_TREE;
7397 }
7398
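/* Illustrative source-level equivalents of the folds above:

     memcmp (a, b, 0)  ->  0   (side effects of A and B are preserved)
     memcmp (a, a, n)  ->  0   (side effects of N are preserved)
     memcmp (a, b, 1)  ->  *(const unsigned char *) a
			   - *(const unsigned char *) b
*/
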
7399 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7400 Return NULL_TREE if no simplification can be made. */
7401
7402 static tree
7403 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7404 {
7405 if (!validate_arg (arg1, POINTER_TYPE)
7406 || !validate_arg (arg2, POINTER_TYPE))
7407 return NULL_TREE;
7408
7409 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7410 if (operand_equal_p (arg1, arg2, 0))
7411 return integer_zero_node;
7412
7413 /* If the second arg is "", return *(const unsigned char*)arg1. */
7414 const char *p2 = c_getstr (arg2);
7415 if (p2 && *p2 == '\0')
7416 {
7417 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7418 tree cst_uchar_ptr_node
7419 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7420
7421 return fold_convert_loc (loc, integer_type_node,
7422 build1 (INDIRECT_REF, cst_uchar_node,
7423 fold_convert_loc (loc,
7424 cst_uchar_ptr_node,
7425 arg1)));
7426 }
7427
7428 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7429 const char *p1 = c_getstr (arg1);
7430 if (p1 && *p1 == '\0')
7431 {
7432 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7433 tree cst_uchar_ptr_node
7434 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7435
7436 tree temp
7437 = fold_convert_loc (loc, integer_type_node,
7438 build1 (INDIRECT_REF, cst_uchar_node,
7439 fold_convert_loc (loc,
7440 cst_uchar_ptr_node,
7441 arg2)));
7442 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7443 }
7444
7445 return NULL_TREE;
7446 }
7447
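/* Illustrative source-level equivalents of the folds above:

     strcmp (s, s)   ->  0
     strcmp (s, "")  ->  *(const unsigned char *) s
     strcmp ("", s)  ->  -*(const unsigned char *) s
*/
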
7448 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7449 Return NULL_TREE if no simplification can be made. */
7450
7451 static tree
7452 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7453 {
7454 if (!validate_arg (arg1, POINTER_TYPE)
7455 || !validate_arg (arg2, POINTER_TYPE)
7456 || !validate_arg (len, INTEGER_TYPE))
7457 return NULL_TREE;
7458
7459 /* If the LEN parameter is zero, return zero. */
7460 if (integer_zerop (len))
7461 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7462 arg1, arg2);
7463
7464 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7465 if (operand_equal_p (arg1, arg2, 0))
7466 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7467
7468 /* If the second arg is "", and the length is greater than zero,
7469 return *(const unsigned char*)arg1. */
7470 const char *p2 = c_getstr (arg2);
7471 if (p2 && *p2 == '\0'
7472 && TREE_CODE (len) == INTEGER_CST
7473 && tree_int_cst_sgn (len) == 1)
7474 {
7475 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7476 tree cst_uchar_ptr_node
7477 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7478
7479 return fold_convert_loc (loc, integer_type_node,
7480 build1 (INDIRECT_REF, cst_uchar_node,
7481 fold_convert_loc (loc,
7482 cst_uchar_ptr_node,
7483 arg1)));
7484 }
7485
7486 /* If the first arg is "", and the length is greater than zero,
7487 return -*(const unsigned char*)arg2. */
7488 const char *p1 = c_getstr (arg1);
7489 if (p1 && *p1 == '\0'
7490 && TREE_CODE (len) == INTEGER_CST
7491 && tree_int_cst_sgn (len) == 1)
7492 {
7493 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7494 tree cst_uchar_ptr_node
7495 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7496
7497 tree temp = fold_convert_loc (loc, integer_type_node,
7498 build1 (INDIRECT_REF, cst_uchar_node,
7499 fold_convert_loc (loc,
7500 cst_uchar_ptr_node,
7501 arg2)));
7502 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7503 }
7504
7505 /* If len parameter is one, return an expression corresponding to
7506      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7507 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7508 {
7509 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7510 tree cst_uchar_ptr_node
7511 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7512
7513 tree ind1 = fold_convert_loc (loc, integer_type_node,
7514 build1 (INDIRECT_REF, cst_uchar_node,
7515 fold_convert_loc (loc,
7516 cst_uchar_ptr_node,
7517 arg1)));
7518 tree ind2 = fold_convert_loc (loc, integer_type_node,
7519 build1 (INDIRECT_REF, cst_uchar_node,
7520 fold_convert_loc (loc,
7521 cst_uchar_ptr_node,
7522 arg2)));
7523 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7524 }
7525
7526 return NULL_TREE;
7527 }
7528
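/* Illustrative source-level equivalents of the folds above, with N a
   constant greater than zero where noted:

     strncmp (a, b, 0)   ->  0
     strncmp (s, "", n)  ->  *(const unsigned char *) s	(n > 0)
     strncmp ("", s, n)  ->  -*(const unsigned char *) s	(n > 0)
     strncmp (a, b, 1)   ->  *(const unsigned char *) a
			     - *(const unsigned char *) b
*/
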
7529 /* Fold a call to builtin isascii with argument ARG. */
7530
7531 static tree
7532 fold_builtin_isascii (location_t loc, tree arg)
7533 {
7534 if (!validate_arg (arg, INTEGER_TYPE))
7535 return NULL_TREE;
7536 else
7537 {
7538 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7539 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7540 build_int_cst (integer_type_node,
7541 ~ (unsigned HOST_WIDE_INT) 0x7f));
7542 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7543 arg, integer_zero_node);
7544 }
7545 }
7546
7547 /* Fold a call to builtin toascii with argument ARG. */
7548
7549 static tree
7550 fold_builtin_toascii (location_t loc, tree arg)
7551 {
7552 if (!validate_arg (arg, INTEGER_TYPE))
7553 return NULL_TREE;
7554
7555 /* Transform toascii(c) -> (c & 0x7f). */
7556 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7557 build_int_cst (integer_type_node, 0x7f));
7558 }
7559
7560 /* Fold a call to builtin isdigit with argument ARG. */
7561
7562 static tree
7563 fold_builtin_isdigit (location_t loc, tree arg)
7564 {
7565 if (!validate_arg (arg, INTEGER_TYPE))
7566 return NULL_TREE;
7567 else
7568 {
7569 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7570 /* According to the C standard, isdigit is unaffected by locale.
7571 However, it definitely is affected by the target character set. */
7572 unsigned HOST_WIDE_INT target_digit0
7573 = lang_hooks.to_target_charset ('0');
7574
7575 if (target_digit0 == 0)
7576 return NULL_TREE;
7577
7578 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7579 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7580 build_int_cst (unsigned_type_node, target_digit0));
7581 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7582 build_int_cst (unsigned_type_node, 9));
7583 }
7584 }
7585
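/* Worked example of the isdigit transformation above, assuming an
   ASCII-style execution character set where '0' is 48:

     isdigit (c)  ->  (unsigned) c - 48 <= 9

   which holds exactly for c in 48 .. 57, i.e. '0' .. '9'.  */
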
7586 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7587
7588 static tree
7589 fold_builtin_fabs (location_t loc, tree arg, tree type)
7590 {
7591 if (!validate_arg (arg, REAL_TYPE))
7592 return NULL_TREE;
7593
7594 arg = fold_convert_loc (loc, type, arg);
7595 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7596 }
7597
7598 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7599
7600 static tree
7601 fold_builtin_abs (location_t loc, tree arg, tree type)
7602 {
7603 if (!validate_arg (arg, INTEGER_TYPE))
7604 return NULL_TREE;
7605
7606 arg = fold_convert_loc (loc, type, arg);
7607 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7608 }
7609
7610 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7611
7612 static tree
7613 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7614 {
7615 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7616 if (validate_arg (arg0, REAL_TYPE)
7617 && validate_arg (arg1, REAL_TYPE)
7618 && validate_arg (arg2, REAL_TYPE)
7619 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7620 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7621
7622 return NULL_TREE;
7623 }
7624
7625 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7626
7627 static tree
7628 fold_builtin_carg (location_t loc, tree arg, tree type)
7629 {
7630 if (validate_arg (arg, COMPLEX_TYPE)
7631 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7632 {
7633 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7634
7635 if (atan2_fn)
7636 {
7637 tree new_arg = builtin_save_expr (arg);
7638 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7639 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7640 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7641 }
7642 }
7643
7644 return NULL_TREE;
7645 }
7646
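/* Illustrative sketch of the carg fold above:

     carg (z)  ->  atan2 (__imag__ z, __real__ z)

   with Z wrapped in a save_expr so it is evaluated only once.  */
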
7647 /* Fold a call to builtin frexp.  We can assume the base is 2.  */
7648
7649 static tree
7650 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7651 {
7652 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7653 return NULL_TREE;
7654
7655 STRIP_NOPS (arg0);
7656
7657 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7658 return NULL_TREE;
7659
7660 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7661
7662 /* Proceed if a valid pointer type was passed in. */
7663 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7664 {
7665 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7666 tree frac, exp;
7667
7668 switch (value->cl)
7669 {
7670 case rvc_zero:
7671 /* For +-0, return (*exp = 0, +-0). */
7672 exp = integer_zero_node;
7673 frac = arg0;
7674 break;
7675 case rvc_nan:
7676 case rvc_inf:
7677 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7678 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7679 case rvc_normal:
7680 {
7681 /* Since the frexp function always expects base 2, and in
7682 GCC normalized significands are already in the range
7683 [0.5, 1.0), we have exactly what frexp wants. */
7684 REAL_VALUE_TYPE frac_rvt = *value;
7685 SET_REAL_EXP (&frac_rvt, 0);
7686 frac = build_real (rettype, frac_rvt);
7687 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7688 }
7689 break;
7690 default:
7691 gcc_unreachable ();
7692 }
7693
7694       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
7695 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7696 TREE_SIDE_EFFECTS (arg1) = 1;
7697 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7698 }
7699
7700 return NULL_TREE;
7701 }
7702
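/* Worked constant example for the frexp folding above (base 2, with the
   significand normalized into [0.5, 1.0)):

     frexp (6.0, &e)  ->  (*e = 3, 0.75)

   since 6.0 == 0.75 * 2^3.  */
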
7703 /* Fold a call to builtin modf. */
7704
7705 static tree
7706 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7707 {
7708 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7709 return NULL_TREE;
7710
7711 STRIP_NOPS (arg0);
7712
7713 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7714 return NULL_TREE;
7715
7716 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7717
7718 /* Proceed if a valid pointer type was passed in. */
7719 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7720 {
7721 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7722 REAL_VALUE_TYPE trunc, frac;
7723
7724 switch (value->cl)
7725 {
7726 case rvc_nan:
7727 case rvc_zero:
7728 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7729 trunc = frac = *value;
7730 break;
7731 case rvc_inf:
7732 /* For +-Inf, return (*arg1 = arg0, +-0). */
7733 frac = dconst0;
7734 frac.sign = value->sign;
7735 trunc = *value;
7736 break;
7737 case rvc_normal:
7738 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7739 real_trunc (&trunc, VOIDmode, value);
7740 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7741 /* If the original number was negative and already
7742 integral, then the fractional part is -0.0. */
7743 if (value->sign && frac.cl == rvc_zero)
7744 frac.sign = value->sign;
7745 break;
7746 }
7747
7748 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7749 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7750 build_real (rettype, trunc));
7751 TREE_SIDE_EFFECTS (arg1) = 1;
7752 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7753 build_real (rettype, frac));
7754 }
7755
7756 return NULL_TREE;
7757 }
7758
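/* Worked constant examples for the modf folding above:

     modf (2.5, &i)   ->  (*i = 2.0, 0.5)
     modf (-3.0, &i)  ->  (*i = -3.0, -0.0)

   the second case showing the -0.0 fractional part noted above for
   negative integral values.  */
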
7759 /* Given a location LOC, an interclass builtin function decl FNDECL
7760    and its single argument ARG, return a folded expression computing
7761    the same, or NULL_TREE if we either couldn't or didn't want to fold
7762    (the latter happens if there's an RTL instruction available).  */
7763
7764 static tree
7765 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7766 {
7767 machine_mode mode;
7768
7769 if (!validate_arg (arg, REAL_TYPE))
7770 return NULL_TREE;
7771
7772 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7773 return NULL_TREE;
7774
7775 mode = TYPE_MODE (TREE_TYPE (arg));
7776
7777 /* If there is no optab, try generic code. */
7778 switch (DECL_FUNCTION_CODE (fndecl))
7779 {
7780 tree result;
7781
7782 CASE_FLT_FN (BUILT_IN_ISINF):
7783 {
7784 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7785 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7786 tree const type = TREE_TYPE (arg);
7787 REAL_VALUE_TYPE r;
7788 char buf[128];
7789
7790 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7791 real_from_string (&r, buf);
7792 result = build_call_expr (isgr_fn, 2,
7793 fold_build1_loc (loc, ABS_EXPR, type, arg),
7794 build_real (type, r));
7795 return result;
7796 }
7797 CASE_FLT_FN (BUILT_IN_FINITE):
7798 case BUILT_IN_ISFINITE:
7799 {
7800 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7801 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7802 tree const type = TREE_TYPE (arg);
7803 REAL_VALUE_TYPE r;
7804 char buf[128];
7805
7806 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7807 real_from_string (&r, buf);
7808 result = build_call_expr (isle_fn, 2,
7809 fold_build1_loc (loc, ABS_EXPR, type, arg),
7810 build_real (type, r));
7811 /*result = fold_build2_loc (loc, UNGT_EXPR,
7812 TREE_TYPE (TREE_TYPE (fndecl)),
7813 fold_build1_loc (loc, ABS_EXPR, type, arg),
7814 build_real (type, r));
7815 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7816 TREE_TYPE (TREE_TYPE (fndecl)),
7817 result);*/
7818 return result;
7819 }
7820 case BUILT_IN_ISNORMAL:
7821 {
7822 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7823 islessequal(fabs(x),DBL_MAX). */
7824 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7825 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7826 tree const type = TREE_TYPE (arg);
7827 REAL_VALUE_TYPE rmax, rmin;
7828 char buf[128];
7829
7830 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7831 real_from_string (&rmax, buf);
7832 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7833 real_from_string (&rmin, buf);
7834 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7835 result = build_call_expr (isle_fn, 2, arg,
7836 build_real (type, rmax));
7837 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
7838 build_call_expr (isge_fn, 2, arg,
7839 build_real (type, rmin)));
7840 return result;
7841 }
7842 default:
7843 break;
7844 }
7845
7846 return NULL_TREE;
7847 }
7848
7849 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7850 ARG is the argument for the call. */
7851
7852 static tree
7853 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7854 {
7855 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7856
7857 if (!validate_arg (arg, REAL_TYPE))
7858 return NULL_TREE;
7859
7860 switch (builtin_index)
7861 {
7862 case BUILT_IN_ISINF:
7863 if (!HONOR_INFINITIES (arg))
7864 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7865
7866 return NULL_TREE;
7867
7868 case BUILT_IN_ISINF_SIGN:
7869 {
7870 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7871 /* In a boolean context, GCC will fold the inner COND_EXPR to
7872 1. So e.g. "if (isinf_sign(x))" would be folded to just
7873 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7874 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
7875 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7876 tree tmp = NULL_TREE;
7877
7878 arg = builtin_save_expr (arg);
7879
7880 if (signbit_fn && isinf_fn)
7881 {
7882 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7883 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7884
7885 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7886 signbit_call, integer_zero_node);
7887 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7888 isinf_call, integer_zero_node);
7889
7890 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7891 integer_minus_one_node, integer_one_node);
7892 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7893 isinf_call, tmp,
7894 integer_zero_node);
7895 }
7896
7897 return tmp;
7898 }
7899
7900 case BUILT_IN_ISFINITE:
7901 if (!HONOR_NANS (arg)
7902 && !HONOR_INFINITIES (arg))
7903 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7904
7905 return NULL_TREE;
7906
7907 case BUILT_IN_ISNAN:
7908 if (!HONOR_NANS (arg))
7909 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7910
7911 arg = builtin_save_expr (arg);
7912 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7913
7914 default:
7915 gcc_unreachable ();
7916 }
7917 }
7918
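/* Illustrative reading of the isnan case above: when NaNs are honored,

     isnan (x)  ->  x UNORDERED x

   an UNORDERED_EXPR of X against itself, which is true only for NaN;
   with -ffinite-math-only it folds directly to 0.  */
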
7919 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7920 This builtin will generate code to return the appropriate floating
7921 point classification depending on the value of the floating point
7922 number passed in. The possible return values must be supplied as
7923 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7924    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
7925    one floating-point argument, which is "type generic".  */
7926
7927 static tree
7928 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7929 {
7930 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7931 arg, type, res, tmp;
7932 machine_mode mode;
7933 REAL_VALUE_TYPE r;
7934 char buf[128];
7935
7936 /* Verify the required arguments in the original call. */
7937 if (nargs != 6
7938 || !validate_arg (args[0], INTEGER_TYPE)
7939 || !validate_arg (args[1], INTEGER_TYPE)
7940 || !validate_arg (args[2], INTEGER_TYPE)
7941 || !validate_arg (args[3], INTEGER_TYPE)
7942 || !validate_arg (args[4], INTEGER_TYPE)
7943 || !validate_arg (args[5], REAL_TYPE))
7944 return NULL_TREE;
7945
7946 fp_nan = args[0];
7947 fp_infinite = args[1];
7948 fp_normal = args[2];
7949 fp_subnormal = args[3];
7950 fp_zero = args[4];
7951 arg = args[5];
7952 type = TREE_TYPE (arg);
7953 mode = TYPE_MODE (type);
7954 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7955
7956 /* fpclassify(x) ->
7957 isnan(x) ? FP_NAN :
7958 (fabs(x) == Inf ? FP_INFINITE :
7959 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7960 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7961
7962 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7963 build_real (type, dconst0));
7964 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7965 tmp, fp_zero, fp_subnormal);
7966
7967 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7968 real_from_string (&r, buf);
7969 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7970 arg, build_real (type, r));
7971 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7972
7973 if (HONOR_INFINITIES (mode))
7974 {
7975 real_inf (&r);
7976 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7977 build_real (type, r));
7978 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7979 fp_infinite, res);
7980 }
7981
7982 if (HONOR_NANS (mode))
7983 {
7984 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7985 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7986 }
7987
7988 return res;
7989 }
7990
7991 /* Fold a call to an unordered comparison function such as
7992 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7993 being called and ARG0 and ARG1 are the arguments for the call.
7994 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7995 the opposite of the desired result. UNORDERED_CODE is used
7996 for modes that can hold NaNs and ORDERED_CODE is used for
7997 the rest. */
7998
7999 static tree
8000 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8001 enum tree_code unordered_code,
8002 enum tree_code ordered_code)
8003 {
8004 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8005 enum tree_code code;
8006 tree type0, type1;
8007 enum tree_code code0, code1;
8008 tree cmp_type = NULL_TREE;
8009
8010 type0 = TREE_TYPE (arg0);
8011 type1 = TREE_TYPE (arg1);
8012
8013 code0 = TREE_CODE (type0);
8014 code1 = TREE_CODE (type1);
8015
8016 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8017 /* Choose the wider of two real types. */
8018 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8019 ? type0 : type1;
8020 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8021 cmp_type = type0;
8022 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8023 cmp_type = type1;
8024
8025 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8026 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8027
8028 if (unordered_code == UNORDERED_EXPR)
8029 {
8030 if (!HONOR_NANS (arg0))
8031 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8032 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8033 }
8034
8035 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8036 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8037 fold_build2_loc (loc, code, type, arg0, arg1));
8038 }
8039 }
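
/* Illustrative reading of the inversion above, taking isgreater as the
   example:

     isgreater (x, y)  ->  !(x UNLE y)	when NaNs are honored
     isgreater (x, y)  ->  !(x <= y)	when they are not

   i.e. the comparison code giving the opposite of the desired result is
   built and then negated.  */
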
8040 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8041    arithmetic if it can never overflow, or into internal functions that
8042    return both the result of the arithmetic and an overflow boolean flag in
8043    a complex integer result, or some other check for overflow.  */
8044
8045 static tree
8046 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8047 tree arg0, tree arg1, tree arg2)
8048 {
8049 enum internal_fn ifn = IFN_LAST;
8050 tree type = TREE_TYPE (TREE_TYPE (arg2));
8051 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8052 switch (fcode)
8053 {
8054 case BUILT_IN_ADD_OVERFLOW:
8055 case BUILT_IN_SADD_OVERFLOW:
8056 case BUILT_IN_SADDL_OVERFLOW:
8057 case BUILT_IN_SADDLL_OVERFLOW:
8058 case BUILT_IN_UADD_OVERFLOW:
8059 case BUILT_IN_UADDL_OVERFLOW:
8060 case BUILT_IN_UADDLL_OVERFLOW:
8061 ifn = IFN_ADD_OVERFLOW;
8062 break;
8063 case BUILT_IN_SUB_OVERFLOW:
8064 case BUILT_IN_SSUB_OVERFLOW:
8065 case BUILT_IN_SSUBL_OVERFLOW:
8066 case BUILT_IN_SSUBLL_OVERFLOW:
8067 case BUILT_IN_USUB_OVERFLOW:
8068 case BUILT_IN_USUBL_OVERFLOW:
8069 case BUILT_IN_USUBLL_OVERFLOW:
8070 ifn = IFN_SUB_OVERFLOW;
8071 break;
8072 case BUILT_IN_MUL_OVERFLOW:
8073 case BUILT_IN_SMUL_OVERFLOW:
8074 case BUILT_IN_SMULL_OVERFLOW:
8075 case BUILT_IN_SMULLL_OVERFLOW:
8076 case BUILT_IN_UMUL_OVERFLOW:
8077 case BUILT_IN_UMULL_OVERFLOW:
8078 case BUILT_IN_UMULLL_OVERFLOW:
8079 ifn = IFN_MUL_OVERFLOW;
8080 break;
8081 default:
8082 gcc_unreachable ();
8083 }
8084 tree ctype = build_complex_type (type);
8085 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8086 2, arg0, arg1);
8087 tree tgt = save_expr (call);
8088 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8089 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8090 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8091 tree store
8092 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8093 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8094 }
8095
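/* Illustrative source-level sketch of the rewrite above for
   __builtin_add_overflow (a, b, &r): the call becomes roughly

     _Complex int t = .ADD_OVERFLOW (a, b);
     r = __real__ t;
     ... (_Bool) __imag__ t ...

   where .ADD_OVERFLOW denotes the IFN_ADD_OVERFLOW internal function, the
   complex type is built from the type R points to, and the value of the
   whole expression is the overflow flag taken from the imaginary part.  */
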
8096 /* Fold a call to built-in function FNDECL with 0 arguments.
8097 This function returns NULL_TREE if no simplification was possible. */
8098
8099 static tree
8100 fold_builtin_0 (location_t loc, tree fndecl)
8101 {
8102 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8103 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8104 switch (fcode)
8105 {
8106 CASE_FLT_FN (BUILT_IN_INF):
8107 case BUILT_IN_INFD32:
8108 case BUILT_IN_INFD64:
8109 case BUILT_IN_INFD128:
8110 return fold_builtin_inf (loc, type, true);
8111
8112 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8113 return fold_builtin_inf (loc, type, false);
8114
8115 case BUILT_IN_CLASSIFY_TYPE:
8116 return fold_builtin_classify_type (NULL_TREE);
8117
8118 default:
8119 break;
8120 }
8121 return NULL_TREE;
8122 }
8123
8124 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8125 This function returns NULL_TREE if no simplification was possible. */
8126
8127 static tree
8128 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8129 {
8130 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8131 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8132
8133 if (TREE_CODE (arg0) == ERROR_MARK)
8134 return NULL_TREE;
8135
8136 if (tree ret = fold_const_call (fcode, type, arg0))
8137 return ret;
8138
8139 switch (fcode)
8140 {
8141 case BUILT_IN_CONSTANT_P:
8142 {
8143 tree val = fold_builtin_constant_p (arg0);
8144
8145 /* Gimplification will pull the CALL_EXPR for the builtin out of
8146 an if condition. When not optimizing, we'll not CSE it back.
8147 	 To avoid link-error type regressions, return false now.  */
8148 if (!val && !optimize)
8149 val = integer_zero_node;
8150
8151 return val;
8152 }
8153
8154 case BUILT_IN_CLASSIFY_TYPE:
8155 return fold_builtin_classify_type (arg0);
8156
8157 case BUILT_IN_STRLEN:
8158 return fold_builtin_strlen (loc, type, arg0);
8159
8160 CASE_FLT_FN (BUILT_IN_FABS):
8161 case BUILT_IN_FABSD32:
8162 case BUILT_IN_FABSD64:
8163 case BUILT_IN_FABSD128:
8164 return fold_builtin_fabs (loc, arg0, type);
8165
8166 case BUILT_IN_ABS:
8167 case BUILT_IN_LABS:
8168 case BUILT_IN_LLABS:
8169 case BUILT_IN_IMAXABS:
8170 return fold_builtin_abs (loc, arg0, type);
8171
8172 CASE_FLT_FN (BUILT_IN_CONJ):
8173 if (validate_arg (arg0, COMPLEX_TYPE)
8174 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8175 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8176 break;
8177
8178 CASE_FLT_FN (BUILT_IN_CREAL):
8179 if (validate_arg (arg0, COMPLEX_TYPE)
8180 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8181 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8182 break;
8183
8184 CASE_FLT_FN (BUILT_IN_CIMAG):
8185 if (validate_arg (arg0, COMPLEX_TYPE)
8186 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8187 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8188 break;
8189
8190 CASE_FLT_FN (BUILT_IN_CARG):
8191 return fold_builtin_carg (loc, arg0, type);
8192
8193 case BUILT_IN_ISASCII:
8194 return fold_builtin_isascii (loc, arg0);
8195
8196 case BUILT_IN_TOASCII:
8197 return fold_builtin_toascii (loc, arg0);
8198
8199 case BUILT_IN_ISDIGIT:
8200 return fold_builtin_isdigit (loc, arg0);
8201
8202 CASE_FLT_FN (BUILT_IN_FINITE):
8203 case BUILT_IN_FINITED32:
8204 case BUILT_IN_FINITED64:
8205 case BUILT_IN_FINITED128:
8206 case BUILT_IN_ISFINITE:
8207 {
8208 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8209 if (ret)
8210 return ret;
8211 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8212 }
8213
8214 CASE_FLT_FN (BUILT_IN_ISINF):
8215 case BUILT_IN_ISINFD32:
8216 case BUILT_IN_ISINFD64:
8217 case BUILT_IN_ISINFD128:
8218 {
8219 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8220 if (ret)
8221 return ret;
8222 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8223 }
8224
8225 case BUILT_IN_ISNORMAL:
8226 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8227
8228 case BUILT_IN_ISINF_SIGN:
8229 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8230
8231 CASE_FLT_FN (BUILT_IN_ISNAN):
8232 case BUILT_IN_ISNAND32:
8233 case BUILT_IN_ISNAND64:
8234 case BUILT_IN_ISNAND128:
8235 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8236
8237 case BUILT_IN_FREE:
8238 if (integer_zerop (arg0))
8239 return build_empty_stmt (loc);
8240 break;
8241
8242 default:
8243 break;
8244 }
8245
8246 return NULL_TREE;
8247
8248 }
8249
8250 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8251 This function returns NULL_TREE if no simplification was possible. */
8252
8253 static tree
8254 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8255 {
8256 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8257 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8258
8259 if (TREE_CODE (arg0) == ERROR_MARK
8260 || TREE_CODE (arg1) == ERROR_MARK)
8261 return NULL_TREE;
8262
8263 if (tree ret = fold_const_call (fcode, type, arg0, arg1))
8264 return ret;
8265
8266 switch (fcode)
8267 {
8268 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8269 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8270 if (validate_arg (arg0, REAL_TYPE)
8271 && validate_arg (arg1, POINTER_TYPE))
8272 return do_mpfr_lgamma_r (arg0, arg1, type);
8273 break;
8274
8275 CASE_FLT_FN (BUILT_IN_FREXP):
8276 return fold_builtin_frexp (loc, arg0, arg1, type);
8277
8278 CASE_FLT_FN (BUILT_IN_MODF):
8279 return fold_builtin_modf (loc, arg0, arg1, type);
8280
8281 case BUILT_IN_STRSTR:
8282 return fold_builtin_strstr (loc, arg0, arg1, type);
8283
8284 case BUILT_IN_STRSPN:
8285 return fold_builtin_strspn (loc, arg0, arg1);
8286
8287 case BUILT_IN_STRCSPN:
8288 return fold_builtin_strcspn (loc, arg0, arg1);
8289
8290 case BUILT_IN_STRCHR:
8291 case BUILT_IN_INDEX:
8292 return fold_builtin_strchr (loc, arg0, arg1, type);
8293
8294 case BUILT_IN_STRRCHR:
8295 case BUILT_IN_RINDEX:
8296 return fold_builtin_strrchr (loc, arg0, arg1, type);
8297
8298 case BUILT_IN_STRCMP:
8299 return fold_builtin_strcmp (loc, arg0, arg1);
8300
8301 case BUILT_IN_STRPBRK:
8302 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8303
8304 case BUILT_IN_EXPECT:
8305 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8306
8307 case BUILT_IN_ISGREATER:
8308 return fold_builtin_unordered_cmp (loc, fndecl,
8309 arg0, arg1, UNLE_EXPR, LE_EXPR);
8310 case BUILT_IN_ISGREATEREQUAL:
8311 return fold_builtin_unordered_cmp (loc, fndecl,
8312 arg0, arg1, UNLT_EXPR, LT_EXPR);
8313 case BUILT_IN_ISLESS:
8314 return fold_builtin_unordered_cmp (loc, fndecl,
8315 arg0, arg1, UNGE_EXPR, GE_EXPR);
8316 case BUILT_IN_ISLESSEQUAL:
8317 return fold_builtin_unordered_cmp (loc, fndecl,
8318 arg0, arg1, UNGT_EXPR, GT_EXPR);
8319 case BUILT_IN_ISLESSGREATER:
8320 return fold_builtin_unordered_cmp (loc, fndecl,
8321 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8322 case BUILT_IN_ISUNORDERED:
8323 return fold_builtin_unordered_cmp (loc, fndecl,
8324 arg0, arg1, UNORDERED_EXPR,
8325 NOP_EXPR);
8326
8327 /* We do the folding for va_start in the expander. */
8328 case BUILT_IN_VA_START:
8329 break;
8330
8331 case BUILT_IN_OBJECT_SIZE:
8332 return fold_builtin_object_size (arg0, arg1);
8333
8334 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8335 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8336
8337 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8338 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8339
8340 default:
8341 break;
8342 }
8343 return NULL_TREE;
8344 }
8345
8346 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8347 and ARG2.
8348 This function returns NULL_TREE if no simplification was possible. */
8349
8350 static tree
8351 fold_builtin_3 (location_t loc, tree fndecl,
8352 tree arg0, tree arg1, tree arg2)
8353 {
8354 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8355 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8356
8357 if (TREE_CODE (arg0) == ERROR_MARK
8358 || TREE_CODE (arg1) == ERROR_MARK
8359 || TREE_CODE (arg2) == ERROR_MARK)
8360 return NULL_TREE;
8361
8362 if (tree ret = fold_const_call (fcode, type, arg0, arg1, arg2))
8363 return ret;
8364
8365 switch (fcode)
8366 {
8367
8368 CASE_FLT_FN (BUILT_IN_SINCOS):
8369 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8370
8371 CASE_FLT_FN (BUILT_IN_FMA):
8372 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8373
8374 CASE_FLT_FN (BUILT_IN_REMQUO):
8375 if (validate_arg (arg0, REAL_TYPE)
8376 && validate_arg (arg1, REAL_TYPE)
8377 && validate_arg (arg2, POINTER_TYPE))
8378 return do_mpfr_remquo (arg0, arg1, arg2);
8379 break;
8380
8381 case BUILT_IN_STRNCMP:
8382 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8383
8384 case BUILT_IN_MEMCHR:
8385 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8386
8387 case BUILT_IN_BCMP:
8388 case BUILT_IN_MEMCMP:
8389       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8390
8391 case BUILT_IN_EXPECT:
8392 return fold_builtin_expect (loc, arg0, arg1, arg2);
8393
8394 case BUILT_IN_ADD_OVERFLOW:
8395 case BUILT_IN_SUB_OVERFLOW:
8396 case BUILT_IN_MUL_OVERFLOW:
8397 case BUILT_IN_SADD_OVERFLOW:
8398 case BUILT_IN_SADDL_OVERFLOW:
8399 case BUILT_IN_SADDLL_OVERFLOW:
8400 case BUILT_IN_SSUB_OVERFLOW:
8401 case BUILT_IN_SSUBL_OVERFLOW:
8402 case BUILT_IN_SSUBLL_OVERFLOW:
8403 case BUILT_IN_SMUL_OVERFLOW:
8404 case BUILT_IN_SMULL_OVERFLOW:
8405 case BUILT_IN_SMULLL_OVERFLOW:
8406 case BUILT_IN_UADD_OVERFLOW:
8407 case BUILT_IN_UADDL_OVERFLOW:
8408 case BUILT_IN_UADDLL_OVERFLOW:
8409 case BUILT_IN_USUB_OVERFLOW:
8410 case BUILT_IN_USUBL_OVERFLOW:
8411 case BUILT_IN_USUBLL_OVERFLOW:
8412 case BUILT_IN_UMUL_OVERFLOW:
8413 case BUILT_IN_UMULL_OVERFLOW:
8414 case BUILT_IN_UMULLL_OVERFLOW:
8415 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8416
8417 default:
8418 break;
8419 }
8420 return NULL_TREE;
8421 }
8422
8423 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8424    arguments.  The trailing bool parameter (true if the result of the
8425    call is ignored) is currently unused.  This function returns NULL_TREE if no
8426 simplification was possible. */
8427
8428 tree
8429 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8430 {
8431 tree ret = NULL_TREE;
8432
8433 switch (nargs)
8434 {
8435 case 0:
8436 ret = fold_builtin_0 (loc, fndecl);
8437 break;
8438 case 1:
8439 ret = fold_builtin_1 (loc, fndecl, args[0]);
8440 break;
8441 case 2:
8442 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8443 break;
8444 case 3:
8445 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8446 break;
8447 default:
8448 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8449 break;
8450 }
8451 if (ret)
8452 {
8453 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8454 SET_EXPR_LOCATION (ret, loc);
8455 TREE_NO_WARNING (ret) = 1;
8456 return ret;
8457 }
8458 return NULL_TREE;
8459 }
8460
8461 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8462 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8463 of arguments in ARGS to be omitted. OLDNARGS is the number of
8464 elements in ARGS. */
8465
8466 static tree
8467 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8468 int skip, tree fndecl, int n, va_list newargs)
8469 {
8470 int nargs = oldnargs - skip + n;
8471 tree *buffer;
8472
8473 if (n > 0)
8474 {
8475 int i, j;
8476
8477 buffer = XALLOCAVEC (tree, nargs);
8478 for (i = 0; i < n; i++)
8479 buffer[i] = va_arg (newargs, tree);
8480 for (j = skip; j < oldnargs; j++, i++)
8481 buffer[i] = args[j];
8482 }
8483 else
8484 buffer = args + skip;
8485
8486 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8487 }
8488
8489 /* Return true if FNDECL shouldn't be folded right now.
8490 If a built-in function has an inline attribute always_inline
8491 wrapper, defer folding it after always_inline functions have
8492 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8493 might not be performed. */
8494
8495 bool
8496 avoid_folding_inline_builtin (tree fndecl)
8497 {
8498 return (DECL_DECLARED_INLINE_P (fndecl)
8499 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8500 && cfun
8501 && !cfun->always_inline_functions_inlined
8502 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8503 }
8504
8505 /* A wrapper function for builtin folding that prevents warnings for
8506 "statement without effect" and the like, caused by removing the
8507 call node earlier than the warning is generated. */
8508
8509 tree
8510 fold_call_expr (location_t loc, tree exp, bool ignore)
8511 {
8512 tree ret = NULL_TREE;
8513 tree fndecl = get_callee_fndecl (exp);
8514 if (fndecl
8515 && TREE_CODE (fndecl) == FUNCTION_DECL
8516 && DECL_BUILT_IN (fndecl)
8517 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8518 yet. Defer folding until we see all the arguments
8519 (after inlining). */
8520 && !CALL_EXPR_VA_ARG_PACK (exp))
8521 {
8522 int nargs = call_expr_nargs (exp);
8523
8524 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8525 instead last argument is __builtin_va_arg_pack (). Defer folding
8526 even in that case, until arguments are finalized. */
8527 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8528 {
8529 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8530 if (fndecl2
8531 && TREE_CODE (fndecl2) == FUNCTION_DECL
8532 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8533 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8534 return NULL_TREE;
8535 }
8536
8537 if (avoid_folding_inline_builtin (fndecl))
8538 return NULL_TREE;
8539
8540 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8541 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8542 CALL_EXPR_ARGP (exp), ignore);
8543 else
8544 {
8545 tree *args = CALL_EXPR_ARGP (exp);
8546 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8547 if (ret)
8548 return ret;
8549 }
8550 }
8551 return NULL_TREE;
8552 }
8553
8554 /* Fold a CALL_EXPR with FN as the function expression; the second
8555 (type) parameter is unused. N arguments are passed in the array
8556 ARGARRAY. Return a folded expression or NULL_TREE if none was possible. */
8557
8558 tree
8559 fold_builtin_call_array (location_t loc, tree,
8560 tree fn,
8561 int n,
8562 tree *argarray)
8563 {
8564 if (TREE_CODE (fn) != ADDR_EXPR)
8565 return NULL_TREE;
8566
8567 tree fndecl = TREE_OPERAND (fn, 0);
8568 if (TREE_CODE (fndecl) == FUNCTION_DECL
8569 && DECL_BUILT_IN (fndecl))
8570 {
8571 /* If the last argument is __builtin_va_arg_pack (), the arguments to
8572 this function are not finalized yet. Defer folding until they are. */
8573 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8574 {
8575 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8576 if (fndecl2
8577 && TREE_CODE (fndecl2) == FUNCTION_DECL
8578 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8579 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8580 return NULL_TREE;
8581 }
8582 if (avoid_folding_inline_builtin (fndecl))
8583 return NULL_TREE;
8584 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8585 return targetm.fold_builtin (fndecl, n, argarray, false);
8586 else
8587 return fold_builtin_n (loc, fndecl, argarray, n, false);
8588 }
8589
8590 return NULL_TREE;
8591 }
8592
8593 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8594 along with N new arguments specified as the "..." parameters. SKIP
8595 is the number of arguments in EXP to be omitted. This function is used
8596 to do varargs-to-varargs transformations. */
8597
8598 static tree
8599 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8600 {
8601 va_list ap;
8602 tree t;
8603
8604 va_start (ap, n);
8605 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8606 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8607 va_end (ap);
8608
8609 return t;
8610 }
8611
8612 /* Validate a single argument ARG against a tree code CODE representing
8613 a type. */
8614
8615 static bool
8616 validate_arg (const_tree arg, enum tree_code code)
8617 {
8618 if (!arg)
8619 return false;
8620 else if (code == POINTER_TYPE)
8621 return POINTER_TYPE_P (TREE_TYPE (arg));
8622 else if (code == INTEGER_TYPE)
8623 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8624 return code == TREE_CODE (TREE_TYPE (arg));
8625 }
8626
8627 /* This function validates the types of a function call argument list
8628 against a specified list of tree_codes. If the last specifier is a 0,
8629 that represents an ellipsis; otherwise the last specifier must be a
8630 VOID_TYPE.
8631
8632 This is the GIMPLE version of validate_arglist. Eventually we want to
8633 completely convert builtins.c to work from GIMPLEs and the tree based
8634 validate_arglist will then be removed. */
8635
8636 bool
8637 validate_gimple_arglist (const gcall *call, ...)
8638 {
8639 enum tree_code code;
8640 bool res = false;
8641 va_list ap;
8642 const_tree arg;
8643 size_t i;
8644
8645 va_start (ap, call);
8646 i = 0;
8647
8648 do
8649 {
8650 code = (enum tree_code) va_arg (ap, int);
8651 switch (code)
8652 {
8653 case 0:
8654 /* This signifies an ellipsis; any further arguments are all OK. */
8655 res = true;
8656 goto end;
8657 case VOID_TYPE:
8658 /* This signifies an endlink: if no arguments remain, return
8659 true; otherwise return false. */
8660 res = (i == gimple_call_num_args (call));
8661 goto end;
8662 default:
8663 /* If no parameters remain or the parameter's code does not
8664 match the specified code, return false. Otherwise continue
8665 checking any remaining arguments. */
8666 arg = gimple_call_arg (call, i++);
8667 if (!validate_arg (arg, code))
8668 goto end;
8669 break;
8670 }
8671 }
8672 while (1);
8673
8674 /* We need gotos here so that every exit path reaches the single
8675 va_end call below. */
8676 end: ;
8677 va_end (ap);
8678
8679 return res;
8680 }
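/* For illustration, a caller checking that CALL looks like
   memset (void *, int, size_t) might write:

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   The trailing VOID_TYPE marks the end of the expected argument list;
   a trailing 0 would instead allow arbitrary further arguments.  */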
8681
8682 /* Default target-specific builtin expander that does nothing. */
8683
8684 rtx
8685 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8686 rtx target ATTRIBUTE_UNUSED,
8687 rtx subtarget ATTRIBUTE_UNUSED,
8688 machine_mode mode ATTRIBUTE_UNUSED,
8689 int ignore ATTRIBUTE_UNUSED)
8690 {
8691 return NULL_RTX;
8692 }
8693
8694 /* Returns true if EXP represents data that would potentially reside
8695 in a readonly section. */
8696
8697 bool
8698 readonly_data_expr (tree exp)
8699 {
8700 STRIP_NOPS (exp);
8701
8702 if (TREE_CODE (exp) != ADDR_EXPR)
8703 return false;
8704
8705 exp = get_base_address (TREE_OPERAND (exp, 0));
8706 if (!exp)
8707 return false;
8708
8709 /* Make sure we call decl_readonly_section only for trees it
8710 can handle (since it returns true for everything it doesn't
8711 understand). */
8712 if (TREE_CODE (exp) == STRING_CST
8713 || TREE_CODE (exp) == CONSTRUCTOR
8714 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8715 return decl_readonly_section (exp, 0);
8716 else
8717 return false;
8718 }
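/* For illustration, readonly_data_expr is expected to answer true for
   the address of a string literal or of a static object that ends up
   in a read-only section, e.g.

     readonly_data_expr (&"constant"[0])       -> true
     readonly_data_expr (&some_automatic_var)  -> false

   where some_automatic_var stands for any local (non-static) variable;
   anything that cannot be positively classified is reported as not
   read-only.  */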
8719
8720 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8721 to the call, and TYPE is its return type.
8722
8723 Return NULL_TREE if no simplification was possible, otherwise return the
8724 simplified form of the call as a tree.
8725
8726 The simplified form may be a constant or other expression which
8727 computes the same value, but in a more efficient manner (including
8728 calls to other builtin functions).
8729
8730 The call may contain arguments which need to be evaluated, but
8731 which are not useful to determine the result of the call. In
8732 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8733 COMPOUND_EXPR will be an argument which must be evaluated.
8734 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8735 COMPOUND_EXPR in the chain will contain the tree for the simplified
8736 form of the builtin function call. */
8737
8738 static tree
8739 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8740 {
8741 if (!validate_arg (s1, POINTER_TYPE)
8742 || !validate_arg (s2, POINTER_TYPE))
8743 return NULL_TREE;
8744 else
8745 {
8746 tree fn;
8747 const char *p1, *p2;
8748
8749 p2 = c_getstr (s2);
8750 if (p2 == NULL)
8751 return NULL_TREE;
8752
8753 p1 = c_getstr (s1);
8754 if (p1 != NULL)
8755 {
8756 const char *r = strstr (p1, p2);
8757 tree tem;
8758
8759 if (r == NULL)
8760 return build_int_cst (TREE_TYPE (s1), 0);
8761
8762 /* Return an offset into the constant string argument. */
8763 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8764 return fold_convert_loc (loc, type, tem);
8765 }
8766
8767 /* The argument is const char *, and the result is char *, so we need
8768 a type conversion here to avoid a warning. */
8769 if (p2[0] == '\0')
8770 return fold_convert_loc (loc, type, s1);
8771
8772 if (p2[1] != '\0')
8773 return NULL_TREE;
8774
8775 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8776 if (!fn)
8777 return NULL_TREE;
8778
8779 /* New argument list transforming strstr(s1, s2) to
8780 strchr(s1, s2[0]). */
8781 return build_call_expr_loc (loc, fn, 2, s1,
8782 build_int_cst (integer_type_node, p2[0]));
8783 }
8784 }
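/* For illustration, the folding above performs transformations such as

     strstr (s, "")          ->  (char *) s
     strstr (s, "a")         ->  strchr (s, 'a')
     strstr ("abcde", "cd")  ->  "abcde" + 2

   assuming the quoted arguments are string literals; any other form is
   left for the library call.  */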
8785
8786 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8787 the call, and TYPE is its return type.
8788
8789 Return NULL_TREE if no simplification was possible, otherwise return the
8790 simplified form of the call as a tree.
8791
8792 The simplified form may be a constant or other expression which
8793 computes the same value, but in a more efficient manner (including
8794 calls to other builtin functions).
8795
8796 The call may contain arguments which need to be evaluated, but
8797 which are not useful to determine the result of the call. In
8798 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8799 COMPOUND_EXPR will be an argument which must be evaluated.
8800 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8801 COMPOUND_EXPR in the chain will contain the tree for the simplified
8802 form of the builtin function call. */
8803
8804 static tree
8805 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8806 {
8807 if (!validate_arg (s1, POINTER_TYPE)
8808 || !validate_arg (s2, INTEGER_TYPE))
8809 return NULL_TREE;
8810 else
8811 {
8812 const char *p1;
8813
8814 if (TREE_CODE (s2) != INTEGER_CST)
8815 return NULL_TREE;
8816
8817 p1 = c_getstr (s1);
8818 if (p1 != NULL)
8819 {
8820 char c;
8821 const char *r;
8822 tree tem;
8823
8824 if (target_char_cast (s2, &c))
8825 return NULL_TREE;
8826
8827 r = strchr (p1, c);
8828
8829 if (r == NULL)
8830 return build_int_cst (TREE_TYPE (s1), 0);
8831
8832 /* Return an offset into the constant string argument. */
8833 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8834 return fold_convert_loc (loc, type, tem);
8835 }
8836 return NULL_TREE;
8837 }
8838 }
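/* For illustration, with a constant string and a constant character the
   folding above yields

     strchr ("hello", 'l')  ->  "hello" + 2
     strchr ("hello", 'z')  ->  (char *) 0

   while calls whose string argument is not a constant are left alone.  */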
8839
8840 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8841 the call, and TYPE is its return type.
8842
8843 Return NULL_TREE if no simplification was possible, otherwise return the
8844 simplified form of the call as a tree.
8845
8846 The simplified form may be a constant or other expression which
8847 computes the same value, but in a more efficient manner (including
8848 calls to other builtin functions).
8849
8850 The call may contain arguments which need to be evaluated, but
8851 which are not useful to determine the result of the call. In
8852 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8853 COMPOUND_EXPR will be an argument which must be evaluated.
8854 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8855 COMPOUND_EXPR in the chain will contain the tree for the simplified
8856 form of the builtin function call. */
8857
8858 static tree
8859 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8860 {
8861 if (!validate_arg (s1, POINTER_TYPE)
8862 || !validate_arg (s2, INTEGER_TYPE))
8863 return NULL_TREE;
8864 else
8865 {
8866 tree fn;
8867 const char *p1;
8868
8869 if (TREE_CODE (s2) != INTEGER_CST)
8870 return NULL_TREE;
8871
8872 p1 = c_getstr (s1);
8873 if (p1 != NULL)
8874 {
8875 char c;
8876 const char *r;
8877 tree tem;
8878
8879 if (target_char_cast (s2, &c))
8880 return NULL_TREE;
8881
8882 r = strrchr (p1, c);
8883
8884 if (r == NULL)
8885 return build_int_cst (TREE_TYPE (s1), 0);
8886
8887 /* Return an offset into the constant string argument. */
8888 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8889 return fold_convert_loc (loc, type, tem);
8890 }
8891
8892 if (! integer_zerop (s2))
8893 return NULL_TREE;
8894
8895 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8896 if (!fn)
8897 return NULL_TREE;
8898
8899 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8900 return build_call_expr_loc (loc, fn, 2, s1, s2);
8901 }
8902 }
8903
8904 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8905 to the call, and TYPE is its return type.
8906
8907 Return NULL_TREE if no simplification was possible, otherwise return the
8908 simplified form of the call as a tree.
8909
8910 The simplified form may be a constant or other expression which
8911 computes the same value, but in a more efficient manner (including
8912 calls to other builtin functions).
8913
8914 The call may contain arguments which need to be evaluated, but
8915 which are not useful to determine the result of the call. In
8916 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8917 COMPOUND_EXPR will be an argument which must be evaluated.
8918 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8919 COMPOUND_EXPR in the chain will contain the tree for the simplified
8920 form of the builtin function call. */
8921
8922 static tree
8923 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8924 {
8925 if (!validate_arg (s1, POINTER_TYPE)
8926 || !validate_arg (s2, POINTER_TYPE))
8927 return NULL_TREE;
8928 else
8929 {
8930 tree fn;
8931 const char *p1, *p2;
8932
8933 p2 = c_getstr (s2);
8934 if (p2 == NULL)
8935 return NULL_TREE;
8936
8937 p1 = c_getstr (s1);
8938 if (p1 != NULL)
8939 {
8940 const char *r = strpbrk (p1, p2);
8941 tree tem;
8942
8943 if (r == NULL)
8944 return build_int_cst (TREE_TYPE (s1), 0);
8945
8946 /* Return an offset into the constant string argument. */
8947 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8948 return fold_convert_loc (loc, type, tem);
8949 }
8950
8951 if (p2[0] == '\0')
8952 /* strpbrk(x, "") == NULL.
8953 Evaluate and ignore s1 in case it had side-effects. */
8954 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8955
8956 if (p2[1] != '\0')
8957 return NULL_TREE; /* Really call strpbrk. */
8958
8959 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8960 if (!fn)
8961 return NULL_TREE;
8962
8963 /* New argument list transforming strpbrk(s1, s2) to
8964 strchr(s1, s2[0]). */
8965 return build_call_expr_loc (loc, fn, 2, s1,
8966 build_int_cst (integer_type_node, p2[0]));
8967 }
8968 }
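/* For illustration, the folding above performs transformations such as

     strpbrk (s, "")          ->  (char *) 0   (s is still evaluated)
     strpbrk (s, "a")         ->  strchr (s, 'a')
     strpbrk ("abcde", "dx")  ->  "abcde" + 3

   assuming the quoted arguments are string literals; everything else
   really calls strpbrk.  */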
8969
8970 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8971 to the call.
8972
8973 Return NULL_TREE if no simplification was possible, otherwise return the
8974 simplified form of the call as a tree.
8975
8976 The simplified form may be a constant or other expression which
8977 computes the same value, but in a more efficient manner (including
8978 calls to other builtin functions).
8979
8980 The call may contain arguments which need to be evaluated, but
8981 which are not useful to determine the result of the call. In
8982 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8983 COMPOUND_EXPR will be an argument which must be evaluated.
8984 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8985 COMPOUND_EXPR in the chain will contain the tree for the simplified
8986 form of the builtin function call. */
8987
8988 static tree
8989 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8990 {
8991 if (!validate_arg (s1, POINTER_TYPE)
8992 || !validate_arg (s2, POINTER_TYPE))
8993 return NULL_TREE;
8994 else
8995 {
8996 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8997
8998 /* If either argument is "", the result is 0. */
8999 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9000 /* Evaluate and ignore both arguments in case either one has
9001 side-effects. */
9002 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9003 s1, s2);
9004 return NULL_TREE;
9005 }
9006 }
9007
9008 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9009 to the call.
9010
9011 Return NULL_TREE if no simplification was possible, otherwise return the
9012 simplified form of the call as a tree.
9013
9014 The simplified form may be a constant or other expression which
9015 computes the same value, but in a more efficient manner (including
9016 calls to other builtin functions).
9017
9018 The call may contain arguments which need to be evaluated, but
9019 which are not useful to determine the result of the call. In
9020 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9021 COMPOUND_EXPR will be an argument which must be evaluated.
9022 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9023 COMPOUND_EXPR in the chain will contain the tree for the simplified
9024 form of the builtin function call. */
9025
9026 static tree
9027 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9028 {
9029 if (!validate_arg (s1, POINTER_TYPE)
9030 || !validate_arg (s2, POINTER_TYPE))
9031 return NULL_TREE;
9032 else
9033 {
9034 /* If the first argument is "", the result is 0. */
9035 const char *p1 = c_getstr (s1);
9036 if (p1 && *p1 == '\0')
9037 {
9038 /* Evaluate and ignore argument s2 in case it has
9039 side-effects. */
9040 return omit_one_operand_loc (loc, size_type_node,
9041 size_zero_node, s2);
9042 }
9043
9044 /* If the second argument is "", return __builtin_strlen(s1). */
9045 const char *p2 = c_getstr (s2);
9046 if (p2 && *p2 == '\0')
9047 {
9048 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9049
9050 /* If the replacement _DECL isn't initialized, don't do the
9051 transformation. */
9052 if (!fn)
9053 return NULL_TREE;
9054
9055 return build_call_expr_loc (loc, fn, 1, s1);
9056 }
9057 return NULL_TREE;
9058 }
9059 }
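/* For illustration, the folding above yields

     strcspn ("", s)   ->  (size_t) 0      (s is still evaluated)
     strcspn (s, "")   ->  strlen (s)

   and leaves any other form of the call unchanged.  */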
9060
9061 /* Fold the next_arg or va_start call EXP. Returns true if an error
9062 was produced, false otherwise. This is done so that we don't output
9063 the error or warning more than once. */
9064
9065 bool
9066 fold_builtin_next_arg (tree exp, bool va_start_p)
9067 {
9068 tree fntype = TREE_TYPE (current_function_decl);
9069 int nargs = call_expr_nargs (exp);
9070 tree arg;
9071 /* There is a good chance the current input_location points inside the
9072 definition of the va_start macro (perhaps on the token for the
9073 builtin) in a system header, so warnings will not be emitted.
9074 Use the location in real source code instead. */
9075 source_location current_location =
9076 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9077 NULL);
9078
9079 if (!stdarg_p (fntype))
9080 {
9081 error ("%<va_start%> used in function with fixed args");
9082 return true;
9083 }
9084
9085 if (va_start_p)
9086 {
9087 if (nargs != 2)
9088 {
9089 error ("wrong number of arguments to function %<va_start%>");
9090 return true;
9091 }
9092 arg = CALL_EXPR_ARG (exp, 1);
9093 }
9094 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9095 once we have checked the arguments and, if needed, issued a warning. */
9096 else
9097 {
9098 if (nargs == 0)
9099 {
9100 /* Evidently an out of date version of <stdarg.h>; can't validate
9101 va_start's second argument, but can still work as intended. */
9102 warning_at (current_location,
9103 OPT_Wvarargs,
9104 "%<__builtin_next_arg%> called without an argument");
9105 return true;
9106 }
9107 else if (nargs > 1)
9108 {
9109 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9110 return true;
9111 }
9112 arg = CALL_EXPR_ARG (exp, 0);
9113 }
9114
9115 if (TREE_CODE (arg) == SSA_NAME)
9116 arg = SSA_NAME_VAR (arg);
9117
9118 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9119 or __builtin_next_arg (0) the first time we see it, after checking
9120 the arguments and if needed issuing a warning. */
9121 if (!integer_zerop (arg))
9122 {
9123 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9124
9125 /* Strip off all nops for the sake of the comparison. This
9126 is not quite the same as STRIP_NOPS. It does more.
9127 We must also strip off INDIRECT_EXPR for C++ reference
9128 parameters. */
9129 while (CONVERT_EXPR_P (arg)
9130 || TREE_CODE (arg) == INDIRECT_REF)
9131 arg = TREE_OPERAND (arg, 0);
9132 if (arg != last_parm)
9133 {
9134 /* FIXME: Sometimes with the tree optimizers we can end up with
9135 something other than the last argument even though the user
9136 used the last argument. We just warn and leave the argument
9137 as it is, so wrong code may be generated because of
9138 it. */
9139 warning_at (current_location,
9140 OPT_Wvarargs,
9141 "second parameter of %<va_start%> not last named argument");
9142 }
9143
9144 /* Undefined by C99 7.15.1.4p4 (va_start):
9145 "If the parameter parmN is declared with the register storage
9146 class, with a function or array type, or with a type that is
9147 not compatible with the type that results after application of
9148 the default argument promotions, the behavior is undefined."
9149 */
9150 else if (DECL_REGISTER (arg))
9151 {
9152 warning_at (current_location,
9153 OPT_Wvarargs,
9154 "undefined behaviour when second parameter of "
9155 "%<va_start%> is declared with %<register%> storage");
9156 }
9157
9158 /* We want to verify the second parameter just once before the tree
9159 optimizers are run and then avoid keeping it in the tree,
9160 as otherwise we could warn even for correct code like:
9161 void foo (int i, ...)
9162 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9163 if (va_start_p)
9164 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9165 else
9166 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9167 }
9168 return false;
9169 }
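/* For illustration, given

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);
       va_end (ap);
     }

   the check above emits the -Wvarargs warning that the second parameter
   of va_start is not the last named argument, whereas va_start (ap, b)
   is accepted and, once verified, has its second argument rewritten to 0.  */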
9170
9171
9172 /* Expand a call EXP to __builtin_object_size. */
9173
9174 static rtx
9175 expand_builtin_object_size (tree exp)
9176 {
9177 tree ost;
9178 int object_size_type;
9179 tree fndecl = get_callee_fndecl (exp);
9180
9181 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9182 {
9183 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9184 exp, fndecl);
9185 expand_builtin_trap ();
9186 return const0_rtx;
9187 }
9188
9189 ost = CALL_EXPR_ARG (exp, 1);
9190 STRIP_NOPS (ost);
9191
9192 if (TREE_CODE (ost) != INTEGER_CST
9193 || tree_int_cst_sgn (ost) < 0
9194 || compare_tree_int (ost, 3) > 0)
9195 {
9196 error ("%Klast argument of %D is not integer constant between 0 and 3",
9197 exp, fndecl);
9198 expand_builtin_trap ();
9199 return const0_rtx;
9200 }
9201
9202 object_size_type = tree_to_shwi (ost);
9203
9204 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9205 }
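/* For illustration: any __builtin_object_size call whose result could be
   determined has already been folded away, so whatever reaches this
   expander yields the "unknown" value, e.g.

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0.  */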
9206
9207 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9208 FCODE is the BUILT_IN_* to use.
9209 Return NULL_RTX if we failed; the caller should emit a normal call,
9210 otherwise try to get the result in TARGET, if convenient (and in
9211 mode MODE if that's convenient). */
9212
9213 static rtx
9214 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9215 enum built_in_function fcode)
9216 {
9217 tree dest, src, len, size;
9218
9219 if (!validate_arglist (exp,
9220 POINTER_TYPE,
9221 fcode == BUILT_IN_MEMSET_CHK
9222 ? INTEGER_TYPE : POINTER_TYPE,
9223 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9224 return NULL_RTX;
9225
9226 dest = CALL_EXPR_ARG (exp, 0);
9227 src = CALL_EXPR_ARG (exp, 1);
9228 len = CALL_EXPR_ARG (exp, 2);
9229 size = CALL_EXPR_ARG (exp, 3);
9230
9231 if (! tree_fits_uhwi_p (size))
9232 return NULL_RTX;
9233
9234 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9235 {
9236 tree fn;
9237
9238 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9239 {
9240 warning_at (tree_nonartificial_location (exp),
9241 0, "%Kcall to %D will always overflow destination buffer",
9242 exp, get_callee_fndecl (exp));
9243 return NULL_RTX;
9244 }
9245
9246 fn = NULL_TREE;
9247 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9248 mem{cpy,pcpy,move,set} is available. */
9249 switch (fcode)
9250 {
9251 case BUILT_IN_MEMCPY_CHK:
9252 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9253 break;
9254 case BUILT_IN_MEMPCPY_CHK:
9255 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9256 break;
9257 case BUILT_IN_MEMMOVE_CHK:
9258 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9259 break;
9260 case BUILT_IN_MEMSET_CHK:
9261 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9262 break;
9263 default:
9264 break;
9265 }
9266
9267 if (! fn)
9268 return NULL_RTX;
9269
9270 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9271 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9272 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9273 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9274 }
9275 else if (fcode == BUILT_IN_MEMSET_CHK)
9276 return NULL_RTX;
9277 else
9278 {
9279 unsigned int dest_align = get_pointer_alignment (dest);
9280
9281 /* If DEST is not a pointer type, call the normal function. */
9282 if (dest_align == 0)
9283 return NULL_RTX;
9284
9285 /* If SRC and DEST are the same (and not volatile), do nothing. */
9286 if (operand_equal_p (src, dest, 0))
9287 {
9288 tree expr;
9289
9290 if (fcode != BUILT_IN_MEMPCPY_CHK)
9291 {
9292 /* Evaluate and ignore LEN in case it has side-effects. */
9293 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9294 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9295 }
9296
9297 expr = fold_build_pointer_plus (dest, len);
9298 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9299 }
9300
9301 /* __memmove_chk special case. */
9302 if (fcode == BUILT_IN_MEMMOVE_CHK)
9303 {
9304 unsigned int src_align = get_pointer_alignment (src);
9305
9306 if (src_align == 0)
9307 return NULL_RTX;
9308
9309 /* If src is categorized for a readonly section we can use
9310 normal __memcpy_chk. */
9311 if (readonly_data_expr (src))
9312 {
9313 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9314 if (!fn)
9315 return NULL_RTX;
9316 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9317 dest, src, len, size);
9318 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9319 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9320 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9321 }
9322 }
9323 return NULL_RTX;
9324 }
9325 }
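/* For illustration (assuming the length and object size are compile-time
   constants), the expansion above turns

     __builtin___memcpy_chk (d, s, 16, 32)

   into a plain memcpy (d, s, 16), while a call whose length is known to
   exceed the object size draws the "will always overflow destination
   buffer" warning and is emitted as a normal library call.  */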
9326
9327 /* Emit a warning if a buffer overflow is detected at compile time. */
9328
9329 static void
9330 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9331 {
9332 int is_strlen = 0;
9333 tree len, size;
9334 location_t loc = tree_nonartificial_location (exp);
9335
9336 switch (fcode)
9337 {
9338 case BUILT_IN_STRCPY_CHK:
9339 case BUILT_IN_STPCPY_CHK:
9340 /* For __strcat_chk the warning will be emitted only if overflowing
9341 by at least strlen (dest) + 1 bytes. */
9342 case BUILT_IN_STRCAT_CHK:
9343 len = CALL_EXPR_ARG (exp, 1);
9344 size = CALL_EXPR_ARG (exp, 2);
9345 is_strlen = 1;
9346 break;
9347 case BUILT_IN_STRNCAT_CHK:
9348 case BUILT_IN_STRNCPY_CHK:
9349 case BUILT_IN_STPNCPY_CHK:
9350 len = CALL_EXPR_ARG (exp, 2);
9351 size = CALL_EXPR_ARG (exp, 3);
9352 break;
9353 case BUILT_IN_SNPRINTF_CHK:
9354 case BUILT_IN_VSNPRINTF_CHK:
9355 len = CALL_EXPR_ARG (exp, 1);
9356 size = CALL_EXPR_ARG (exp, 3);
9357 break;
9358 default:
9359 gcc_unreachable ();
9360 }
9361
9362 if (!len || !size)
9363 return;
9364
9365 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9366 return;
9367
9368 if (is_strlen)
9369 {
9370 len = c_strlen (len, 1);
9371 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9372 return;
9373 }
9374 else if (fcode == BUILT_IN_STRNCAT_CHK)
9375 {
9376 tree src = CALL_EXPR_ARG (exp, 1);
9377 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9378 return;
9379 src = c_strlen (src, 1);
9380 if (! src || ! tree_fits_uhwi_p (src))
9381 {
9382 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9383 exp, get_callee_fndecl (exp));
9384 return;
9385 }
9386 else if (tree_int_cst_lt (src, size))
9387 return;
9388 }
9389 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9390 return;
9391
9392 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9393 exp, get_callee_fndecl (exp));
9394 }
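/* For illustration, with

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", 4);

   the source needs 9 bytes but the known object size is 4, so the code
   above emits the "will always overflow destination buffer" warning at
   compile time (in real code the size argument would come from
   __builtin_object_size).  */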
9395
9396 /* Emit a warning if a buffer overflow is detected at compile time
9397 in __sprintf_chk/__vsprintf_chk calls. */
9398
9399 static void
9400 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9401 {
9402 tree size, len, fmt;
9403 const char *fmt_str;
9404 int nargs = call_expr_nargs (exp);
9405
9406 /* Verify the required arguments in the original call. */
9407
9408 if (nargs < 4)
9409 return;
9410 size = CALL_EXPR_ARG (exp, 2);
9411 fmt = CALL_EXPR_ARG (exp, 3);
9412
9413 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9414 return;
9415
9416 /* Check whether the format is a literal string constant. */
9417 fmt_str = c_getstr (fmt);
9418 if (fmt_str == NULL)
9419 return;
9420
9421 if (!init_target_chars ())
9422 return;
9423
9424 /* If the format doesn't contain % args or %%, we know its size. */
9425 if (strchr (fmt_str, target_percent) == 0)
9426 len = build_int_cstu (size_type_node, strlen (fmt_str));
9427 /* If the format is "%s" and first ... argument is a string literal,
9428 we know it too. */
9429 else if (fcode == BUILT_IN_SPRINTF_CHK
9430 && strcmp (fmt_str, target_percent_s) == 0)
9431 {
9432 tree arg;
9433
9434 if (nargs < 5)
9435 return;
9436 arg = CALL_EXPR_ARG (exp, 4);
9437 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9438 return;
9439
9440 len = c_strlen (arg, 1);
9441 if (!len || ! tree_fits_uhwi_p (len))
9442 return;
9443 }
9444 else
9445 return;
9446
9447 if (! tree_int_cst_lt (len, size))
9448 warning_at (tree_nonartificial_location (exp),
9449 0, "%Kcall to %D will always overflow destination buffer",
9450 exp, get_callee_fndecl (exp));
9451 }
9452
9453 /* Emit a warning if free is called with the address of a variable. */
9454
9455 static void
9456 maybe_emit_free_warning (tree exp)
9457 {
9458 tree arg = CALL_EXPR_ARG (exp, 0);
9459
9460 STRIP_NOPS (arg);
9461 if (TREE_CODE (arg) != ADDR_EXPR)
9462 return;
9463
9464 arg = get_base_address (TREE_OPERAND (arg, 0));
9465 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9466 return;
9467
9468 if (SSA_VAR_P (arg))
9469 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9470 "%Kattempt to free a non-heap object %qD", exp, arg);
9471 else
9472 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9473 "%Kattempt to free a non-heap object", exp);
9474 }
9475
9476 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9477 if possible. */
9478
9479 static tree
9480 fold_builtin_object_size (tree ptr, tree ost)
9481 {
9482 unsigned HOST_WIDE_INT bytes;
9483 int object_size_type;
9484
9485 if (!validate_arg (ptr, POINTER_TYPE)
9486 || !validate_arg (ost, INTEGER_TYPE))
9487 return NULL_TREE;
9488
9489 STRIP_NOPS (ost);
9490
9491 if (TREE_CODE (ost) != INTEGER_CST
9492 || tree_int_cst_sgn (ost) < 0
9493 || compare_tree_int (ost, 3) > 0)
9494 return NULL_TREE;
9495
9496 object_size_type = tree_to_shwi (ost);
9497
9498 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9499 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9500 and (size_t) 0 for types 2 and 3. */
9501 if (TREE_SIDE_EFFECTS (ptr))
9502 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9503
9504 if (TREE_CODE (ptr) == ADDR_EXPR)
9505 {
9506 bytes = compute_builtin_object_size (ptr, object_size_type);
9507 if (wi::fits_to_tree_p (bytes, size_type_node))
9508 return build_int_cstu (size_type_node, bytes);
9509 }
9510 else if (TREE_CODE (ptr) == SSA_NAME)
9511 {
9512 /* If object size is not known yet, delay folding until
9513 later. Maybe subsequent passes will help determining
9514 it. */
9515 bytes = compute_builtin_object_size (ptr, object_size_type);
9516 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9517 && wi::fits_to_tree_p (bytes, size_type_node))
9518 return build_int_cstu (size_type_node, bytes);
9519 }
9520
9521 return NULL_TREE;
9522 }
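/* For illustration, with a directly visible object such as

     char buf[64];
     __builtin_object_size (&buf[10], 0)   ->  (size_t) 54

   whereas for an SSA_NAME pointer whose target is not yet known the
   call is deliberately left unfolded so that later passes can retry.  */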
9523
9524 /* Builtins with folding operations that operate on "..." arguments
9525 need special handling; we need to store the arguments in a convenient
9526 data structure before attempting any folding. Fortunately there are
9527 only a few builtins that fall into this category. FNDECL is the
9528 function, EXP is the CALL_EXPR for the call. */
9529
9530 static tree
9531 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9532 {
9533 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9534 tree ret = NULL_TREE;
9535
9536 switch (fcode)
9537 {
9538 case BUILT_IN_FPCLASSIFY:
9539 ret = fold_builtin_fpclassify (loc, args, nargs);
9540 break;
9541
9542 default:
9543 break;
9544 }
9545 if (ret)
9546 {
9547 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9548 SET_EXPR_LOCATION (ret, loc);
9549 TREE_NO_WARNING (ret) = 1;
9550 return ret;
9551 }
9552 return NULL_TREE;
9553 }
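/* For illustration, the only varargs builtin folded here is
   __builtin_fpclassify; assuming the usual <math.h> classification
   macros, a call with a constant argument such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0)

   folds down to FP_NORMAL.  */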
9554
9555 /* Initialize format string characters in the target charset. */
9556
9557 bool
9558 init_target_chars (void)
9559 {
9560 static bool init;
9561 if (!init)
9562 {
9563 target_newline = lang_hooks.to_target_charset ('\n');
9564 target_percent = lang_hooks.to_target_charset ('%');
9565 target_c = lang_hooks.to_target_charset ('c');
9566 target_s = lang_hooks.to_target_charset ('s');
9567 if (target_newline == 0 || target_percent == 0 || target_c == 0
9568 || target_s == 0)
9569 return false;
9570
9571 target_percent_c[0] = target_percent;
9572 target_percent_c[1] = target_c;
9573 target_percent_c[2] = '\0';
9574
9575 target_percent_s[0] = target_percent;
9576 target_percent_s[1] = target_s;
9577 target_percent_s[2] = '\0';
9578
9579 target_percent_s_newline[0] = target_percent;
9580 target_percent_s_newline[1] = target_s;
9581 target_percent_s_newline[2] = target_newline;
9582 target_percent_s_newline[3] = '\0';
9583
9584 init = true;
9585 }
9586 return true;
9587 }
9588
9589 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9590 and no overflow/underflow occurred. INEXACT is true if M was not
9591 exactly calculated. TYPE is the tree type for the result. This
9592 function assumes that you cleared the MPFR flags and then
9593 calculated M to see if anything subsequently set a flag prior to
9594 entering this function. Return NULL_TREE if any checks fail. */
9595
9596 static tree
9597 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9598 {
9599 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9600 overflow/underflow occurred. If -frounding-math, proceed iff the
9601 result of calling FUNC was exact. */
9602 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9603 && (!flag_rounding_math || !inexact))
9604 {
9605 REAL_VALUE_TYPE rr;
9606
9607 real_from_mpfr (&rr, m, type, GMP_RNDN);
9608 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9609 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9610 but the mpfr_t is not, then we underflowed in the
9611 conversion. */
9612 if (real_isfinite (&rr)
9613 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9614 {
9615 REAL_VALUE_TYPE rmode;
9616
9617 real_convert (&rmode, TYPE_MODE (type), &rr);
9618 /* Proceed iff the specified mode can hold the value. */
9619 if (real_identical (&rmode, &rr))
9620 return build_real (type, rmode);
9621 }
9622 }
9623 return NULL_TREE;
9624 }
9625
9626 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9627 number and no overflow/underflow occurred. INEXACT is true if M
9628 was not exactly calculated. TYPE is the tree type for the result.
9629 This function assumes that you cleared the MPFR flags and then
9630 calculated M to see if anything subsequently set a flag prior to
9631 entering this function. Return NULL_TREE if any checks fail, if
9632 FORCE_CONVERT is true, then bypass the checks. */
9633
9634 static tree
9635 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9636 {
9637 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9638 overflow/underflow occurred. If -frounding-math, proceed iff the
9639 result of calling FUNC was exact. */
9640 if (force_convert
9641 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9642 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9643 && (!flag_rounding_math || !inexact)))
9644 {
9645 REAL_VALUE_TYPE re, im;
9646
9647 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9648 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9649 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9650 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9651 but the mpfr_t is not, then we underflowed in the
9652 conversion. */
9653 if (force_convert
9654 || (real_isfinite (&re) && real_isfinite (&im)
9655 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9656 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9657 {
9658 REAL_VALUE_TYPE re_mode, im_mode;
9659
9660 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9661 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9662 /* Proceed iff the specified mode can hold the value. */
9663 if (force_convert
9664 || (real_identical (&re_mode, &re)
9665 && real_identical (&im_mode, &im)))
9666 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9667 build_real (TREE_TYPE (type), im_mode));
9668 }
9669 }
9670 return NULL_TREE;
9671 }
9672
9673 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9674 the value pointed to by ARG_QUO and return the remainder. The type
9675 is taken from the type of ARG0 and is used for setting the precision
9676 of the calculation and results. */
9677
9678 static tree
9679 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9680 {
9681 tree const type = TREE_TYPE (arg0);
9682 tree result = NULL_TREE;
9683
9684 STRIP_NOPS (arg0);
9685 STRIP_NOPS (arg1);
9686
9687 /* To proceed, MPFR must exactly represent the target floating point
9688 format, which only happens when the target base equals two. */
9689 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9690 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9691 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9692 {
9693 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9694 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9695
9696 if (real_isfinite (ra0) && real_isfinite (ra1))
9697 {
9698 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9699 const int prec = fmt->p;
9700 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9701 tree result_rem;
9702 long integer_quo;
9703 mpfr_t m0, m1;
9704
9705 mpfr_inits2 (prec, m0, m1, NULL);
9706 mpfr_from_real (m0, ra0, GMP_RNDN);
9707 mpfr_from_real (m1, ra1, GMP_RNDN);
9708 mpfr_clear_flags ();
9709 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9710 /* Remquo is independent of the rounding mode, so pass
9711 inexact=0 to do_mpfr_ckconv(). */
9712 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9713 mpfr_clears (m0, m1, NULL);
9714 if (result_rem)
9715 {
9716 /* MPFR calculates quo in the host's long so it may
9717 return more bits in quo than the target int can hold
9718 if sizeof(host long) > sizeof(target int). This can
9719 happen even for native compilers in LP64 mode. In
9720 these cases, modulo the quo value with the largest
9721 number that the target int can hold while leaving one
9722 bit for the sign. */
9723 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9724 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9725
9726 /* Dereference the quo pointer argument. */
9727 arg_quo = build_fold_indirect_ref (arg_quo);
9728 /* Proceed iff a valid pointer type was passed in. */
9729 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9730 {
9731 /* Set the value. */
9732 tree result_quo
9733 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9734 build_int_cst (TREE_TYPE (arg_quo),
9735 integer_quo));
9736 TREE_SIDE_EFFECTS (result_quo) = 1;
9737 /* Combine the quo assignment with the rem. */
9738 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9739 result_quo, result_rem));
9740 }
9741 }
9742 }
9743 }
9744 return result;
9745 }
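/* For illustration, with constant arguments

     remquo (5.0, 3.0, &q)

   mpfr_remquo computes a remainder of -1.0 and a quotient of 2 (5/3
   rounded to nearest), so the call folds to a COMPOUND_EXPR that stores
   2 into *q and yields -1.0.  */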
9746
9747 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9748 resulting value as a tree with type TYPE. The mpfr precision is
9749 set to the precision of TYPE. We assume that this mpfr function
9750 returns zero if the result could be calculated exactly within the
9751 requested precision. In addition, the integer pointer represented
9752 by ARG_SG will be dereferenced and set to the appropriate signgam
9753 (-1,1) value. */
9754
9755 static tree
9756 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9757 {
9758 tree result = NULL_TREE;
9759
9760 STRIP_NOPS (arg);
9761
9762 /* To proceed, MPFR must exactly represent the target floating point
9763 format, which only happens when the target base equals two. Also
9764 verify ARG is a constant and that ARG_SG is an int pointer. */
9765 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9766 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9767 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9768 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9769 {
9770 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9771
9772 /* In addition to NaN and Inf, the argument cannot be zero or a
9773 negative integer. */
9774 if (real_isfinite (ra)
9775 && ra->cl != rvc_zero
9776 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9777 {
9778 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9779 const int prec = fmt->p;
9780 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9781 int inexact, sg;
9782 mpfr_t m;
9783 tree result_lg;
9784
9785 mpfr_init2 (m, prec);
9786 mpfr_from_real (m, ra, GMP_RNDN);
9787 mpfr_clear_flags ();
9788 inexact = mpfr_lgamma (m, &sg, m, rnd);
9789 result_lg = do_mpfr_ckconv (m, type, inexact);
9790 mpfr_clear (m);
9791 if (result_lg)
9792 {
9793 tree result_sg;
9794
9795 /* Dereference the arg_sg pointer argument. */
9796 arg_sg = build_fold_indirect_ref (arg_sg);
9797 /* Assign the signgam value into *arg_sg. */
9798 result_sg = fold_build2 (MODIFY_EXPR,
9799 TREE_TYPE (arg_sg), arg_sg,
9800 build_int_cst (TREE_TYPE (arg_sg), sg));
9801 TREE_SIDE_EFFECTS (result_sg) = 1;
9802 /* Combine the signgam assignment with the lgamma result. */
9803 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9804 result_sg, result_lg));
9805 }
9806 }
9807 }
9808
9809 return result;
9810 }
9811
9812 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
9813 mpc function FUNC on them and return the resulting value as a tree
9814 with type TYPE. The mpfr precision is set to the precision of
9815 TYPE. We assume that function FUNC returns zero if the result
9816 could be calculated exactly within the requested precision. If
9817 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9818 in the arguments and/or results. */
9819
9820 tree
9821 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9822 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9823 {
9824 tree result = NULL_TREE;
9825
9826 STRIP_NOPS (arg0);
9827 STRIP_NOPS (arg1);
9828
9829 /* To proceed, MPFR must exactly represent the target floating point
9830 format, which only happens when the target base equals two. */
9831 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9832 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9833 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9834 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9835 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9836 {
9837 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9838 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9839 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9840 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9841
9842 if (do_nonfinite
9843 || (real_isfinite (re0) && real_isfinite (im0)
9844 && real_isfinite (re1) && real_isfinite (im1)))
9845 {
9846 const struct real_format *const fmt =
9847 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9848 const int prec = fmt->p;
9849 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9850 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9851 int inexact;
9852 mpc_t m0, m1;
9853
9854 mpc_init2 (m0, prec);
9855 mpc_init2 (m1, prec);
9856 mpfr_from_real (mpc_realref (m0), re0, rnd);
9857 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9858 mpfr_from_real (mpc_realref (m1), re1, rnd);
9859 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9860 mpfr_clear_flags ();
9861 inexact = func (m0, m0, m1, crnd);
9862 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9863 mpc_clear (m0);
9864 mpc_clear (m1);
9865 }
9866 }
9867
9868 return result;
9869 }
9870
9871 /* A wrapper function for builtin folding that prevents warnings for
9872 "statement without effect" and the like, caused by removing the
9873 call node before the warning is generated. */
9874
9875 tree
9876 fold_call_stmt (gcall *stmt, bool ignore)
9877 {
9878 tree ret = NULL_TREE;
9879 tree fndecl = gimple_call_fndecl (stmt);
9880 location_t loc = gimple_location (stmt);
9881 if (fndecl
9882 && TREE_CODE (fndecl) == FUNCTION_DECL
9883 && DECL_BUILT_IN (fndecl)
9884 && !gimple_call_va_arg_pack_p (stmt))
9885 {
9886 int nargs = gimple_call_num_args (stmt);
9887 tree *args = (nargs > 0
9888 ? gimple_call_arg_ptr (stmt, 0)
9889 : &error_mark_node);
9890
9891 if (avoid_folding_inline_builtin (fndecl))
9892 return NULL_TREE;
9893 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9894 {
9895 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9896 }
9897 else
9898 {
9899 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9900 if (ret)
9901 {
9902 /* Propagate location information from original call to
9903 expansion of builtin. Otherwise things like
9904 maybe_emit_chk_warning, that operate on the expansion
9905 of a builtin, will use the wrong location information. */
9906 if (gimple_has_location (stmt))
9907 {
9908 tree realret = ret;
9909 if (TREE_CODE (ret) == NOP_EXPR)
9910 realret = TREE_OPERAND (ret, 0);
9911 if (CAN_HAVE_LOCATION_P (realret)
9912 && !EXPR_HAS_LOCATION (realret))
9913 SET_EXPR_LOCATION (realret, loc);
9914 return realret;
9915 }
9916 return ret;
9917 }
9918 }
9919 }
9920 return NULL_TREE;
9921 }
9922
9923 /* Look up the function in builtin_decl that corresponds to DECL
9924 and set ASMSPEC as its user assembler name. DECL must be a
9925 function decl that declares a builtin. */
9926
9927 void
9928 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9929 {
9930 tree builtin;
9931 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9932 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9933 && asmspec != 0);
9934
9935 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9936 set_user_assembler_name (builtin, asmspec);
9937 switch (DECL_FUNCTION_CODE (decl))
9938 {
9939 case BUILT_IN_MEMCPY:
9940 init_block_move_fn (asmspec);
9941 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
9942 break;
9943 case BUILT_IN_MEMSET:
9944 init_block_clear_fn (asmspec);
9945 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
9946 break;
9947 case BUILT_IN_MEMMOVE:
9948 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
9949 break;
9950 case BUILT_IN_MEMCMP:
9951 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
9952 break;
9953 case BUILT_IN_ABORT:
9954 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
9955 break;
9956 case BUILT_IN_FFS:
9957 if (INT_TYPE_SIZE < BITS_PER_WORD)
9958 {
9959 set_user_assembler_libfunc ("ffs", asmspec);
9960 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
9961 MODE_INT, 0), "ffs");
9962 }
9963 break;
9964 default:
9965 break;
9966 }
9967 }
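/* For illustration, a declaration that renames a builtin with an asm
   label, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("mymemcpy");

   is expected to reach this function, which then points the memcpy
   libfunc and the block-move helpers at "mymemcpy" so that
   compiler-generated copies use the renamed entry point ("mymemcpy" is
   just a placeholder name here).  */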
9968
9969 /* Return true if DECL is a builtin that expands to a constant or similarly
9970 simple code. */
9971 bool
9972 is_simple_builtin (tree decl)
9973 {
9974 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9975 switch (DECL_FUNCTION_CODE (decl))
9976 {
9977 /* Builtins that expand to constants. */
9978 case BUILT_IN_CONSTANT_P:
9979 case BUILT_IN_EXPECT:
9980 case BUILT_IN_OBJECT_SIZE:
9981 case BUILT_IN_UNREACHABLE:
9982 /* Simple register moves or loads from stack. */
9983 case BUILT_IN_ASSUME_ALIGNED:
9984 case BUILT_IN_RETURN_ADDRESS:
9985 case BUILT_IN_EXTRACT_RETURN_ADDR:
9986 case BUILT_IN_FROB_RETURN_ADDR:
9987 case BUILT_IN_RETURN:
9988 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9989 case BUILT_IN_FRAME_ADDRESS:
9990 case BUILT_IN_VA_END:
9991 case BUILT_IN_STACK_SAVE:
9992 case BUILT_IN_STACK_RESTORE:
9993 /* Exception state returns or moves registers around. */
9994 case BUILT_IN_EH_FILTER:
9995 case BUILT_IN_EH_POINTER:
9996 case BUILT_IN_EH_COPY_VALUES:
9997 return true;
9998
9999 default:
10000 return false;
10001 }
10002
10003 return false;
10004 }
10005
10006 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10007 will most probably be expanded inline into reasonably simple code. This is a
10008 superset of is_simple_builtin. */
10009 bool
10010 is_inexpensive_builtin (tree decl)
10011 {
10012 if (!decl)
10013 return false;
10014 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10015 return true;
10016 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10017 switch (DECL_FUNCTION_CODE (decl))
10018 {
10019 case BUILT_IN_ABS:
10020 case BUILT_IN_ALLOCA:
10021 case BUILT_IN_ALLOCA_WITH_ALIGN:
10022 case BUILT_IN_BSWAP16:
10023 case BUILT_IN_BSWAP32:
10024 case BUILT_IN_BSWAP64:
10025 case BUILT_IN_CLZ:
10026 case BUILT_IN_CLZIMAX:
10027 case BUILT_IN_CLZL:
10028 case BUILT_IN_CLZLL:
10029 case BUILT_IN_CTZ:
10030 case BUILT_IN_CTZIMAX:
10031 case BUILT_IN_CTZL:
10032 case BUILT_IN_CTZLL:
10033 case BUILT_IN_FFS:
10034 case BUILT_IN_FFSIMAX:
10035 case BUILT_IN_FFSL:
10036 case BUILT_IN_FFSLL:
10037 case BUILT_IN_IMAXABS:
10038 case BUILT_IN_FINITE:
10039 case BUILT_IN_FINITEF:
10040 case BUILT_IN_FINITEL:
10041 case BUILT_IN_FINITED32:
10042 case BUILT_IN_FINITED64:
10043 case BUILT_IN_FINITED128:
10044 case BUILT_IN_FPCLASSIFY:
10045 case BUILT_IN_ISFINITE:
10046 case BUILT_IN_ISINF_SIGN:
10047 case BUILT_IN_ISINF:
10048 case BUILT_IN_ISINFF:
10049 case BUILT_IN_ISINFL:
10050 case BUILT_IN_ISINFD32:
10051 case BUILT_IN_ISINFD64:
10052 case BUILT_IN_ISINFD128:
10053 case BUILT_IN_ISNAN:
10054 case BUILT_IN_ISNANF:
10055 case BUILT_IN_ISNANL:
10056 case BUILT_IN_ISNAND32:
10057 case BUILT_IN_ISNAND64:
10058 case BUILT_IN_ISNAND128:
10059 case BUILT_IN_ISNORMAL:
10060 case BUILT_IN_ISGREATER:
10061 case BUILT_IN_ISGREATEREQUAL:
10062 case BUILT_IN_ISLESS:
10063 case BUILT_IN_ISLESSEQUAL:
10064 case BUILT_IN_ISLESSGREATER:
10065 case BUILT_IN_ISUNORDERED:
10066 case BUILT_IN_VA_ARG_PACK:
10067 case BUILT_IN_VA_ARG_PACK_LEN:
10068 case BUILT_IN_VA_COPY:
10069 case BUILT_IN_TRAP:
10070 case BUILT_IN_SAVEREGS:
10071 case BUILT_IN_POPCOUNTL:
10072 case BUILT_IN_POPCOUNTLL:
10073 case BUILT_IN_POPCOUNTIMAX:
10074 case BUILT_IN_POPCOUNT:
10075 case BUILT_IN_PARITYL:
10076 case BUILT_IN_PARITYLL:
10077 case BUILT_IN_PARITYIMAX:
10078 case BUILT_IN_PARITY:
10079 case BUILT_IN_LABS:
10080 case BUILT_IN_LLABS:
10081 case BUILT_IN_PREFETCH:
10082 case BUILT_IN_ACC_ON_DEVICE:
10083 return true;
10084
10085 default:
10086 return is_simple_builtin (decl);
10087 }
10088
10089 return false;
10090 }