PR 68432: Add a target hook to control size/speed optab choices
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "diagnostic-core.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "fold-const-call.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "varasm.h"
47 #include "tree-object-size.h"
48 #include "realmpfr.h"
49 #include "cfgrtl.h"
50 #include "except.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "libfuncs.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "langhooks.h"
59 #include "value-prof.h"
60 #include "builtins.h"
61 #include "asan.h"
62 #include "cilk.h"
63 #include "tree-chkp.h"
64 #include "rtl-chkp.h"
65 #include "internal-fn.h"
66 #include "case-cfn-macros.h"
67
68
69 struct target_builtins default_target_builtins;
70 #if SWITCHABLE_TARGET
71 struct target_builtins *this_target_builtins = &default_target_builtins;
72 #endif
73
74 /* Define the names of the builtin function types and codes. */
75 const char *const built_in_class_names[BUILT_IN_LAST]
76 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
77
78 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
79 const char * built_in_names[(int) END_BUILTINS] =
80 {
81 #include "builtins.def"
82 };
83
84 /* Set up an array of builtin_info_type, making sure each element's decl is
85 initialized to NULL_TREE. */
86 builtin_info_type builtin_info[(int)END_BUILTINS];
87
88 /* Non-zero if __builtin_constant_p should be folded right away. */
89 bool force_folding_builtin_constant_p;
90
91 static rtx c_readstr (const char *, machine_mode);
92 static int target_char_cast (tree, char *);
93 static rtx get_memory_rtx (tree, tree);
94 static int apply_args_size (void);
95 static int apply_result_size (void);
96 static rtx result_vector (int, rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_strcmp (tree, rtx);
116 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
117 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx);
119 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
120 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
121 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
122 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
124 machine_mode, int, tree);
125 static rtx expand_builtin_strcpy (tree, rtx);
126 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
127 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, machine_mode);
131 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
135 static rtx expand_builtin_alloca (tree, bool);
136 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_classify_type (tree);
142 static tree fold_builtin_strlen (location_t, tree, tree);
143 static tree fold_builtin_inf (location_t, tree, int);
144 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
145 static bool validate_arg (const_tree, enum tree_code code);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_strchr (location_t, tree, tree, tree);
149 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
151 static tree fold_builtin_strcmp (location_t, tree, tree);
152 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
153 static tree fold_builtin_isascii (location_t, tree);
154 static tree fold_builtin_toascii (location_t, tree);
155 static tree fold_builtin_isdigit (location_t, tree);
156 static tree fold_builtin_fabs (location_t, tree, tree);
157 static tree fold_builtin_abs (location_t, tree, tree);
158 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
159 enum tree_code);
160 static tree fold_builtin_0 (location_t, tree);
161 static tree fold_builtin_1 (location_t, tree, tree);
162 static tree fold_builtin_2 (location_t, tree, tree, tree);
163 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_varargs (location_t, tree, tree*, int);
165
166 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
167 static tree fold_builtin_strstr (location_t, tree, tree, tree);
168 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
169 static tree fold_builtin_strspn (location_t, tree, tree);
170 static tree fold_builtin_strcspn (location_t, tree, tree);
171
172 static rtx expand_builtin_object_size (tree);
173 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
174 enum built_in_function);
175 static void maybe_emit_chk_warning (tree, enum built_in_function);
176 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
177 static void maybe_emit_free_warning (tree);
178 static tree fold_builtin_object_size (tree, tree);
179
180 unsigned HOST_WIDE_INT target_newline;
181 unsigned HOST_WIDE_INT target_percent;
182 static unsigned HOST_WIDE_INT target_c;
183 static unsigned HOST_WIDE_INT target_s;
184 char target_percent_c[3];
185 char target_percent_s[3];
186 char target_percent_s_newline[4];
187 static tree do_mpfr_remquo (tree, tree, tree);
188 static tree do_mpfr_lgamma_r (tree, tree, tree);
189 static void expand_builtin_sync_synchronize (void);
190
191 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or is one of the Cilk Plus __cilkrts_ helpers. */
192
193 static bool
194 is_builtin_name (const char *name)
195 {
196 if (strncmp (name, "__builtin_", 10) == 0)
197 return true;
198 if (strncmp (name, "__sync_", 7) == 0)
199 return true;
200 if (strncmp (name, "__atomic_", 9) == 0)
201 return true;
202 if (flag_cilkplus
203 && (!strcmp (name, "__cilkrts_detach")
204 || !strcmp (name, "__cilkrts_pop_frame")))
205 return true;
206 return false;
207 }
208
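/* Editor's illustrative note (not part of the original source): for example,
   is_builtin_name ("__builtin_memcpy") and is_builtin_name ("__sync_fetch_and_add")
   return true, while is_builtin_name ("memcpy") returns false; the __cilkrts_
   names are accepted only when flag_cilkplus is set.  */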
209
210 /* Return true if DECL is a function symbol representing a built-in. */
211
212 bool
213 is_builtin_fn (tree decl)
214 {
215 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
216 }
217
218 /* Return true if NODE should be considered for inline expansion regardless
219 of the optimization level; this is the case whenever a function is invoked
220 under its "internal" name, which normally contains the prefix "__builtin". */
221
222 bool
223 called_as_built_in (tree node)
224 {
225 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
226 we want the name used to call the function, not the name it
227 will have. */
228 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
229 return is_builtin_name (name);
230 }
231
232 /* Compute values M and N such that M divides (address of EXP - N) and such
233 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
234 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
235 *ALIGNP and any bit-offset to *BITPOSP.
236
237 Note that the address (and thus the alignment) computed here is based
238 on the address to which a symbol resolves, whereas DECL_ALIGN is based
239 on the address at which an object is actually located. These two
240 addresses are not always the same. For example, on ARM targets,
241 the address &foo of a Thumb function foo() has the lowest bit set,
242 whereas foo() itself starts on an even address.
243
244 If ADDR_P is true we are taking the address of the memory reference EXP
245 and thus cannot rely on the access taking place. */
246
247 static bool
248 get_object_alignment_2 (tree exp, unsigned int *alignp,
249 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
250 {
251 HOST_WIDE_INT bitsize, bitpos;
252 tree offset;
253 machine_mode mode;
254 int unsignedp, reversep, volatilep;
255 unsigned int align = BITS_PER_UNIT;
256 bool known_alignment = false;
257
258 /* Get the innermost object and the constant (bitpos) and possibly
259 variable (offset) offset of the access. */
260 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
261 &unsignedp, &reversep, &volatilep, true);
262
263 /* Extract alignment information from the innermost object and
264 possibly adjust bitpos and offset. */
265 if (TREE_CODE (exp) == FUNCTION_DECL)
266 {
267 /* Function addresses can encode extra information besides their
268 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
269 allows the low bit to be used as a virtual bit, we know
270 that the address itself must be at least 2-byte aligned. */
271 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
272 align = 2 * BITS_PER_UNIT;
273 }
274 else if (TREE_CODE (exp) == LABEL_DECL)
275 ;
276 else if (TREE_CODE (exp) == CONST_DECL)
277 {
278 /* The alignment of a CONST_DECL is determined by its initializer. */
279 exp = DECL_INITIAL (exp);
280 align = TYPE_ALIGN (TREE_TYPE (exp));
281 if (CONSTANT_CLASS_P (exp))
282 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
283
284 known_alignment = true;
285 }
286 else if (DECL_P (exp))
287 {
288 align = DECL_ALIGN (exp);
289 known_alignment = true;
290 }
291 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
292 {
293 align = TYPE_ALIGN (TREE_TYPE (exp));
294 }
295 else if (TREE_CODE (exp) == INDIRECT_REF
296 || TREE_CODE (exp) == MEM_REF
297 || TREE_CODE (exp) == TARGET_MEM_REF)
298 {
299 tree addr = TREE_OPERAND (exp, 0);
300 unsigned ptr_align;
301 unsigned HOST_WIDE_INT ptr_bitpos;
302 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
303
304 /* If the address is explicitly aligned, handle that. */
305 if (TREE_CODE (addr) == BIT_AND_EXPR
306 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
307 {
308 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
309 ptr_bitmask *= BITS_PER_UNIT;
310 align = ptr_bitmask & -ptr_bitmask;
311 addr = TREE_OPERAND (addr, 0);
312 }
313
314 known_alignment
315 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
316 align = MAX (ptr_align, align);
317
318 /* Re-apply explicit alignment to the bitpos. */
319 ptr_bitpos &= ptr_bitmask;
320
321 /* The alignment of the pointer operand in a TARGET_MEM_REF
322 has to take the variable offset parts into account. */
323 if (TREE_CODE (exp) == TARGET_MEM_REF)
324 {
325 if (TMR_INDEX (exp))
326 {
327 unsigned HOST_WIDE_INT step = 1;
328 if (TMR_STEP (exp))
329 step = TREE_INT_CST_LOW (TMR_STEP (exp));
330 align = MIN (align, (step & -step) * BITS_PER_UNIT);
331 }
332 if (TMR_INDEX2 (exp))
333 align = BITS_PER_UNIT;
334 known_alignment = false;
335 }
336
337 /* When EXP is an actual memory reference then we can use
338 TYPE_ALIGN of a pointer indirection to derive alignment.
339 Do so only if get_pointer_alignment_1 did not reveal absolute
340 alignment knowledge and if using that alignment would
341 improve the situation. */
342 if (!addr_p && !known_alignment
343 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
344 align = TYPE_ALIGN (TREE_TYPE (exp));
345 else
346 {
347 /* Else adjust bitpos accordingly. */
348 bitpos += ptr_bitpos;
349 if (TREE_CODE (exp) == MEM_REF
350 || TREE_CODE (exp) == TARGET_MEM_REF)
351 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
352 }
353 }
354 else if (TREE_CODE (exp) == STRING_CST)
355 {
356 /* STRING_CSTs are the only constant objects we allow not to be
357 wrapped inside a CONST_DECL. */
358 align = TYPE_ALIGN (TREE_TYPE (exp));
359 if (CONSTANT_CLASS_P (exp))
360 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
361
362 known_alignment = true;
363 }
364
365 /* If there is a non-constant offset part extract the maximum
366 alignment that can prevail. */
367 if (offset)
368 {
369 unsigned int trailing_zeros = tree_ctz (offset);
370 if (trailing_zeros < HOST_BITS_PER_INT)
371 {
372 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
373 if (inner)
374 align = MIN (align, inner);
375 }
376 }
377
378 *alignp = align;
379 *bitposp = bitpos & (*alignp - 1);
380 return known_alignment;
381 }
382
383 /* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
385 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
386 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
387
388 bool
389 get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
391 {
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393 }
394
395 /* Return the alignment in bits of EXP, an object. */
396
397 unsigned int
398 get_object_alignment (tree exp)
399 {
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
403 get_object_alignment_1 (exp, &align, &bitpos);
404
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
409 align = (bitpos & -bitpos);
410 return align;
411 }
412
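/* Editor's illustrative note (not part of the original source): ALIGN and
   BITPOS describe the address as ptr & (align - 1) == bitpos, so a nonzero
   BITPOS limits the usable alignment to its lowest set bit.  For instance,
   with align == 256 and bitpos == 24 (3 bytes past a 32-byte boundary),
   get_object_alignment returns 24 & -24 == 8, i.e. only byte alignment
   can be guaranteed.  */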
413 /* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
415 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
417
418 If EXP is not a pointer, false is returned too. */
419
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
423 {
424 STRIP_NOPS (exp);
425
426 if (TREE_CODE (exp) == ADDR_EXPR)
427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
430 {
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
438 {
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
441 {
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
445 }
446 }
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
450 }
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
453 {
454 unsigned int ptr_align, ptr_misalign;
455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
456
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
458 {
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
461 /* We cannot really tell whether this result is an approximation. */
462 return true;
463 }
464 else
465 {
466 *bitposp = 0;
467 *alignp = BITS_PER_UNIT;
468 return false;
469 }
470 }
471 else if (TREE_CODE (exp) == INTEGER_CST)
472 {
473 *alignp = BIGGEST_ALIGNMENT;
474 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
475 & (BIGGEST_ALIGNMENT - 1));
476 return true;
477 }
478
479 *bitposp = 0;
480 *alignp = BITS_PER_UNIT;
481 return false;
482 }
483
484 /* Return the alignment in bits of EXP, a pointer valued expression.
485 The alignment returned is, by default, the alignment of the thing that
486 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
487
488 Otherwise, look at the expression to see if we can do better, i.e., if the
489 expression is actually pointing at an object whose alignment is tighter. */
490
491 unsigned int
492 get_pointer_alignment (tree exp)
493 {
494 unsigned HOST_WIDE_INT bitpos = 0;
495 unsigned int align;
496
497 get_pointer_alignment_1 (exp, &align, &bitpos);
498
499 /* align and bitpos now specify known low bits of the pointer.
500 ptr & (align - 1) == bitpos. */
501
502 if (bitpos != 0)
503 align = (bitpos & -bitpos);
504
505 return align;
506 }
507
508 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
509 way, because the string could contain a zero byte in the middle.
510 TREE_STRING_LENGTH is the size of the character array, not the string.
511
512 ONLY_VALUE should be nonzero if the result is not going to be emitted
513 into the instruction stream and zero if it is going to be expanded.
514 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
515 is returned, otherwise NULL, since
516 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
517 evaluate the side-effects.
518
519 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
520 accesses. Note that this implies the result is not going to be emitted
521 into the instruction stream.
522
523 The value returned is of type `ssizetype'.
524
525 Unfortunately, string_constant can't access the values of const char
526 arrays with initializers, so neither can we do so here. */
527
528 tree
529 c_strlen (tree src, int only_value)
530 {
531 tree offset_node;
532 HOST_WIDE_INT offset;
533 int max;
534 const char *ptr;
535 location_t loc;
536
537 STRIP_NOPS (src);
538 if (TREE_CODE (src) == COND_EXPR
539 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
540 {
541 tree len1, len2;
542
543 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
544 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
545 if (tree_int_cst_equal (len1, len2))
546 return len1;
547 }
548
549 if (TREE_CODE (src) == COMPOUND_EXPR
550 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
551 return c_strlen (TREE_OPERAND (src, 1), only_value);
552
553 loc = EXPR_LOC_OR_LOC (src, input_location);
554
555 src = string_constant (src, &offset_node);
556 if (src == 0)
557 return NULL_TREE;
558
559 max = TREE_STRING_LENGTH (src) - 1;
560 ptr = TREE_STRING_POINTER (src);
561
562 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
563 {
564 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
565 compute the offset to the following null if we don't know where to
566 start searching for it. */
567 int i;
568
569 for (i = 0; i < max; i++)
570 if (ptr[i] == 0)
571 return NULL_TREE;
572
573 /* We don't know the starting offset, but we do know that the string
574 has no internal zero bytes. We can assume that the offset falls
575 within the bounds of the string; otherwise, the programmer deserves
576 what he gets. Subtract the offset from the length of the string,
577 and return that. This would perhaps not be valid if we were dealing
578 with named arrays in addition to literal string constants. */
579
580 return size_diffop_loc (loc, size_int (max), offset_node);
581 }
582
583 /* We have a known offset into the string. Start searching there for
584 a null character if we can represent it as a single HOST_WIDE_INT. */
585 if (offset_node == 0)
586 offset = 0;
587 else if (! tree_fits_shwi_p (offset_node))
588 offset = -1;
589 else
590 offset = tree_to_shwi (offset_node);
591
592 /* If the offset is known to be out of bounds, warn, and call strlen at
593 runtime. */
594 if (offset < 0 || offset > max)
595 {
596 /* Suppress multiple warnings for propagated constant strings. */
597 if (only_value != 2
598 && !TREE_NO_WARNING (src))
599 {
600 warning_at (loc, 0, "offset outside bounds of constant string");
601 TREE_NO_WARNING (src) = 1;
602 }
603 return NULL_TREE;
604 }
605
606 /* Use strlen to search for the first zero byte. Since any strings
607 constructed with build_string will have nulls appended, we win even
608 if we get handed something like (char[4])"abcd".
609
610 Since OFFSET is our starting index into the string, no further
611 calculation is needed. */
612 return ssize_int (strlen (ptr + offset));
613 }
614
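/* Editor's illustrative note (not part of the original source): given the
   string constant "foo\0bar" (TREE_STRING_LENGTH 8), c_strlen returns
   ssize_int (3) for offset 0 and ssize_int (2) for a known offset of 5,
   but NULL_TREE when the offset is non-constant, because the embedded zero
   byte makes the length depend on where the search starts.  */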
615 /* Return a constant integer corresponding to target reading
616 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
617
618 static rtx
619 c_readstr (const char *str, machine_mode mode)
620 {
621 HOST_WIDE_INT ch;
622 unsigned int i, j;
623 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
624
625 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
626 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
627 / HOST_BITS_PER_WIDE_INT;
628
629 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
630 for (i = 0; i < len; i++)
631 tmp[i] = 0;
632
633 ch = 1;
634 for (i = 0; i < GET_MODE_SIZE (mode); i++)
635 {
636 j = i;
637 if (WORDS_BIG_ENDIAN)
638 j = GET_MODE_SIZE (mode) - i - 1;
639 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
640 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
641 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
642 j *= BITS_PER_UNIT;
643
644 if (ch)
645 ch = (unsigned char) str[i];
646 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
647 }
648
649 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
650 return immed_wide_int_const (c, mode);
651 }
652
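/* Editor's illustrative note (not part of the original source): on a typical
   little-endian target c_readstr ("abcd", SImode) yields the constant
   0x64636261 (byte 'a' in the least significant position), while a pure
   big-endian target gets 0x61626364; the string is read as the target would
   read it from memory.  */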
653 /* Cast a target constant CST to target CHAR, and if that value fits into
654 the host char type, return zero and put that value into the variable
655 pointed to by P. */
656
657 static int
658 target_char_cast (tree cst, char *p)
659 {
660 unsigned HOST_WIDE_INT val, hostval;
661
662 if (TREE_CODE (cst) != INTEGER_CST
663 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
664 return 1;
665
666 /* We do not care here whether it fits or not. */
667 val = TREE_INT_CST_LOW (cst);
668
669 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
670 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
671
672 hostval = val;
673 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
674 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
675
676 if (val != hostval)
677 return 1;
678
679 *p = hostval;
680 return 0;
681 }
682
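/* Editor's illustrative note (not part of the original source): for an
   INTEGER_CST with value 65, target_char_cast stores 'A' (on an ASCII host)
   through P and returns 0; it returns 1 for a non-constant argument or for a
   value that does not survive narrowing to the host char.  */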
683 /* Similar to save_expr, but assumes that arbitrary code is not executed
684 in between the multiple evaluations. In particular, we assume that a
685 non-addressable local variable will not be modified. */
686
687 static tree
688 builtin_save_expr (tree exp)
689 {
690 if (TREE_CODE (exp) == SSA_NAME
691 || (TREE_ADDRESSABLE (exp) == 0
692 && (TREE_CODE (exp) == PARM_DECL
693 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
694 return exp;
695
696 return save_expr (exp);
697 }
698
699 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
700 times to get the address of either a higher stack frame, or a return
701 address located within it (depending on FNDECL_CODE). */
702
703 static rtx
704 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
705 {
706 int i;
707 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
708 if (tem == NULL_RTX)
709 {
710 /* For a zero count with __builtin_return_address, we don't care what
711 frame address we return, because target-specific definitions will
712 override us. Therefore frame pointer elimination is OK, and using
713 the soft frame pointer is OK.
714
715 For a nonzero count, or a zero count with __builtin_frame_address,
716 we require a stable offset from the current frame pointer to the
717 previous one, so we must use the hard frame pointer, and
718 we must disable frame pointer elimination. */
719 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
720 tem = frame_pointer_rtx;
721 else
722 {
723 tem = hard_frame_pointer_rtx;
724
725 /* Tell reload not to eliminate the frame pointer. */
726 crtl->accesses_prior_frames = 1;
727 }
728 }
729
730 if (count > 0)
731 SETUP_FRAME_ADDRESSES ();
732
733 /* On the SPARC, the return address is not in the frame, it is in a
734 register. There is no way to access it off of the current frame
735 pointer, but it can be accessed off the previous frame pointer by
736 reading the value from the register window save area. */
737 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
738 count--;
739
740 /* Scan back COUNT frames to the specified frame. */
741 for (i = 0; i < count; i++)
742 {
743 /* Assume the dynamic chain pointer is in the word that the
744 frame address points to, unless otherwise specified. */
745 tem = DYNAMIC_CHAIN_ADDRESS (tem);
746 tem = memory_address (Pmode, tem);
747 tem = gen_frame_mem (Pmode, tem);
748 tem = copy_to_reg (tem);
749 }
750
751 /* For __builtin_frame_address, return what we've got. But, on
752 the SPARC for example, we may have to add a bias. */
753 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
754 return FRAME_ADDR_RTX (tem);
755
756 /* For __builtin_return_address, get the return address from that frame. */
757 #ifdef RETURN_ADDR_RTX
758 tem = RETURN_ADDR_RTX (count, tem);
759 #else
760 tem = memory_address (Pmode,
761 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
762 tem = gen_frame_mem (Pmode, tem);
763 #endif
764 return tem;
765 }
766
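/* Editor's illustrative note (not part of the original source): this is the
   expander behind the user-level builtins, e.g.

     void *ra = __builtin_return_address (0);   /* current function's return address  */
     void *fp = __builtin_frame_address (1);    /* frame address of the caller  */

   COUNT must be a constant; a nonzero count (or any __builtin_frame_address)
   forces use of the hard frame pointer and walks the dynamic chain as in the
   loop above.  */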
767 /* Alias set used for setjmp buffer. */
768 static alias_set_type setjmp_alias_set = -1;
769
770 /* Construct the leading half of a __builtin_setjmp call. Control will
771 return to RECEIVER_LABEL. This is also called directly by the SJLJ
772 exception handling code. */
773
774 void
775 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
776 {
777 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
778 rtx stack_save;
779 rtx mem;
780
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
783
784 buf_addr = convert_memory_address (Pmode, buf_addr);
785
786 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
787
788 /* We store the frame pointer and the address of receiver_label in
789 the buffer and use the rest of it for the stack save area, which
790 is machine-dependent. */
791
792 mem = gen_rtx_MEM (Pmode, buf_addr);
793 set_mem_alias_set (mem, setjmp_alias_set);
794 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
795
796 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
797 GET_MODE_SIZE (Pmode))),
798 set_mem_alias_set (mem, setjmp_alias_set);
799
800 emit_move_insn (validize_mem (mem),
801 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
802
803 stack_save = gen_rtx_MEM (sa_mode,
804 plus_constant (Pmode, buf_addr,
805 2 * GET_MODE_SIZE (Pmode)));
806 set_mem_alias_set (stack_save, setjmp_alias_set);
807 emit_stack_save (SAVE_NONLOCAL, &stack_save);
808
809 /* If there is further processing to do, do it. */
810 if (targetm.have_builtin_setjmp_setup ())
811 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
812
813 /* We have a nonlocal label. */
814 cfun->has_nonlocal_label = 1;
815 }
816
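/* Editor's illustrative note (not part of the original source): the code above
   lays the __builtin_setjmp buffer out as

     word 0       targetm.builtin_setjmp_frame_value ()
     word 1       address of RECEIVER_LABEL
     words 2..    nonlocal stack save area (sa_mode)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below rely on
   the same offsets.  */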
817 /* Construct the trailing part of a __builtin_setjmp call. This is
818 also called directly by the SJLJ exception handling code.
819 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
820
821 void
822 expand_builtin_setjmp_receiver (rtx receiver_label)
823 {
824 rtx chain;
825
826 /* The frame pointer is live when we get here, so we have to make sure
827 it's marked as used by this function. */
828 emit_use (hard_frame_pointer_rtx);
829
830 /* Mark the static chain as clobbered here so life information
831 doesn't get messed up for it. */
832 chain = targetm.calls.static_chain (current_function_decl, true);
833 if (chain && REG_P (chain))
834 emit_clobber (chain);
835
836 /* Now put in the code to restore the frame pointer, and argument
837 pointer, if needed. */
838 if (! targetm.have_nonlocal_goto ())
839 {
840 /* First adjust our frame pointer to its actual value. It was
841 previously set to the start of the virtual area corresponding to
842 the stacked variables when we branched here and now needs to be
843 adjusted to the actual hardware fp value.
844
845 Assignments to virtual registers are converted by
846 instantiate_virtual_regs into the corresponding assignment
847 to the underlying register (fp in this case) that makes
848 the original assignment true.
849 So the following insn will actually be decrementing fp by
850 STARTING_FRAME_OFFSET. */
851 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
852
853 /* Restoring the frame pointer also modifies the hard frame pointer.
854 Mark it used (so that the previous assignment remains live once
855 the frame pointer is eliminated) and clobbered (to represent the
856 implicit update from the assignment). */
857 emit_use (hard_frame_pointer_rtx);
858 emit_clobber (hard_frame_pointer_rtx);
859 }
860
861 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
862 {
863 #ifdef ELIMINABLE_REGS
864 /* If the argument pointer can be eliminated in favor of the
865 frame pointer, we don't need to restore it. We assume here
866 that if such an elimination is present, it can always be used.
867 This is the case on all known machines; if we don't make this
868 assumption, we do unnecessary saving on many machines. */
869 size_t i;
870 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
871
872 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
873 if (elim_regs[i].from == ARG_POINTER_REGNUM
874 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
875 break;
876
877 if (i == ARRAY_SIZE (elim_regs))
878 #endif
879 {
880 /* Now restore our arg pointer from the address at which it
881 was saved in our stack frame. */
882 emit_move_insn (crtl->args.internal_arg_pointer,
883 copy_to_reg (get_arg_pointer_save_area ()));
884 }
885 }
886
887 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
888 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
889 else if (targetm.have_nonlocal_goto_receiver ())
890 emit_insn (targetm.gen_nonlocal_goto_receiver ());
891 else
892 { /* Nothing */ }
893
894 /* We must not allow the code we just generated to be reordered by
895 scheduling. Specifically, the update of the frame pointer must
896 happen immediately, not later. */
897 emit_insn (gen_blockage ());
898 }
899
900 /* __builtin_longjmp is passed a pointer to an array of five words (not
901 all will be used on all machines). It operates similarly to the C
902 library function of the same name, but is more efficient. Much of
903 the code below is copied from the handling of non-local gotos. */
904
905 static void
906 expand_builtin_longjmp (rtx buf_addr, rtx value)
907 {
908 rtx fp, lab, stack;
909 rtx_insn *insn, *last;
910 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
911
912 /* DRAP is needed for stack realign if longjmp is expanded to current
913 function */
914 if (SUPPORTS_STACK_ALIGNMENT)
915 crtl->need_drap = true;
916
917 if (setjmp_alias_set == -1)
918 setjmp_alias_set = new_alias_set ();
919
920 buf_addr = convert_memory_address (Pmode, buf_addr);
921
922 buf_addr = force_reg (Pmode, buf_addr);
923
924 /* We require that the user pass a second argument of 1, because
925 that is what builtin_setjmp will return. */
926 gcc_assert (value == const1_rtx);
927
928 last = get_last_insn ();
929 if (targetm.have_builtin_longjmp ())
930 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
931 else
932 {
933 fp = gen_rtx_MEM (Pmode, buf_addr);
934 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
935 GET_MODE_SIZE (Pmode)));
936
937 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
938 2 * GET_MODE_SIZE (Pmode)));
939 set_mem_alias_set (fp, setjmp_alias_set);
940 set_mem_alias_set (lab, setjmp_alias_set);
941 set_mem_alias_set (stack, setjmp_alias_set);
942
943 /* Pick up FP, label, and SP from the block and jump. This code is
944 from expand_goto in stmt.c; see there for detailed comments. */
945 if (targetm.have_nonlocal_goto ())
946 /* We have to pass a value to the nonlocal_goto pattern that will
947 get copied into the static_chain pointer, but it does not matter
948 what that value is, because builtin_setjmp does not use it. */
949 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
950 else
951 {
952 lab = copy_to_reg (lab);
953
954 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
955 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
956
957 emit_move_insn (hard_frame_pointer_rtx, fp);
958 emit_stack_restore (SAVE_NONLOCAL, stack);
959
960 emit_use (hard_frame_pointer_rtx);
961 emit_use (stack_pointer_rtx);
962 emit_indirect_jump (lab);
963 }
964 }
965
966 /* Search backwards and mark the jump insn as a non-local goto.
967 Note that this precludes the use of __builtin_longjmp to a
968 __builtin_setjmp target in the same function. However, we've
969 already cautioned the user that these functions are for
970 internal exception handling use only. */
971 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
972 {
973 gcc_assert (insn != last);
974
975 if (JUMP_P (insn))
976 {
977 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
978 break;
979 }
980 else if (CALL_P (insn))
981 break;
982 }
983 }
984
985 static inline bool
986 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
987 {
988 return (iter->i < iter->n);
989 }
990
991 /* This function validates the types of a function call argument list
992 against a specified list of tree_codes. If the last specifier is a 0,
993 that represents an ellipsis; otherwise the last specifier must be a
994 VOID_TYPE. */
995
996 static bool
997 validate_arglist (const_tree callexpr, ...)
998 {
999 enum tree_code code;
1000 bool res = 0;
1001 va_list ap;
1002 const_call_expr_arg_iterator iter;
1003 const_tree arg;
1004
1005 va_start (ap, callexpr);
1006 init_const_call_expr_arg_iterator (callexpr, &iter);
1007
1008 do
1009 {
1010 code = (enum tree_code) va_arg (ap, int);
1011 switch (code)
1012 {
1013 case 0:
1014 /* This signifies an ellipsis; any further arguments are all OK. */
1015 res = true;
1016 goto end;
1017 case VOID_TYPE:
1018 /* This signifies an endlink, if no arguments remain, return
1019 true, otherwise return false. */
1020 res = !more_const_call_expr_args_p (&iter);
1021 goto end;
1022 default:
1023 /* If no parameters remain or the parameter's code does not
1024 match the specified code, return false. Otherwise continue
1025 checking any remaining arguments. */
1026 arg = next_const_call_expr_arg (&iter);
1027 if (!validate_arg (arg, code))
1028 goto end;
1029 break;
1030 }
1031 }
1032 while (1);
1033
1034 /* We need gotos here since we can only have one VA_CLOSE in a
1035 function. */
1036 end: ;
1037 va_end (ap);
1038
1039 return res;
1040 }
1041
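/* Editor's illustrative note (not part of the original source): typical uses
   from this file are

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)  /* exactly two pointers  */
     validate_arglist (exp, POINTER_TYPE, 0)                        /* one pointer, then anything  */

   A trailing VOID_TYPE demands that no further arguments remain, while a
   trailing 0 accepts any extra arguments.  */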
1042 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1043 and the address of the save area. */
1044
1045 static rtx
1046 expand_builtin_nonlocal_goto (tree exp)
1047 {
1048 tree t_label, t_save_area;
1049 rtx r_label, r_save_area, r_fp, r_sp;
1050 rtx_insn *insn;
1051
1052 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1053 return NULL_RTX;
1054
1055 t_label = CALL_EXPR_ARG (exp, 0);
1056 t_save_area = CALL_EXPR_ARG (exp, 1);
1057
1058 r_label = expand_normal (t_label);
1059 r_label = convert_memory_address (Pmode, r_label);
1060 r_save_area = expand_normal (t_save_area);
1061 r_save_area = convert_memory_address (Pmode, r_save_area);
1062 /* Copy the address of the save location to a register just in case it was
1063 based on the frame pointer. */
1064 r_save_area = copy_to_reg (r_save_area);
1065 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1066 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1067 plus_constant (Pmode, r_save_area,
1068 GET_MODE_SIZE (Pmode)));
1069
1070 crtl->has_nonlocal_goto = 1;
1071
1072 /* ??? We no longer need to pass the static chain value, afaik. */
1073 if (targetm.have_nonlocal_goto ())
1074 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1075 else
1076 {
1077 r_label = copy_to_reg (r_label);
1078
1079 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1080 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1081
1082 /* Restore frame pointer for containing function. */
1083 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1084 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1085
1086 /* USE of hard_frame_pointer_rtx added for consistency;
1087 not clear if really needed. */
1088 emit_use (hard_frame_pointer_rtx);
1089 emit_use (stack_pointer_rtx);
1090
1091 /* If the architecture is using a GP register, we must
1092 conservatively assume that the target function makes use of it.
1093 The prologue of functions with nonlocal gotos must therefore
1094 initialize the GP register to the appropriate value, and we
1095 must then make sure that this value is live at the point
1096 of the jump. (Note that this doesn't necessarily apply
1097 to targets with a nonlocal_goto pattern; they are free
1098 to implement it in their own way. Note also that this is
1099 a no-op if the GP register is a global invariant.) */
1100 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1101 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1102 emit_use (pic_offset_table_rtx);
1103
1104 emit_indirect_jump (r_label);
1105 }
1106
1107 /* Search backwards to the jump insn and mark it as a
1108 non-local goto. */
1109 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1110 {
1111 if (JUMP_P (insn))
1112 {
1113 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1114 break;
1115 }
1116 else if (CALL_P (insn))
1117 break;
1118 }
1119
1120 return const0_rtx;
1121 }
1122
1123 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1124 (not all will be used on all machines) that was passed to __builtin_setjmp.
1125 It updates the stack pointer in that block to the current value. This is
1126 also called directly by the SJLJ exception handling code. */
1127
1128 void
1129 expand_builtin_update_setjmp_buf (rtx buf_addr)
1130 {
1131 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1132 rtx stack_save
1133 = gen_rtx_MEM (sa_mode,
1134 memory_address
1135 (sa_mode,
1136 plus_constant (Pmode, buf_addr,
1137 2 * GET_MODE_SIZE (Pmode))));
1138
1139 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1140 }
1141
1142 /* Expand a call to __builtin_prefetch. For a target that does not support
1143 data prefetch, evaluate the memory address argument in case it has side
1144 effects. */
1145
1146 static void
1147 expand_builtin_prefetch (tree exp)
1148 {
1149 tree arg0, arg1, arg2;
1150 int nargs;
1151 rtx op0, op1, op2;
1152
1153 if (!validate_arglist (exp, POINTER_TYPE, 0))
1154 return;
1155
1156 arg0 = CALL_EXPR_ARG (exp, 0);
1157
1158 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1159 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1160 locality). */
1161 nargs = call_expr_nargs (exp);
1162 if (nargs > 1)
1163 arg1 = CALL_EXPR_ARG (exp, 1);
1164 else
1165 arg1 = integer_zero_node;
1166 if (nargs > 2)
1167 arg2 = CALL_EXPR_ARG (exp, 2);
1168 else
1169 arg2 = integer_three_node;
1170
1171 /* Argument 0 is an address. */
1172 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1173
1174 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1175 if (TREE_CODE (arg1) != INTEGER_CST)
1176 {
1177 error ("second argument to %<__builtin_prefetch%> must be a constant");
1178 arg1 = integer_zero_node;
1179 }
1180 op1 = expand_normal (arg1);
1181 /* Argument 1 must be either zero or one. */
1182 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1183 {
1184 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1185 " using zero");
1186 op1 = const0_rtx;
1187 }
1188
1189 /* Argument 2 (locality) must be a compile-time constant int. */
1190 if (TREE_CODE (arg2) != INTEGER_CST)
1191 {
1192 error ("third argument to %<__builtin_prefetch%> must be a constant");
1193 arg2 = integer_zero_node;
1194 }
1195 op2 = expand_normal (arg2);
1196 /* Argument 2 must be 0, 1, 2, or 3. */
1197 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1198 {
1199 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1200 op2 = const0_rtx;
1201 }
1202
1203 if (targetm.have_prefetch ())
1204 {
1205 struct expand_operand ops[3];
1206
1207 create_address_operand (&ops[0], op0);
1208 create_integer_operand (&ops[1], INTVAL (op1));
1209 create_integer_operand (&ops[2], INTVAL (op2));
1210 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1211 return;
1212 }
1213
1214 /* Don't do anything with direct references to volatile memory, but
1215 generate code to handle other side effects. */
1216 if (!MEM_P (op0) && side_effects_p (op0))
1217 emit_insn (op0);
1218 }
1219
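/* Editor's illustrative note (not part of the original source): at the source
   level the builtin is used as, e.g.,

     __builtin_prefetch (p);          /* read prefetch, locality 3 (the defaults)  */
     __builtin_prefetch (p, 1, 0);    /* write prefetch, no temporal locality  */

   The read/write and locality arguments must be integer constants; invalid
   values are diagnosed above and replaced with zero.  */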
1220 /* Get a MEM rtx for expression EXP which is the address of an operand
1221 to be used in a string instruction (cmpstrsi, movmemsi, ...). LEN is
1222 the maximum length of the block of memory that might be accessed or
1223 NULL if unknown. */
1224
1225 static rtx
1226 get_memory_rtx (tree exp, tree len)
1227 {
1228 tree orig_exp = exp;
1229 rtx addr, mem;
1230
1231 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1232 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1233 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1234 exp = TREE_OPERAND (exp, 0);
1235
1236 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1237 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1238
1239 /* Get an expression we can use to find the attributes to assign to MEM.
1240 First remove any nops. */
1241 while (CONVERT_EXPR_P (exp)
1242 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1243 exp = TREE_OPERAND (exp, 0);
1244
1245 /* Build a MEM_REF representing the whole accessed area as a byte blob
1246 (as builtin stringops may alias with anything). */
1247 exp = fold_build2 (MEM_REF,
1248 build_array_type (char_type_node,
1249 build_range_type (sizetype,
1250 size_one_node, len)),
1251 exp, build_int_cst (ptr_type_node, 0));
1252
1253 /* If the MEM_REF has no acceptable address, try to get the base object
1254 from the original address we got, and build an all-aliasing
1255 unknown-sized access to that one. */
1256 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1257 set_mem_attributes (mem, exp, 0);
1258 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1259 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1260 0))))
1261 {
1262 exp = build_fold_addr_expr (exp);
1263 exp = fold_build2 (MEM_REF,
1264 build_array_type (char_type_node,
1265 build_range_type (sizetype,
1266 size_zero_node,
1267 NULL)),
1268 exp, build_int_cst (ptr_type_node, 0));
1269 set_mem_attributes (mem, exp, 0);
1270 }
1271 set_mem_alias_set (mem, 0);
1272 return mem;
1273 }
1274 \f
1275 /* Built-in functions to perform an untyped call and return. */
1276
1277 #define apply_args_mode \
1278 (this_target_builtins->x_apply_args_mode)
1279 #define apply_result_mode \
1280 (this_target_builtins->x_apply_result_mode)
1281
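/* Editor's illustrative note (not part of the original source): these
   expanders implement the documented GCC "constructing calls" builtins,
   used roughly as

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (res);

   where target_fn and the 64-byte argument block size are placeholders
   chosen purely for illustration.  */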
1282 /* Return the size required for the block returned by __builtin_apply_args,
1283 and initialize apply_args_mode. */
1284
1285 static int
1286 apply_args_size (void)
1287 {
1288 static int size = -1;
1289 int align;
1290 unsigned int regno;
1291 machine_mode mode;
1292
1293 /* The values computed by this function never change. */
1294 if (size < 0)
1295 {
1296 /* The first value is the incoming arg-pointer. */
1297 size = GET_MODE_SIZE (Pmode);
1298
1299 /* The second value is the structure value address unless this is
1300 passed as an "invisible" first argument. */
1301 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1302 size += GET_MODE_SIZE (Pmode);
1303
1304 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1305 if (FUNCTION_ARG_REGNO_P (regno))
1306 {
1307 mode = targetm.calls.get_raw_arg_mode (regno);
1308
1309 gcc_assert (mode != VOIDmode);
1310
1311 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1312 if (size % align != 0)
1313 size = CEIL (size, align) * align;
1314 size += GET_MODE_SIZE (mode);
1315 apply_args_mode[regno] = mode;
1316 }
1317 else
1318 {
1319 apply_args_mode[regno] = VOIDmode;
1320 }
1321 }
1322 return size;
1323 }
1324
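/* Editor's illustrative note (not part of the original source): the block
   sized here starts with a Pmode slot for the incoming argument pointer,
   optionally followed by a Pmode slot for the structure value address, and
   then one slot (aligned to its mode) per register for which
   FUNCTION_ARG_REGNO_P holds; expand_builtin_apply_args_1 fills it in the
   same order.  */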
1325 /* Return the size required for the block returned by __builtin_apply,
1326 and initialize apply_result_mode. */
1327
1328 static int
1329 apply_result_size (void)
1330 {
1331 static int size = -1;
1332 int align, regno;
1333 machine_mode mode;
1334
1335 /* The values computed by this function never change. */
1336 if (size < 0)
1337 {
1338 size = 0;
1339
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if (targetm.calls.function_value_regno_p (regno))
1342 {
1343 mode = targetm.calls.get_raw_result_mode (regno);
1344
1345 gcc_assert (mode != VOIDmode);
1346
1347 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1348 if (size % align != 0)
1349 size = CEIL (size, align) * align;
1350 size += GET_MODE_SIZE (mode);
1351 apply_result_mode[regno] = mode;
1352 }
1353 else
1354 apply_result_mode[regno] = VOIDmode;
1355
1356 /* Allow targets that use untyped_call and untyped_return to override
1357 the size so that machine-specific information can be stored here. */
1358 #ifdef APPLY_RESULT_SIZE
1359 size = APPLY_RESULT_SIZE;
1360 #endif
1361 }
1362 return size;
1363 }
1364
1365 /* Create a vector describing the result block RESULT. If SAVEP is true,
1366 the result block is used to save the values; otherwise it is used to
1367 restore the values. */
1368
1369 static rtx
1370 result_vector (int savep, rtx result)
1371 {
1372 int regno, size, align, nelts;
1373 machine_mode mode;
1374 rtx reg, mem;
1375 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1376
1377 size = nelts = 0;
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1380 {
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1385 mem = adjust_address (result, mode, size);
1386 savevec[nelts++] = (savep
1387 ? gen_rtx_SET (mem, reg)
1388 : gen_rtx_SET (reg, mem));
1389 size += GET_MODE_SIZE (mode);
1390 }
1391 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1392 }
1393
1394 /* Save the state required to perform an untyped call with the same
1395 arguments as were passed to the current function. */
1396
1397 static rtx
1398 expand_builtin_apply_args_1 (void)
1399 {
1400 rtx registers, tem;
1401 int size, align, regno;
1402 machine_mode mode;
1403 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1404
1405 /* Create a block where the arg-pointer, structure value address,
1406 and argument registers can be saved. */
1407 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1408
1409 /* Walk past the arg-pointer and structure value address. */
1410 size = GET_MODE_SIZE (Pmode);
1411 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1412 size += GET_MODE_SIZE (Pmode);
1413
1414 /* Save each register used in calling a function to the block. */
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1416 if ((mode = apply_args_mode[regno]) != VOIDmode)
1417 {
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
1421
1422 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1423
1424 emit_move_insn (adjust_address (registers, mode, size), tem);
1425 size += GET_MODE_SIZE (mode);
1426 }
1427
1428 /* Save the arg pointer to the block. */
1429 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1430 /* We need the pointer as the caller actually passed it to us, not
1431 as we might have pretended it was passed. Make sure it's a valid
1432 operand, as emit_move_insn isn't expected to handle a PLUS. */
1433 if (STACK_GROWS_DOWNWARD)
1434 tem
1435 = force_operand (plus_constant (Pmode, tem,
1436 crtl->args.pretend_args_size),
1437 NULL_RTX);
1438 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1439
1440 size = GET_MODE_SIZE (Pmode);
1441
1442 /* Save the structure value address unless this is passed as an
1443 "invisible" first argument. */
1444 if (struct_incoming_value)
1445 {
1446 emit_move_insn (adjust_address (registers, Pmode, size),
1447 copy_to_reg (struct_incoming_value));
1448 size += GET_MODE_SIZE (Pmode);
1449 }
1450
1451 /* Return the address of the block. */
1452 return copy_addr_to_reg (XEXP (registers, 0));
1453 }
1454
1455 /* __builtin_apply_args returns a block of memory allocated on
1456 the stack into which is stored the arg pointer, structure
1457 value address, static chain, and all the registers that might
1458 possibly be used in performing a function call. The code is
1459 moved to the start of the function so the incoming values are
1460 saved. */
1461
1462 static rtx
1463 expand_builtin_apply_args (void)
1464 {
1465 /* Don't do __builtin_apply_args more than once in a function.
1466 Save the result of the first call and reuse it. */
1467 if (apply_args_value != 0)
1468 return apply_args_value;
1469 {
1470 /* When this function is called, it means that registers must be
1471 saved on entry to this function. So we migrate the
1472 call to the first insn of this function. */
1473 rtx temp;
1474
1475 start_sequence ();
1476 temp = expand_builtin_apply_args_1 ();
1477 rtx_insn *seq = get_insns ();
1478 end_sequence ();
1479
1480 apply_args_value = temp;
1481
1482 /* Put the insns after the NOTE that starts the function.
1483 If this is inside a start_sequence, make the outer-level insn
1484 chain current, so the code is placed at the start of the
1485 function. If internal_arg_pointer is a non-virtual pseudo,
1486 it needs to be placed after the function that initializes
1487 that pseudo. */
1488 push_topmost_sequence ();
1489 if (REG_P (crtl->args.internal_arg_pointer)
1490 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1491 emit_insn_before (seq, parm_birth_insn);
1492 else
1493 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1494 pop_topmost_sequence ();
1495 return temp;
1496 }
1497 }
1498
1499 /* Perform an untyped call and save the state required to perform an
1500 untyped return of whatever value was returned by the given function. */
1501
1502 static rtx
1503 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1504 {
1505 int size, align, regno;
1506 machine_mode mode;
1507 rtx incoming_args, result, reg, dest, src;
1508 rtx_call_insn *call_insn;
1509 rtx old_stack_level = 0;
1510 rtx call_fusage = 0;
1511 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1512
1513 arguments = convert_memory_address (Pmode, arguments);
1514
1515 /* Create a block where the return registers can be saved. */
1516 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1517
1518 /* Fetch the arg pointer from the ARGUMENTS block. */
1519 incoming_args = gen_reg_rtx (Pmode);
1520 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1521 if (!STACK_GROWS_DOWNWARD)
1522 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1523 incoming_args, 0, OPTAB_LIB_WIDEN);
1524
1525 /* Push a new argument block and copy the arguments. Do not allow
1526 the (potential) memcpy call below to interfere with our stack
1527 manipulations. */
1528 do_pending_stack_adjust ();
1529 NO_DEFER_POP;
1530
1531 /* Save the stack with nonlocal if available. */
1532 if (targetm.have_save_stack_nonlocal ())
1533 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1534 else
1535 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1536
1537 /* Allocate a block of memory onto the stack and copy the memory
1538 arguments to the outgoing arguments address. We can pass TRUE
1539 as the 4th argument because we just saved the stack pointer
1540 and will restore it right after the call. */
1541 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1542
1543 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1544 may have already set current_function_calls_alloca to true.
1545 current_function_calls_alloca won't be set if argsize is zero,
1546 so we have to guarantee need_drap is true here. */
1547 if (SUPPORTS_STACK_ALIGNMENT)
1548 crtl->need_drap = true;
1549
1550 dest = virtual_outgoing_args_rtx;
1551 if (!STACK_GROWS_DOWNWARD)
1552 {
1553 if (CONST_INT_P (argsize))
1554 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1555 else
1556 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1557 }
1558 dest = gen_rtx_MEM (BLKmode, dest);
1559 set_mem_align (dest, PARM_BOUNDARY);
1560 src = gen_rtx_MEM (BLKmode, incoming_args);
1561 set_mem_align (src, PARM_BOUNDARY);
1562 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1563
1564 /* Refer to the argument block. */
1565 apply_args_size ();
1566 arguments = gen_rtx_MEM (BLKmode, arguments);
1567 set_mem_align (arguments, PARM_BOUNDARY);
1568
1569 /* Walk past the arg-pointer and structure value address. */
1570 size = GET_MODE_SIZE (Pmode);
1571 if (struct_value)
1572 size += GET_MODE_SIZE (Pmode);
1573
1574 /* Restore each of the registers previously saved. Make USE insns
1575 for each of these registers for use in making the call. */
1576 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1577 if ((mode = apply_args_mode[regno]) != VOIDmode)
1578 {
1579 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1580 if (size % align != 0)
1581 size = CEIL (size, align) * align;
1582 reg = gen_rtx_REG (mode, regno);
1583 emit_move_insn (reg, adjust_address (arguments, mode, size));
1584 use_reg (&call_fusage, reg);
1585 size += GET_MODE_SIZE (mode);
1586 }
1587
1588 /* Restore the structure value address unless this is passed as an
1589 "invisible" first argument. */
1590 size = GET_MODE_SIZE (Pmode);
1591 if (struct_value)
1592 {
1593 rtx value = gen_reg_rtx (Pmode);
1594 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1595 emit_move_insn (struct_value, value);
1596 if (REG_P (struct_value))
1597 use_reg (&call_fusage, struct_value);
1598 size += GET_MODE_SIZE (Pmode);
1599 }
1600
1601 /* All arguments and registers used for the call are set up by now! */
1602 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1603
1604 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work is needed,
1605 and we don't want to load it into a register as an optimization,
1606 because prepare_call_address already did it if it should be done. */
1607 if (GET_CODE (function) != SYMBOL_REF)
1608 function = memory_address (FUNCTION_MODE, function);
1609
1610 /* Generate the actual call instruction and save the return value. */
1611 if (targetm.have_untyped_call ())
1612 {
1613 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1614 emit_call_insn (targetm.gen_untyped_call (mem, result,
1615 result_vector (1, result)));
1616 }
1617 else if (targetm.have_call_value ())
1618 {
1619 rtx valreg = 0;
1620
1621 /* Locate the unique return register. It is not possible to
1622 express a call that sets more than one return register using
1623 call_value; use untyped_call for that. In fact, untyped_call
1624 only needs to save the return registers in the given block. */
1625 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1626 if ((mode = apply_result_mode[regno]) != VOIDmode)
1627 {
1628 gcc_assert (!valreg); /* have_untyped_call required. */
1629
1630 valreg = gen_rtx_REG (mode, regno);
1631 }
1632
1633 emit_insn (targetm.gen_call_value (valreg,
1634 gen_rtx_MEM (FUNCTION_MODE, function),
1635 const0_rtx, NULL_RTX, const0_rtx));
1636
1637 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1638 }
1639 else
1640 gcc_unreachable ();
1641
1642 /* Find the CALL insn we just emitted, and attach the register usage
1643 information. */
1644 call_insn = last_call_insn ();
1645 add_function_usage_to (call_insn, call_fusage);
1646
1647 /* Restore the stack. */
1648 if (targetm.have_save_stack_nonlocal ())
1649 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1650 else
1651 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1652 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1653
1654 OK_DEFER_POP;
1655
1656 /* Return the address of the result block. */
1657 result = copy_addr_to_reg (XEXP (result, 0));
1658 return convert_memory_address (ptr_mode, result);
1659 }
1660
1661 /* Perform an untyped return. */
1662
1663 static void
1664 expand_builtin_return (rtx result)
1665 {
1666 int size, align, regno;
1667 machine_mode mode;
1668 rtx reg;
1669 rtx_insn *call_fusage = 0;
1670
1671 result = convert_memory_address (Pmode, result);
1672
1673 apply_result_size ();
1674 result = gen_rtx_MEM (BLKmode, result);
1675
1676 if (targetm.have_untyped_return ())
1677 {
1678 rtx vector = result_vector (0, result);
1679 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1680 emit_barrier ();
1681 return;
1682 }
1683
1684 /* Restore the return value and note that each value is used. */
1685 size = 0;
1686 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1687 if ((mode = apply_result_mode[regno]) != VOIDmode)
1688 {
1689 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1690 if (size % align != 0)
1691 size = CEIL (size, align) * align;
1692 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1693 emit_move_insn (reg, adjust_address (result, mode, size));
1694
1695 push_to_sequence (call_fusage);
1696 emit_use (reg);
1697 call_fusage = get_insns ();
1698 end_sequence ();
1699 size += GET_MODE_SIZE (mode);
1700 }
1701
1702 /* Put the USE insns before the return. */
1703 emit_insn (call_fusage);
1704
1705 /* Return whatever values were restored by jumping directly to the end
1706 of the function. */
1707 expand_naked_return ();
1708 }
1709
1710 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1711
1712 static enum type_class
1713 type_to_class (tree type)
1714 {
1715 switch (TREE_CODE (type))
1716 {
1717 case VOID_TYPE: return void_type_class;
1718 case INTEGER_TYPE: return integer_type_class;
1719 case ENUMERAL_TYPE: return enumeral_type_class;
1720 case BOOLEAN_TYPE: return boolean_type_class;
1721 case POINTER_TYPE: return pointer_type_class;
1722 case REFERENCE_TYPE: return reference_type_class;
1723 case OFFSET_TYPE: return offset_type_class;
1724 case REAL_TYPE: return real_type_class;
1725 case COMPLEX_TYPE: return complex_type_class;
1726 case FUNCTION_TYPE: return function_type_class;
1727 case METHOD_TYPE: return method_type_class;
1728 case RECORD_TYPE: return record_type_class;
1729 case UNION_TYPE:
1730 case QUAL_UNION_TYPE: return union_type_class;
1731 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1732 ? string_type_class : array_type_class);
1733 case LANG_TYPE: return lang_type_class;
1734 default: return no_type_class;
1735 }
1736 }
1737
1738 /* Expand a call EXP to __builtin_classify_type. */
1739
1740 static rtx
1741 expand_builtin_classify_type (tree exp)
1742 {
1743 if (call_expr_nargs (exp))
1744 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1745 return GEN_INT (no_type_class);
1746 }
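
/* A minimal user-level sketch of the above (hypothetical example, not part
   of the expander): __builtin_classify_type folds to one of the typeclass.h
   constants computed by type_to_class.  */
#include <stdio.h>

int
classify_type_demo (void)
{
  double d = 0.0;
  int *p = 0;
  /* Both calls fold to compile-time constants: real_type_class for D and
     pointer_type_class for P.  */
  printf ("%d %d\n", __builtin_classify_type (d), __builtin_classify_type (p));
  return 0;
}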
1747
1748 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1749 determines which among a set of three builtin math functions is
1750 appropriate for a given type mode. The `F' and `L' cases are
1751 automatically generated from the `double' case. */
1752 #define CASE_MATHFN(MATHFN) \
1753 CASE_CFN_##MATHFN: \
1754 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1755 fcodel = BUILT_IN_##MATHFN##L ; break;
1756 /* Similar to above, but appends _R after any F/L suffix. */
1757 #define CASE_MATHFN_REENT(MATHFN) \
1758 case CFN_BUILT_IN_##MATHFN##_R: \
1759 case CFN_BUILT_IN_##MATHFN##F_R: \
1760 case CFN_BUILT_IN_##MATHFN##L_R: \
1761 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1762 fcodel = BUILT_IN_##MATHFN##L_R ; break;
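
/* For reference, a worked expansion of the macro above: CASE_MATHFN (SQRT)
   expands to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so each CASE_MATHFN line below covers the double, float and long double
   variants of one math function.  */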
1763
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on function codes; it does not guarantee
1767 that the target actually has an implementation of the function. */
1768
1769 static built_in_function
1770 mathfn_built_in_2 (tree type, combined_fn fn)
1771 {
1772 built_in_function fcode, fcodef, fcodel;
1773
1774 switch (fn)
1775 {
1776 CASE_MATHFN (ACOS)
1777 CASE_MATHFN (ACOSH)
1778 CASE_MATHFN (ASIN)
1779 CASE_MATHFN (ASINH)
1780 CASE_MATHFN (ATAN)
1781 CASE_MATHFN (ATAN2)
1782 CASE_MATHFN (ATANH)
1783 CASE_MATHFN (CBRT)
1784 CASE_MATHFN (CEIL)
1785 CASE_MATHFN (CEXPI)
1786 CASE_MATHFN (COPYSIGN)
1787 CASE_MATHFN (COS)
1788 CASE_MATHFN (COSH)
1789 CASE_MATHFN (DREM)
1790 CASE_MATHFN (ERF)
1791 CASE_MATHFN (ERFC)
1792 CASE_MATHFN (EXP)
1793 CASE_MATHFN (EXP10)
1794 CASE_MATHFN (EXP2)
1795 CASE_MATHFN (EXPM1)
1796 CASE_MATHFN (FABS)
1797 CASE_MATHFN (FDIM)
1798 CASE_MATHFN (FLOOR)
1799 CASE_MATHFN (FMA)
1800 CASE_MATHFN (FMAX)
1801 CASE_MATHFN (FMIN)
1802 CASE_MATHFN (FMOD)
1803 CASE_MATHFN (FREXP)
1804 CASE_MATHFN (GAMMA)
1805 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1806 CASE_MATHFN (HUGE_VAL)
1807 CASE_MATHFN (HYPOT)
1808 CASE_MATHFN (ILOGB)
1809 CASE_MATHFN (ICEIL)
1810 CASE_MATHFN (IFLOOR)
1811 CASE_MATHFN (INF)
1812 CASE_MATHFN (IRINT)
1813 CASE_MATHFN (IROUND)
1814 CASE_MATHFN (ISINF)
1815 CASE_MATHFN (J0)
1816 CASE_MATHFN (J1)
1817 CASE_MATHFN (JN)
1818 CASE_MATHFN (LCEIL)
1819 CASE_MATHFN (LDEXP)
1820 CASE_MATHFN (LFLOOR)
1821 CASE_MATHFN (LGAMMA)
1822 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1823 CASE_MATHFN (LLCEIL)
1824 CASE_MATHFN (LLFLOOR)
1825 CASE_MATHFN (LLRINT)
1826 CASE_MATHFN (LLROUND)
1827 CASE_MATHFN (LOG)
1828 CASE_MATHFN (LOG10)
1829 CASE_MATHFN (LOG1P)
1830 CASE_MATHFN (LOG2)
1831 CASE_MATHFN (LOGB)
1832 CASE_MATHFN (LRINT)
1833 CASE_MATHFN (LROUND)
1834 CASE_MATHFN (MODF)
1835 CASE_MATHFN (NAN)
1836 CASE_MATHFN (NANS)
1837 CASE_MATHFN (NEARBYINT)
1838 CASE_MATHFN (NEXTAFTER)
1839 CASE_MATHFN (NEXTTOWARD)
1840 CASE_MATHFN (POW)
1841 CASE_MATHFN (POWI)
1842 CASE_MATHFN (POW10)
1843 CASE_MATHFN (REMAINDER)
1844 CASE_MATHFN (REMQUO)
1845 CASE_MATHFN (RINT)
1846 CASE_MATHFN (ROUND)
1847 CASE_MATHFN (SCALB)
1848 CASE_MATHFN (SCALBLN)
1849 CASE_MATHFN (SCALBN)
1850 CASE_MATHFN (SIGNBIT)
1851 CASE_MATHFN (SIGNIFICAND)
1852 CASE_MATHFN (SIN)
1853 CASE_MATHFN (SINCOS)
1854 CASE_MATHFN (SINH)
1855 CASE_MATHFN (SQRT)
1856 CASE_MATHFN (TAN)
1857 CASE_MATHFN (TANH)
1858 CASE_MATHFN (TGAMMA)
1859 CASE_MATHFN (TRUNC)
1860 CASE_MATHFN (Y0)
1861 CASE_MATHFN (Y1)
1862 CASE_MATHFN (YN)
1863
1864 default:
1865 return END_BUILTINS;
1866 }
1867
1868 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1869 return fcode;
1870 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1871 return fcodef;
1872 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1873 return fcodel;
1874 else
1875 return END_BUILTINS;
1876 }
1877
1878 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1879 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1880 otherwise use the explicit declaration. If we can't do the conversion,
1881 return null. */
1882
1883 static tree
1884 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1885 {
1886 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1887 if (fcode2 == END_BUILTINS)
1888 return NULL_TREE;
1889
1890 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1891 return NULL_TREE;
1892
1893 return builtin_decl_explicit (fcode2);
1894 }
1895
1896 /* Like mathfn_built_in_1, but always use the implicit declarations. */
1897
1898 tree
1899 mathfn_built_in (tree type, combined_fn fn)
1900 {
1901 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1902 }
1903
1904 /* Like mathfn_built_in_1, but take a built_in_function and
1905 always use the implicit declarations. */
1906
1907 tree
1908 mathfn_built_in (tree type, enum built_in_function fn)
1909 {
1910 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1911 }
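
/* A usage sketch (hypothetical caller):

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   maps BUILT_IN_SQRT to its float variant per mathfn_built_in_2, so FN is
   the declaration of sqrtf, or NULL_TREE if the front end has not made
   sqrtf implicitly available.  */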
1912
1913 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1914 return its code; otherwise return IFN_LAST. Note that this function
1915 only tests whether the function is defined in internals.def, not whether
1916 it is actually available on the target. */
1917
1918 internal_fn
1919 associated_internal_fn (tree fndecl)
1920 {
1921 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1922 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1923 switch (DECL_FUNCTION_CODE (fndecl))
1924 {
1925 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1926 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1927 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1928 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1929 #include "internal-fn.def"
1930
1931 CASE_FLT_FN (BUILT_IN_POW10):
1932 return IFN_EXP10;
1933
1934 CASE_FLT_FN (BUILT_IN_DREM):
1935 return IFN_REMAINDER;
1936
1937 CASE_FLT_FN (BUILT_IN_SCALBN):
1938 CASE_FLT_FN (BUILT_IN_SCALBLN):
1939 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1940 return IFN_LDEXP;
1941 return IFN_LAST;
1942
1943 default:
1944 return IFN_LAST;
1945 }
1946 }
1947
1948 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1949 on the current target by a call to an internal function, return the
1950 code of that internal function, otherwise return IFN_LAST. The caller
1951 is responsible for ensuring that any side-effects of the built-in
1952 call are dealt with correctly. E.g. if CALL sets errno, the caller
1953 must decide that the errno result isn't needed or make it available
1954 in some other way. */
1955
1956 internal_fn
1957 replacement_internal_fn (gcall *call)
1958 {
1959 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1960 {
1961 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1962 if (ifn != IFN_LAST)
1963 {
1964 tree_pair types = direct_internal_fn_types (ifn, call);
1965 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1966 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1967 return ifn;
1968 }
1969 }
1970 return IFN_LAST;
1971 }
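
/* A worked example of the replacement above (assuming the usual
   internal-fn.def entries): for a GIMPLE statement

     _1 = __builtin_sqrtf (x_2);

   associated_internal_fn maps BUILT_IN_SQRTF to IFN_SQRT, and if the target
   can expand a float sqrt directly for the optimization type of the
   containing block, replacement_internal_fn returns IFN_SQRT so the call
   can be rewritten as the internal call _1 = .SQRT (x_2);.  */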
1972
1973 /* Expand a call to the builtin ternary math functions (fma).
1974 Return NULL_RTX if a normal call should be emitted rather than expanding the
1975 function in-line. EXP is the expression that is a call to the builtin
1976 function; if convenient, the result should be placed in TARGET.
1977 SUBTARGET may be used as the target for computing one of EXP's
1978 operands. */
1979
1980 static rtx
1981 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1982 {
1983 optab builtin_optab;
1984 rtx op0, op1, op2, result;
1985 rtx_insn *insns;
1986 tree fndecl = get_callee_fndecl (exp);
1987 tree arg0, arg1, arg2;
1988 machine_mode mode;
1989
1990 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1991 return NULL_RTX;
1992
1993 arg0 = CALL_EXPR_ARG (exp, 0);
1994 arg1 = CALL_EXPR_ARG (exp, 1);
1995 arg2 = CALL_EXPR_ARG (exp, 2);
1996
1997 switch (DECL_FUNCTION_CODE (fndecl))
1998 {
1999 CASE_FLT_FN (BUILT_IN_FMA):
2000 builtin_optab = fma_optab; break;
2001 default:
2002 gcc_unreachable ();
2003 }
2004
2005 /* Make a suitable register to place result in. */
2006 mode = TYPE_MODE (TREE_TYPE (exp));
2007
2008 /* Before working hard, check whether the instruction is available. */
2009 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2010 return NULL_RTX;
2011
2012 result = gen_reg_rtx (mode);
2013
2014 /* Always stabilize the argument list. */
2015 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2016 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2017 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2018
2019 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2020 op1 = expand_normal (arg1);
2021 op2 = expand_normal (arg2);
2022
2023 start_sequence ();
2024
2025 /* Compute into RESULT.
2026 Set RESULT to wherever the result comes back. */
2027 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2028 result, 0);
2029
2030 /* If we were unable to expand via the builtin, stop the sequence
2031 (without outputting the insns) and call to the library function
2032 with the stabilized argument list. */
2033 if (result == 0)
2034 {
2035 end_sequence ();
2036 return expand_call (exp, target, target == const0_rtx);
2037 }
2038
2039 /* Output the entire sequence. */
2040 insns = get_insns ();
2041 end_sequence ();
2042 emit_insn (insns);
2043
2044 return result;
2045 }
2046
2047 /* Expand a call to the builtin sin and cos math functions.
2048 Return NULL_RTX if a normal call should be emitted rather than expanding the
2049 function in-line. EXP is the expression that is a call to the builtin
2050 function; if convenient, the result should be placed in TARGET.
2051 SUBTARGET may be used as the target for computing one of EXP's
2052 operands. */
2053
2054 static rtx
2055 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2056 {
2057 optab builtin_optab;
2058 rtx op0;
2059 rtx_insn *insns;
2060 tree fndecl = get_callee_fndecl (exp);
2061 machine_mode mode;
2062 tree arg;
2063
2064 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2065 return NULL_RTX;
2066
2067 arg = CALL_EXPR_ARG (exp, 0);
2068
2069 switch (DECL_FUNCTION_CODE (fndecl))
2070 {
2071 CASE_FLT_FN (BUILT_IN_SIN):
2072 CASE_FLT_FN (BUILT_IN_COS):
2073 builtin_optab = sincos_optab; break;
2074 default:
2075 gcc_unreachable ();
2076 }
2077
2078 /* Make a suitable register to place result in. */
2079 mode = TYPE_MODE (TREE_TYPE (exp));
2080
2081 /* Check if the sincos insn is available; otherwise fall back
2082 to the sin or cos insn. */
2083 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2084 switch (DECL_FUNCTION_CODE (fndecl))
2085 {
2086 CASE_FLT_FN (BUILT_IN_SIN):
2087 builtin_optab = sin_optab; break;
2088 CASE_FLT_FN (BUILT_IN_COS):
2089 builtin_optab = cos_optab; break;
2090 default:
2091 gcc_unreachable ();
2092 }
2093
2094 /* Before working hard, check whether the instruction is available. */
2095 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2096 {
2097 rtx result = gen_reg_rtx (mode);
2098
2099 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2100 need to expand the argument again. This way, we will not perform
2101 side-effects more than once. */
2102 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2103
2104 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2105
2106 start_sequence ();
2107
2108 /* Compute into RESULT.
2109 Set RESULT to wherever the result comes back. */
2110 if (builtin_optab == sincos_optab)
2111 {
2112 int ok;
2113
2114 switch (DECL_FUNCTION_CODE (fndecl))
2115 {
2116 CASE_FLT_FN (BUILT_IN_SIN):
2117 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2118 break;
2119 CASE_FLT_FN (BUILT_IN_COS):
2120 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2121 break;
2122 default:
2123 gcc_unreachable ();
2124 }
2125 gcc_assert (ok);
2126 }
2127 else
2128 result = expand_unop (mode, builtin_optab, op0, result, 0);
2129
2130 if (result != 0)
2131 {
2132 /* Output the entire sequence. */
2133 insns = get_insns ();
2134 end_sequence ();
2135 emit_insn (insns);
2136 return result;
2137 }
2138
2139 /* If we were unable to expand via the builtin, stop the sequence
2140 (without outputting the insns) and call to the library function
2141 with the stabilized argument list. */
2142 end_sequence ();
2143 }
2144
2145 return expand_call (exp, target, target == const0_rtx);
2146 }
2147
2148 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2149 return an RTL instruction code that implements the functionality.
2150 If that isn't possible or available return CODE_FOR_nothing. */
2151
2152 static enum insn_code
2153 interclass_mathfn_icode (tree arg, tree fndecl)
2154 {
2155 bool errno_set = false;
2156 optab builtin_optab = unknown_optab;
2157 machine_mode mode;
2158
2159 switch (DECL_FUNCTION_CODE (fndecl))
2160 {
2161 CASE_FLT_FN (BUILT_IN_ILOGB):
2162 errno_set = true; builtin_optab = ilogb_optab; break;
2163 CASE_FLT_FN (BUILT_IN_ISINF):
2164 builtin_optab = isinf_optab; break;
2165 case BUILT_IN_ISNORMAL:
2166 case BUILT_IN_ISFINITE:
2167 CASE_FLT_FN (BUILT_IN_FINITE):
2168 case BUILT_IN_FINITED32:
2169 case BUILT_IN_FINITED64:
2170 case BUILT_IN_FINITED128:
2171 case BUILT_IN_ISINFD32:
2172 case BUILT_IN_ISINFD64:
2173 case BUILT_IN_ISINFD128:
2174 /* These builtins have no optabs (yet). */
2175 break;
2176 default:
2177 gcc_unreachable ();
2178 }
2179
2180 /* There's no easy way to detect the case we need to set EDOM. */
2181 if (flag_errno_math && errno_set)
2182 return CODE_FOR_nothing;
2183
2184 /* Optab mode depends on the mode of the input argument. */
2185 mode = TYPE_MODE (TREE_TYPE (arg));
2186
2187 if (builtin_optab)
2188 return optab_handler (builtin_optab, mode);
2189 return CODE_FOR_nothing;
2190 }
2191
2192 /* Expand a call to one of the builtin math functions that operate on a
2193 floating-point argument and produce an integer result (ilogb, isinf,
2194 isnan, etc).
2195 Return 0 if a normal call should be emitted rather than expanding the
2196 function in-line. EXP is the expression that is a call to the builtin
2197 function; if convenient, the result should be placed in TARGET. */
2198
2199 static rtx
2200 expand_builtin_interclass_mathfn (tree exp, rtx target)
2201 {
2202 enum insn_code icode = CODE_FOR_nothing;
2203 rtx op0;
2204 tree fndecl = get_callee_fndecl (exp);
2205 machine_mode mode;
2206 tree arg;
2207
2208 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2209 return NULL_RTX;
2210
2211 arg = CALL_EXPR_ARG (exp, 0);
2212 icode = interclass_mathfn_icode (arg, fndecl);
2213 mode = TYPE_MODE (TREE_TYPE (arg));
2214
2215 if (icode != CODE_FOR_nothing)
2216 {
2217 struct expand_operand ops[1];
2218 rtx_insn *last = get_last_insn ();
2219 tree orig_arg = arg;
2220
2221 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2222 need to expand the argument again. This way, we will not perform
2223 side-effects more than once. */
2224 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2225
2226 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2227
2228 if (mode != GET_MODE (op0))
2229 op0 = convert_to_mode (mode, op0, 0);
2230
2231 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2232 if (maybe_legitimize_operands (icode, 0, 1, ops)
2233 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2234 return ops[0].value;
2235
2236 delete_insns_since (last);
2237 CALL_EXPR_ARG (exp, 0) = orig_arg;
2238 }
2239
2240 return NULL_RTX;
2241 }
2242
2243 /* Expand a call to the builtin sincos math function.
2244 Return NULL_RTX if a normal call should be emitted rather than expanding the
2245 function in-line. EXP is the expression that is a call to the builtin
2246 function. */
2247
2248 static rtx
2249 expand_builtin_sincos (tree exp)
2250 {
2251 rtx op0, op1, op2, target1, target2;
2252 machine_mode mode;
2253 tree arg, sinp, cosp;
2254 int result;
2255 location_t loc = EXPR_LOCATION (exp);
2256 tree alias_type, alias_off;
2257
2258 if (!validate_arglist (exp, REAL_TYPE,
2259 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2260 return NULL_RTX;
2261
2262 arg = CALL_EXPR_ARG (exp, 0);
2263 sinp = CALL_EXPR_ARG (exp, 1);
2264 cosp = CALL_EXPR_ARG (exp, 2);
2265
2266 /* Make a suitable register to place result in. */
2267 mode = TYPE_MODE (TREE_TYPE (arg));
2268
2269 /* Check if the sincos insn is available; otherwise emit the call. */
2270 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2271 return NULL_RTX;
2272
2273 target1 = gen_reg_rtx (mode);
2274 target2 = gen_reg_rtx (mode);
2275
2276 op0 = expand_normal (arg);
2277 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2278 alias_off = build_int_cst (alias_type, 0);
2279 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2280 sinp, alias_off));
2281 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2282 cosp, alias_off));
2283
2284 /* Compute into target1 and target2.
2285 Set TARGET to wherever the result comes back. */
2286 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2287 gcc_assert (result);
2288
2289 /* Move target1 and target2 to the memory locations indicated
2290 by op1 and op2. */
2291 emit_move_insn (op1, target1);
2292 emit_move_insn (op2, target2);
2293
2294 return const0_rtx;
2295 }
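
/* A user-level sketch (hypothetical example; sincos is the GNU libc
   extension and needs _GNU_SOURCE): when the target provides a sincos
   optab, the single call below may be expanded inline by the code above
   instead of going through the library.  */
#define _GNU_SOURCE
#include <math.h>

void
polar_to_cartesian (double r, double theta, double *x, double *y)
{
  double s, c;
  sincos (theta, &s, &c);
  *x = r * c;
  *y = r * s;
}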
2296
2297 /* Expand a call to the internal cexpi builtin to the sincos math function.
2298 EXP is the expression that is a call to the builtin function; if convenient,
2299 the result should be placed in TARGET. */
2300
2301 static rtx
2302 expand_builtin_cexpi (tree exp, rtx target)
2303 {
2304 tree fndecl = get_callee_fndecl (exp);
2305 tree arg, type;
2306 machine_mode mode;
2307 rtx op0, op1, op2;
2308 location_t loc = EXPR_LOCATION (exp);
2309
2310 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2311 return NULL_RTX;
2312
2313 arg = CALL_EXPR_ARG (exp, 0);
2314 type = TREE_TYPE (arg);
2315 mode = TYPE_MODE (TREE_TYPE (arg));
2316
2317 /* Try expanding via a sincos optab; fall back to emitting a libcall
2318 to sincos or cexp. One of them is sure to be available, because cexpi
2319 is only generated from sincos or cexp, or when either is available. */
2320 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2321 {
2322 op1 = gen_reg_rtx (mode);
2323 op2 = gen_reg_rtx (mode);
2324
2325 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2326
2327 /* Compute into op1 and op2. */
2328 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2329 }
2330 else if (targetm.libc_has_function (function_sincos))
2331 {
2332 tree call, fn = NULL_TREE;
2333 tree top1, top2;
2334 rtx op1a, op2a;
2335
2336 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2337 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2338 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2339 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2340 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2341 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2342 else
2343 gcc_unreachable ();
2344
2345 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2346 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2347 op1a = copy_addr_to_reg (XEXP (op1, 0));
2348 op2a = copy_addr_to_reg (XEXP (op2, 0));
2349 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2350 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2351
2352 /* Make sure not to fold the sincos call again. */
2353 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2354 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2355 call, 3, arg, top1, top2));
2356 }
2357 else
2358 {
2359 tree call, fn = NULL_TREE, narg;
2360 tree ctype = build_complex_type (type);
2361
2362 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2363 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2364 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2365 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2366 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2367 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2368 else
2369 gcc_unreachable ();
2370
2371 /* If we don't have a decl for cexp, create one. This is the
2372 friendliest fallback if the user calls __builtin_cexpi
2373 on a target without full C99 function support. */
2374 if (fn == NULL_TREE)
2375 {
2376 tree fntype;
2377 const char *name = NULL;
2378
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 name = "cexpf";
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 name = "cexp";
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 name = "cexpl";
2385
2386 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2387 fn = build_fn_decl (name, fntype);
2388 }
2389
2390 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2391 build_real (type, dconst0), arg);
2392
2393 /* Make sure not to fold the cexp call again. */
2394 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2395 return expand_expr (build_call_nary (ctype, call, 1, narg),
2396 target, VOIDmode, EXPAND_NORMAL);
2397 }
2398
2399 /* Now build the proper return type. */
2400 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2401 make_tree (TREE_TYPE (arg), op2),
2402 make_tree (TREE_TYPE (arg), op1)),
2403 target, VOIDmode, EXPAND_NORMAL);
2404 }
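
/* A summary sketch of the three strategies above for __builtin_cexpi (x),
   tried in this order:

     1. sincos optab:    compute cos (x) and sin (x) in registers;
     2. sincos libcall:  sincos (x, &s, &c), then COMPLEX_EXPR <c, s>;
     3. cexp libcall:    cexp (COMPLEX_EXPR <0.0, x>).

   All three yield cos (x) + i*sin (x).  */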
2405
2406 /* Conveniently construct a function call expression. FNDECL names the
2407 function to be called, N is the number of arguments, and the "..."
2408 parameters are the argument expressions. Unlike build_call_expr
2409 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2410
2411 static tree
2412 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2413 {
2414 va_list ap;
2415 tree fntype = TREE_TYPE (fndecl);
2416 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2417
2418 va_start (ap, n);
2419 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2420 va_end (ap);
2421 SET_EXPR_LOCATION (fn, loc);
2422 return fn;
2423 }
2424
2425 /* Expand a call to one of the builtin rounding functions gcc defines
2426 as an extension (lfloor and lceil). As these are gcc extensions we
2427 do not need to worry about setting errno to EDOM.
2428 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2429 EXP is the expression that is a call to the builtin function;
2430 if convenient, the result should be placed in TARGET. */
2431
2432 static rtx
2433 expand_builtin_int_roundingfn (tree exp, rtx target)
2434 {
2435 convert_optab builtin_optab;
2436 rtx op0, tmp;
2437 rtx_insn *insns;
2438 tree fndecl = get_callee_fndecl (exp);
2439 enum built_in_function fallback_fn;
2440 tree fallback_fndecl;
2441 machine_mode mode;
2442 tree arg;
2443
2444 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2445 gcc_unreachable ();
2446
2447 arg = CALL_EXPR_ARG (exp, 0);
2448
2449 switch (DECL_FUNCTION_CODE (fndecl))
2450 {
2451 CASE_FLT_FN (BUILT_IN_ICEIL):
2452 CASE_FLT_FN (BUILT_IN_LCEIL):
2453 CASE_FLT_FN (BUILT_IN_LLCEIL):
2454 builtin_optab = lceil_optab;
2455 fallback_fn = BUILT_IN_CEIL;
2456 break;
2457
2458 CASE_FLT_FN (BUILT_IN_IFLOOR):
2459 CASE_FLT_FN (BUILT_IN_LFLOOR):
2460 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2461 builtin_optab = lfloor_optab;
2462 fallback_fn = BUILT_IN_FLOOR;
2463 break;
2464
2465 default:
2466 gcc_unreachable ();
2467 }
2468
2469 /* Make a suitable register to place result in. */
2470 mode = TYPE_MODE (TREE_TYPE (exp));
2471
2472 target = gen_reg_rtx (mode);
2473
2474 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2475 need to expand the argument again. This way, we will not perform
2476 side-effects more than once. */
2477 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2478
2479 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2480
2481 start_sequence ();
2482
2483 /* Compute into TARGET. */
2484 if (expand_sfix_optab (target, op0, builtin_optab))
2485 {
2486 /* Output the entire sequence. */
2487 insns = get_insns ();
2488 end_sequence ();
2489 emit_insn (insns);
2490 return target;
2491 }
2492
2493 /* If we were unable to expand via the builtin, stop the sequence
2494 (without outputting the insns). */
2495 end_sequence ();
2496
2497 /* Fall back to floating point rounding optab. */
2498 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2499
2500 /* For non-C99 targets we may end up without a fallback fndecl here
2501 if the user called __builtin_lfloor directly. In this case emit
2502 a call to the floor/ceil variants nevertheless. This should give
2503 the best behavior on targets without full C99 support. */
2504 if (fallback_fndecl == NULL_TREE)
2505 {
2506 tree fntype;
2507 const char *name = NULL;
2508
2509 switch (DECL_FUNCTION_CODE (fndecl))
2510 {
2511 case BUILT_IN_ICEIL:
2512 case BUILT_IN_LCEIL:
2513 case BUILT_IN_LLCEIL:
2514 name = "ceil";
2515 break;
2516 case BUILT_IN_ICEILF:
2517 case BUILT_IN_LCEILF:
2518 case BUILT_IN_LLCEILF:
2519 name = "ceilf";
2520 break;
2521 case BUILT_IN_ICEILL:
2522 case BUILT_IN_LCEILL:
2523 case BUILT_IN_LLCEILL:
2524 name = "ceill";
2525 break;
2526 case BUILT_IN_IFLOOR:
2527 case BUILT_IN_LFLOOR:
2528 case BUILT_IN_LLFLOOR:
2529 name = "floor";
2530 break;
2531 case BUILT_IN_IFLOORF:
2532 case BUILT_IN_LFLOORF:
2533 case BUILT_IN_LLFLOORF:
2534 name = "floorf";
2535 break;
2536 case BUILT_IN_IFLOORL:
2537 case BUILT_IN_LFLOORL:
2538 case BUILT_IN_LLFLOORL:
2539 name = "floorl";
2540 break;
2541 default:
2542 gcc_unreachable ();
2543 }
2544
2545 fntype = build_function_type_list (TREE_TYPE (arg),
2546 TREE_TYPE (arg), NULL_TREE);
2547 fallback_fndecl = build_fn_decl (name, fntype);
2548 }
2549
2550 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2551
2552 tmp = expand_normal (exp);
2553 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2554
2555 /* Truncate the result of floating point optab to integer
2556 via expand_fix (). */
2557 target = gen_reg_rtx (mode);
2558 expand_fix (target, tmp, 0);
2559
2560 return target;
2561 }
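
/* A user-level sketch of the extension handled above (hypothetical
   example):  */
long
floor_to_long (double x)
{
  /* With an lceil/lfloor optab this is a single conversion; otherwise it
     is lowered to roughly (long) floor (x) via the fallback path above.  */
  return __builtin_lfloor (x);
}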
2562
2563 /* Expand a call to one of the builtin math functions doing integer
2564 conversion (lrint).
2565 Return 0 if a normal call should be emitted rather than expanding the
2566 function in-line. EXP is the expression that is a call to the builtin
2567 function; if convenient, the result should be placed in TARGET. */
2568
2569 static rtx
2570 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2571 {
2572 convert_optab builtin_optab;
2573 rtx op0;
2574 rtx_insn *insns;
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg;
2577 machine_mode mode;
2578 enum built_in_function fallback_fn = BUILT_IN_NONE;
2579
2580 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2581 gcc_unreachable ();
2582
2583 arg = CALL_EXPR_ARG (exp, 0);
2584
2585 switch (DECL_FUNCTION_CODE (fndecl))
2586 {
2587 CASE_FLT_FN (BUILT_IN_IRINT):
2588 fallback_fn = BUILT_IN_LRINT;
2589 /* FALLTHRU */
2590 CASE_FLT_FN (BUILT_IN_LRINT):
2591 CASE_FLT_FN (BUILT_IN_LLRINT):
2592 builtin_optab = lrint_optab;
2593 break;
2594
2595 CASE_FLT_FN (BUILT_IN_IROUND):
2596 fallback_fn = BUILT_IN_LROUND;
2597 /* FALLTHRU */
2598 CASE_FLT_FN (BUILT_IN_LROUND):
2599 CASE_FLT_FN (BUILT_IN_LLROUND):
2600 builtin_optab = lround_optab;
2601 break;
2602
2603 default:
2604 gcc_unreachable ();
2605 }
2606
2607 /* There's no easy way to detect the case we need to set EDOM. */
2608 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2609 return NULL_RTX;
2610
2611 /* Make a suitable register to place result in. */
2612 mode = TYPE_MODE (TREE_TYPE (exp));
2613
2614 /* Expand inline only when errno does not need to be set; there's no
easy way to detect the case we need to set EDOM. */
2615 if (!flag_errno_math)
2616 {
2617 rtx result = gen_reg_rtx (mode);
2618
2619 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2620 need to expand the argument again. This way, we will not perform
2621 side-effects more than once. */
2622 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2623
2624 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2625
2626 start_sequence ();
2627
2628 if (expand_sfix_optab (result, op0, builtin_optab))
2629 {
2630 /* Output the entire sequence. */
2631 insns = get_insns ();
2632 end_sequence ();
2633 emit_insn (insns);
2634 return result;
2635 }
2636
2637 /* If we were unable to expand via the builtin, stop the sequence
2638 (without outputting the insns) and call to the library function
2639 with the stabilized argument list. */
2640 end_sequence ();
2641 }
2642
2643 if (fallback_fn != BUILT_IN_NONE)
2644 {
2645 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2646 targets, (int) round (x) should never be transformed into
2647 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2648 a call to lround in the hope that the target provides at least some
2649 C99 functions. This should give the best behavior on targets
2650 without full C99 support. */
2651 tree fallback_fndecl = mathfn_built_in_1
2652 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2653
2654 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2655 fallback_fndecl, 1, arg);
2656
2657 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2658 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2659 return convert_to_mode (mode, target, 0);
2660 }
2661
2662 return expand_call (exp, target, target == const0_rtx);
2663 }
2664
2665 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2666 a normal call should be emitted rather than expanding the function
2667 in-line. EXP is the expression that is a call to the builtin
2668 function; if convenient, the result should be placed in TARGET. */
2669
2670 static rtx
2671 expand_builtin_powi (tree exp, rtx target)
2672 {
2673 tree arg0, arg1;
2674 rtx op0, op1;
2675 machine_mode mode;
2676 machine_mode mode2;
2677
2678 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2679 return NULL_RTX;
2680
2681 arg0 = CALL_EXPR_ARG (exp, 0);
2682 arg1 = CALL_EXPR_ARG (exp, 1);
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2684
2685 /* Emit a libcall to libgcc. */
2686
2687 /* Mode of the 2nd argument must match that of an int. */
2688 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2689
2690 if (target == NULL_RTX)
2691 target = gen_reg_rtx (mode);
2692
2693 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2694 if (GET_MODE (op0) != mode)
2695 op0 = convert_to_mode (mode, op0, 0);
2696 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2697 if (GET_MODE (op1) != mode2)
2698 op1 = convert_to_mode (mode2, op1, 0);
2699
2700 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2701 target, LCT_CONST, mode, 2,
2702 op0, mode, op1, mode2);
2703
2704 return target;
2705 }
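
/* A user-level sketch (hypothetical example): __builtin_powi takes an
   integer exponent, and when a call reaches this expander it is emitted as
   a libcall to libgcc via the powi_optab libfunc (e.g. __powidf2 for
   double).  */
double
cube (double x)
{
  return __builtin_powi (x, 3);
}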
2706
2707 /* Expand expression EXP, which is a call to the strlen builtin. Return
2708 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2709 try to get the result in TARGET, if convenient. */
2710
2711 static rtx
2712 expand_builtin_strlen (tree exp, rtx target,
2713 machine_mode target_mode)
2714 {
2715 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2716 return NULL_RTX;
2717 else
2718 {
2719 struct expand_operand ops[4];
2720 rtx pat;
2721 tree len;
2722 tree src = CALL_EXPR_ARG (exp, 0);
2723 rtx src_reg;
2724 rtx_insn *before_strlen;
2725 machine_mode insn_mode = target_mode;
2726 enum insn_code icode = CODE_FOR_nothing;
2727 unsigned int align;
2728
2729 /* If the length can be computed at compile-time, return it. */
2730 len = c_strlen (src, 0);
2731 if (len)
2732 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2733
2734 /* If the length can be computed at compile-time and is constant
2735 integer, but there are side-effects in src, evaluate
2736 src for side-effects, then return len.
2737 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2738 can be optimized into: i++; x = 3; */
2739 len = c_strlen (src, 1);
2740 if (len && TREE_CODE (len) == INTEGER_CST)
2741 {
2742 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2743 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2744 }
2745
2746 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2747
2748 /* If SRC is not a pointer type, don't do this operation inline. */
2749 if (align == 0)
2750 return NULL_RTX;
2751
2752 /* Bail out if we can't compute strlen in the right mode. */
2753 while (insn_mode != VOIDmode)
2754 {
2755 icode = optab_handler (strlen_optab, insn_mode);
2756 if (icode != CODE_FOR_nothing)
2757 break;
2758
2759 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2760 }
2761 if (insn_mode == VOIDmode)
2762 return NULL_RTX;
2763
2764 /* Make a place to hold the source address. We will not expand
2765 the actual source until we are sure that the expansion will
2766 not fail -- there are trees that cannot be expanded twice. */
2767 src_reg = gen_reg_rtx (Pmode);
2768
2769 /* Mark the beginning of the strlen sequence so we can emit the
2770 source operand later. */
2771 before_strlen = get_last_insn ();
2772
2773 create_output_operand (&ops[0], target, insn_mode);
2774 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2775 create_integer_operand (&ops[2], 0);
2776 create_integer_operand (&ops[3], align);
2777 if (!maybe_expand_insn (icode, 4, ops))
2778 return NULL_RTX;
2779
2780 /* Now that we are assured of success, expand the source. */
2781 start_sequence ();
2782 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2783 if (pat != src_reg)
2784 {
2785 #ifdef POINTERS_EXTEND_UNSIGNED
2786 if (GET_MODE (pat) != Pmode)
2787 pat = convert_to_mode (Pmode, pat,
2788 POINTERS_EXTEND_UNSIGNED);
2789 #endif
2790 emit_move_insn (src_reg, pat);
2791 }
2792 pat = get_insns ();
2793 end_sequence ();
2794
2795 if (before_strlen)
2796 emit_insn_after (pat, before_strlen);
2797 else
2798 emit_insn_before (pat, get_insns ());
2799
2800 /* Return the value in the proper mode for this function. */
2801 if (GET_MODE (ops[0].value) == target_mode)
2802 target = ops[0].value;
2803 else if (target != 0)
2804 convert_move (target, ops[0].value, 0);
2805 else
2806 target = convert_to_mode (target_mode, ops[0].value, 0);
2807
2808 return target;
2809 }
2810 }
2811
2812 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2813 bytes from constant string DATA + OFFSET and return it as target
2814 constant. */
2815
2816 static rtx
2817 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2818 machine_mode mode)
2819 {
2820 const char *str = (const char *) data;
2821
2822 gcc_assert (offset >= 0
2823 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2824 <= strlen (str) + 1));
2825
2826 return c_readstr (str + offset, mode);
2827 }
2828
2829 /* LEN specifies the length of the block for a memcpy/memset operation.
2830 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2831 In some cases we can make a very likely guess at the maximum size,
2832 which we then store in PROBABLE_MAX_SIZE. */
2833
2834 static void
2835 determine_block_size (tree len, rtx len_rtx,
2836 unsigned HOST_WIDE_INT *min_size,
2837 unsigned HOST_WIDE_INT *max_size,
2838 unsigned HOST_WIDE_INT *probable_max_size)
2839 {
2840 if (CONST_INT_P (len_rtx))
2841 {
2842 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2843 return;
2844 }
2845 else
2846 {
2847 wide_int min, max;
2848 enum value_range_type range_type = VR_UNDEFINED;
2849
2850 /* Determine bounds from the type. */
2851 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2852 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2853 else
2854 *min_size = 0;
2855 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2856 *probable_max_size = *max_size
2857 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2858 else
2859 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2860
2861 if (TREE_CODE (len) == SSA_NAME)
2862 range_type = get_range_info (len, &min, &max);
2863 if (range_type == VR_RANGE)
2864 {
2865 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2866 *min_size = min.to_uhwi ();
2867 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2868 *probable_max_size = *max_size = max.to_uhwi ();
2869 }
2870 else if (range_type == VR_ANTI_RANGE)
2871 {
2872 /* An anti-range 0...N lets us determine that the minimum size is N+1. */
2873 if (min == 0)
2874 {
2875 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2876 *min_size = max.to_uhwi () + 1;
2877 }
2878 /* Code like
2879
2880 int n;
2881 if (n < 100)
2882 memcpy (a, b, n)
2883
2884 produces an anti-range allowing negative values of N. We can
2885 still use that information and guess that N is not negative.
2886 */
2887 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2888 *probable_max_size = min.to_uhwi () - 1;
2889 }
2890 }
2891 gcc_checking_assert (*max_size <=
2892 (unsigned HOST_WIDE_INT)
2893 GET_MODE_MASK (GET_MODE (len_rtx)));
2894 }
2895
2896 /* Helper function to do the actual work for expand_builtin_memcpy. */
2897
2898 static rtx
2899 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2900 {
2901 const char *src_str;
2902 unsigned int src_align = get_pointer_alignment (src);
2903 unsigned int dest_align = get_pointer_alignment (dest);
2904 rtx dest_mem, src_mem, dest_addr, len_rtx;
2905 HOST_WIDE_INT expected_size = -1;
2906 unsigned int expected_align = 0;
2907 unsigned HOST_WIDE_INT min_size;
2908 unsigned HOST_WIDE_INT max_size;
2909 unsigned HOST_WIDE_INT probable_max_size;
2910
2911 /* If DEST is not a pointer type, call the normal function. */
2912 if (dest_align == 0)
2913 return NULL_RTX;
2914
2915 /* If SRC is not a pointer type, don't do this
2916 operation in-line. */
2917 if (src_align == 0)
2918 return NULL_RTX;
2919
2920 if (currently_expanding_gimple_stmt)
2921 stringop_block_profile (currently_expanding_gimple_stmt,
2922 &expected_align, &expected_size);
2923
2924 if (expected_align < dest_align)
2925 expected_align = dest_align;
2926 dest_mem = get_memory_rtx (dest, len);
2927 set_mem_align (dest_mem, dest_align);
2928 len_rtx = expand_normal (len);
2929 determine_block_size (len, len_rtx, &min_size, &max_size,
2930 &probable_max_size);
2931 src_str = c_getstr (src);
2932
2933 /* If SRC is a string constant and block move would be done
2934 by pieces, we can avoid loading the string from memory
2935 and only store the computed constants. */
2936 if (src_str
2937 && CONST_INT_P (len_rtx)
2938 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2939 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2940 CONST_CAST (char *, src_str),
2941 dest_align, false))
2942 {
2943 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2944 builtin_memcpy_read_str,
2945 CONST_CAST (char *, src_str),
2946 dest_align, false, 0);
2947 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2948 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2949 return dest_mem;
2950 }
2951
2952 src_mem = get_memory_rtx (src, len);
2953 set_mem_align (src_mem, src_align);
2954
2955 /* Copy word part most expediently. */
2956 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2957 CALL_EXPR_TAILCALL (exp)
2958 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2959 expected_align, expected_size,
2960 min_size, max_size, probable_max_size);
2961
2962 if (dest_addr == 0)
2963 {
2964 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2965 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2966 }
2967
2968 return dest_addr;
2969 }
2970
2971 /* Expand a call EXP to the memcpy builtin.
2972 Return NULL_RTX if we failed; the caller should emit a normal call.
2973 Otherwise try to get the result in TARGET, if convenient (and in
2974 mode MODE if that's convenient). */
2975
2976 static rtx
2977 expand_builtin_memcpy (tree exp, rtx target)
2978 {
2979 if (!validate_arglist (exp,
2980 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2981 return NULL_RTX;
2982 else
2983 {
2984 tree dest = CALL_EXPR_ARG (exp, 0);
2985 tree src = CALL_EXPR_ARG (exp, 1);
2986 tree len = CALL_EXPR_ARG (exp, 2);
2987 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2988 }
2989 }
2990
2991 /* Expand an instrumented call EXP to the memcpy builtin.
2992 Return NULL_RTX if we failed, the caller should emit a normal call,
2993 otherwise try to get the result in TARGET, if convenient (and in
2994 mode MODE if that's convenient). */
2995
2996 static rtx
2997 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2998 {
2999 if (!validate_arglist (exp,
3000 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3001 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3002 INTEGER_TYPE, VOID_TYPE))
3003 return NULL_RTX;
3004 else
3005 {
3006 tree dest = CALL_EXPR_ARG (exp, 0);
3007 tree src = CALL_EXPR_ARG (exp, 2);
3008 tree len = CALL_EXPR_ARG (exp, 4);
3009 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3010
3011 /* Return src bounds with the result. */
3012 if (res)
3013 {
3014 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3015 expand_normal (CALL_EXPR_ARG (exp, 1)));
3016 res = chkp_join_splitted_slot (res, bnd);
3017 }
3018 return res;
3019 }
3020 }
3021
3022 /* Expand a call EXP to the mempcpy builtin.
3023 Return NULL_RTX if we failed; the caller should emit a normal call,
3024 otherwise try to get the result in TARGET, if convenient (and in
3025 mode MODE if that's convenient). If ENDP is 0 return the
3026 destination pointer, if ENDP is 1 return the end pointer ala
3027 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3028 stpcpy. */
3029
3030 static rtx
3031 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3032 {
3033 if (!validate_arglist (exp,
3034 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3035 return NULL_RTX;
3036 else
3037 {
3038 tree dest = CALL_EXPR_ARG (exp, 0);
3039 tree src = CALL_EXPR_ARG (exp, 1);
3040 tree len = CALL_EXPR_ARG (exp, 2);
3041 return expand_builtin_mempcpy_args (dest, src, len,
3042 target, mode, /*endp=*/ 1,
3043 exp);
3044 }
3045 }
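
/* A user-level sketch of the ENDP convention (hypothetical example;
   mempcpy is a GNU extension and needs _GNU_SOURCE).  */
#define _GNU_SOURCE
#include <string.h>

void
endp_demo (char *buf)
{
  char *end = mempcpy (buf, "abc", 4);   /* ENDP == 1: end == buf + 4.  */
  char *nul = stpcpy (buf, "abc");       /* ENDP == 2: nul == buf + 3,
                                            pointing at the NUL.  */
  (void) end;
  (void) nul;
}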
3046
3047 /* Expand an instrumented call EXP to the mempcpy builtin.
3048 Return NULL_RTX if we failed; the caller should emit a normal call.
3049 Otherwise try to get the result in TARGET, if convenient (and in
3050 mode MODE if that's convenient). */
3051
3052 static rtx
3053 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3054 {
3055 if (!validate_arglist (exp,
3056 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3057 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3058 INTEGER_TYPE, VOID_TYPE))
3059 return NULL_RTX;
3060 else
3061 {
3062 tree dest = CALL_EXPR_ARG (exp, 0);
3063 tree src = CALL_EXPR_ARG (exp, 2);
3064 tree len = CALL_EXPR_ARG (exp, 4);
3065 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3066 mode, 1, exp);
3067
3068 /* Return src bounds with the result. */
3069 if (res)
3070 {
3071 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3072 expand_normal (CALL_EXPR_ARG (exp, 1)));
3073 res = chkp_join_splitted_slot (res, bnd);
3074 }
3075 return res;
3076 }
3077 }
3078
3079 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3080 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3081 so that this can also be called without constructing an actual CALL_EXPR.
3082 The other arguments and return value are the same as for
3083 expand_builtin_mempcpy. */
3084
3085 static rtx
3086 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3087 rtx target, machine_mode mode, int endp,
3088 tree orig_exp)
3089 {
3090 tree fndecl = get_callee_fndecl (orig_exp);
3091
3092 /* If return value is ignored, transform mempcpy into memcpy. */
3093 if (target == const0_rtx
3094 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3095 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3096 {
3097 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3098 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3099 dest, src, len);
3100 return expand_expr (result, target, mode, EXPAND_NORMAL);
3101 }
3102 else if (target == const0_rtx
3103 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3104 {
3105 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3106 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3107 dest, src, len);
3108 return expand_expr (result, target, mode, EXPAND_NORMAL);
3109 }
3110 else
3111 {
3112 const char *src_str;
3113 unsigned int src_align = get_pointer_alignment (src);
3114 unsigned int dest_align = get_pointer_alignment (dest);
3115 rtx dest_mem, src_mem, len_rtx;
3116
3117 /* If either SRC or DEST is not a pointer type, don't do this
3118 operation in-line. */
3119 if (dest_align == 0 || src_align == 0)
3120 return NULL_RTX;
3121
3122 /* If LEN is not constant, call the normal function. */
3123 if (! tree_fits_uhwi_p (len))
3124 return NULL_RTX;
3125
3126 len_rtx = expand_normal (len);
3127 src_str = c_getstr (src);
3128
3129 /* If SRC is a string constant and block move would be done
3130 by pieces, we can avoid loading the string from memory
3131 and only store the computed constants. */
3132 if (src_str
3133 && CONST_INT_P (len_rtx)
3134 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3135 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3136 CONST_CAST (char *, src_str),
3137 dest_align, false))
3138 {
3139 dest_mem = get_memory_rtx (dest, len);
3140 set_mem_align (dest_mem, dest_align);
3141 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3142 builtin_memcpy_read_str,
3143 CONST_CAST (char *, src_str),
3144 dest_align, false, endp);
3145 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3146 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3147 return dest_mem;
3148 }
3149
3150 if (CONST_INT_P (len_rtx)
3151 && can_move_by_pieces (INTVAL (len_rtx),
3152 MIN (dest_align, src_align)))
3153 {
3154 dest_mem = get_memory_rtx (dest, len);
3155 set_mem_align (dest_mem, dest_align);
3156 src_mem = get_memory_rtx (src, len);
3157 set_mem_align (src_mem, src_align);
3158 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3159 MIN (dest_align, src_align), endp);
3160 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3161 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3162 return dest_mem;
3163 }
3164
3165 return NULL_RTX;
3166 }
3167 }
3168
3169 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3170 we failed; the caller should emit a normal call. Otherwise try to
3171 get the result in TARGET, if convenient. If ENDP is 0 return the
3172 destination pointer, if ENDP is 1 return the end pointer ala
3173 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3174 stpcpy. */
3175
3176 static rtx
3177 expand_movstr (tree dest, tree src, rtx target, int endp)
3178 {
3179 struct expand_operand ops[3];
3180 rtx dest_mem;
3181 rtx src_mem;
3182
3183 if (!targetm.have_movstr ())
3184 return NULL_RTX;
3185
3186 dest_mem = get_memory_rtx (dest, NULL);
3187 src_mem = get_memory_rtx (src, NULL);
3188 if (!endp)
3189 {
3190 target = force_reg (Pmode, XEXP (dest_mem, 0));
3191 dest_mem = replace_equiv_address (dest_mem, target);
3192 }
3193
3194 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3195 create_fixed_operand (&ops[1], dest_mem);
3196 create_fixed_operand (&ops[2], src_mem);
3197 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3198 return NULL_RTX;
3199
3200 if (endp && target != const0_rtx)
3201 {
3202 target = ops[0].value;
3203 /* movstr is supposed to set end to the address of the NUL
3204 terminator. If the caller requested a mempcpy-like return value,
3205 adjust it. */
3206 if (endp == 1)
3207 {
3208 rtx tem = plus_constant (GET_MODE (target),
3209 gen_lowpart (GET_MODE (target), target), 1);
3210 emit_move_insn (target, force_operand (tem, NULL_RTX));
3211 }
3212 }
3213 return target;
3214 }
3215
3216 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3217 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3218 try to get the result in TARGET, if convenient (and in mode MODE if that's
3219 convenient). */
3220
3221 static rtx
3222 expand_builtin_strcpy (tree exp, rtx target)
3223 {
3224 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3225 {
3226 tree dest = CALL_EXPR_ARG (exp, 0);
3227 tree src = CALL_EXPR_ARG (exp, 1);
3228 return expand_builtin_strcpy_args (dest, src, target);
3229 }
3230 return NULL_RTX;
3231 }
3232
3233 /* Helper function to do the actual work for expand_builtin_strcpy. The
3234 arguments to the builtin_strcpy call DEST and SRC are broken out
3235 so that this can also be called without constructing an actual CALL_EXPR.
3236 The other arguments and return value are the same as for
3237 expand_builtin_strcpy. */
3238
3239 static rtx
3240 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3241 {
3242 return expand_movstr (dest, src, target, /*endp=*/0);
3243 }
3244
3245 /* Expand a call EXP to the stpcpy builtin.
3246 Return NULL_RTX if we failed; the caller should emit a normal call.
3247 Otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3249
3250 static rtx
3251 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3252 {
3253 tree dst, src;
3254 location_t loc = EXPR_LOCATION (exp);
3255
3256 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 return NULL_RTX;
3258
3259 dst = CALL_EXPR_ARG (exp, 0);
3260 src = CALL_EXPR_ARG (exp, 1);
3261
3262 /* If return value is ignored, transform stpcpy into strcpy. */
3263 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3264 {
3265 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3266 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3267 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 }
3269 else
3270 {
3271 tree len, lenp1;
3272 rtx ret;
3273
3274 /* Ensure we get an actual string whose length can be evaluated at
3275 compile-time, not an expression containing a string. This is
3276 because the latter will potentially produce pessimized code
3277 when used to produce the return value. */
3278 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3279 return expand_movstr (dst, src, target, /*endp=*/2);
3280
3281 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3282 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3283 target, mode, /*endp=*/2,
3284 exp);
3285
3286 if (ret)
3287 return ret;
3288
3289 if (TREE_CODE (len) == INTEGER_CST)
3290 {
3291 rtx len_rtx = expand_normal (len);
3292
3293 if (CONST_INT_P (len_rtx))
3294 {
3295 ret = expand_builtin_strcpy_args (dst, src, target);
3296
3297 if (ret)
3298 {
3299 if (! target)
3300 {
3301 if (mode != VOIDmode)
3302 target = gen_reg_rtx (mode);
3303 else
3304 target = gen_reg_rtx (GET_MODE (ret));
3305 }
3306 if (GET_MODE (target) != GET_MODE (ret))
3307 ret = gen_lowpart (GET_MODE (target), ret);
3308
3309 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3310 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3311 gcc_assert (ret);
3312
3313 return target;
3314 }
3315 }
3316 }
3317
3318 return expand_movstr (dst, src, target, /*endp=*/2);
3319 }
3320 }
3321
3322 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3323 bytes from constant string DATA + OFFSET and return it as target
3324 constant. */
3325
3326 rtx
3327 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3328 machine_mode mode)
3329 {
3330 const char *str = (const char *) data;
3331
3332 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3333 return const0_rtx;
3334
3335 return c_readstr (str + offset, mode);
3336 }
3337
3338 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3339 NULL_RTX if we failed; the caller should emit a normal call. */
3340
3341 static rtx
3342 expand_builtin_strncpy (tree exp, rtx target)
3343 {
3344 location_t loc = EXPR_LOCATION (exp);
3345
3346 if (validate_arglist (exp,
3347 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3348 {
3349 tree dest = CALL_EXPR_ARG (exp, 0);
3350 tree src = CALL_EXPR_ARG (exp, 1);
3351 tree len = CALL_EXPR_ARG (exp, 2);
3352 tree slen = c_strlen (src, 1);
3353
3354 /* We must be passed a constant len and src parameter. */
3355 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3356 return NULL_RTX;
3357
3358 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3359
3360 /* We're required to pad with trailing zeros if the requested
3361 len is greater than strlen(s2)+1. In that case try to
3362 use store_by_pieces; if that fails, punt. */
3363 if (tree_int_cst_lt (slen, len))
3364 {
3365 unsigned int dest_align = get_pointer_alignment (dest);
3366 const char *p = c_getstr (src);
3367 rtx dest_mem;
3368
3369 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3370 || !can_store_by_pieces (tree_to_uhwi (len),
3371 builtin_strncpy_read_str,
3372 CONST_CAST (char *, p),
3373 dest_align, false))
3374 return NULL_RTX;
3375
3376 dest_mem = get_memory_rtx (dest, len);
3377 store_by_pieces (dest_mem, tree_to_uhwi (len),
3378 builtin_strncpy_read_str,
3379 CONST_CAST (char *, p), dest_align, false, 0);
3380 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3381 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3382 return dest_mem;
3383 }
3384 }
3385 return NULL_RTX;
3386 }
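/* An illustrative case (not taken from a real testcase): for

       strncpy (buf, "ab", 5)

   the constant length 5 exceeds strlen ("ab") + 1 == 3, so the code above
   may emit a store_by_pieces sequence writing 'a', 'b' and three trailing
   NUL bytes, provided the destination alignment is known and the target
   can store 5 bytes by pieces.  */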
3387
3388 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3389 bytes from constant string DATA + OFFSET and return it as target
3390 constant. */
3391
3392 rtx
3393 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3394 machine_mode mode)
3395 {
3396 const char *c = (const char *) data;
3397 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3398
3399 memset (p, *c, GET_MODE_SIZE (mode));
3400
3401 return c_readstr (p, mode);
3402 }
3403
3404 /* Callback routine for store_by_pieces. Return the RTL of a register
3405 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3406 char value given in the RTL register data. For example, if mode is
3407 4 bytes wide, return the RTL for 0x01010101*data. */
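/* For example (illustrative numbers): in a 4-byte mode with DATA holding
   0x2A, the coefficient read from the all-ones byte string is 0x01010101,
   and 0x01010101 * 0x2A == 0x2A2A2A2A, i.e. the byte replicated across
   the word.  */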
3408
3409 static rtx
3410 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3411 machine_mode mode)
3412 {
3413 rtx target, coeff;
3414 size_t size;
3415 char *p;
3416
3417 size = GET_MODE_SIZE (mode);
3418 if (size == 1)
3419 return (rtx) data;
3420
3421 p = XALLOCAVEC (char, size);
3422 memset (p, 1, size);
3423 coeff = c_readstr (p, mode);
3424
3425 target = convert_to_mode (mode, (rtx) data, 1);
3426 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3427 return force_reg (mode, target);
3428 }
3429
3430 /* Expand expression EXP, which is a call to the memset builtin. Return
3431 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3432 try to get the result in TARGET, if convenient (and in mode MODE if that's
3433 convenient). */
3434
3435 static rtx
3436 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3437 {
3438 if (!validate_arglist (exp,
3439 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3440 return NULL_RTX;
3441 else
3442 {
3443 tree dest = CALL_EXPR_ARG (exp, 0);
3444 tree val = CALL_EXPR_ARG (exp, 1);
3445 tree len = CALL_EXPR_ARG (exp, 2);
3446 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3447 }
3448 }
3449
3450 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3451 Return NULL_RTX if we failed and the caller should emit a normal call, otherwise
3452 try to get the result in TARGET, if convenient (and in mode MODE if that's
3453 convenient). */
3454
3455 static rtx
3456 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3457 {
3458 if (!validate_arglist (exp,
3459 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3460 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462 else
3463 {
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree val = CALL_EXPR_ARG (exp, 2);
3466 tree len = CALL_EXPR_ARG (exp, 3);
3467 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3468
3469 /* Return src bounds with the result. */
3470 if (res)
3471 {
3472 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3473 expand_normal (CALL_EXPR_ARG (exp, 1)));
3474 res = chkp_join_splitted_slot (res, bnd);
3475 }
3476 return res;
3477 }
3478 }
3479
3480 /* Helper function to do the actual work for expand_builtin_memset. The
3481 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3482 so that this can also be called without constructing an actual CALL_EXPR.
3483 The other arguments and return value are the same as for
3484 expand_builtin_memset. */
3485
3486 static rtx
3487 expand_builtin_memset_args (tree dest, tree val, tree len,
3488 rtx target, machine_mode mode, tree orig_exp)
3489 {
3490 tree fndecl, fn;
3491 enum built_in_function fcode;
3492 machine_mode val_mode;
3493 char c;
3494 unsigned int dest_align;
3495 rtx dest_mem, dest_addr, len_rtx;
3496 HOST_WIDE_INT expected_size = -1;
3497 unsigned int expected_align = 0;
3498 unsigned HOST_WIDE_INT min_size;
3499 unsigned HOST_WIDE_INT max_size;
3500 unsigned HOST_WIDE_INT probable_max_size;
3501
3502 dest_align = get_pointer_alignment (dest);
3503
3504 /* If DEST is not a pointer type, don't do this operation in-line. */
3505 if (dest_align == 0)
3506 return NULL_RTX;
3507
3508 if (currently_expanding_gimple_stmt)
3509 stringop_block_profile (currently_expanding_gimple_stmt,
3510 &expected_align, &expected_size);
3511
3512 if (expected_align < dest_align)
3513 expected_align = dest_align;
3514
3515 /* If the LEN parameter is zero, return DEST. */
3516 if (integer_zerop (len))
3517 {
3518 /* Evaluate and ignore VAL in case it has side-effects. */
3519 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3520 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3521 }
3522
3523 /* Stabilize the arguments in case we fail. */
3524 dest = builtin_save_expr (dest);
3525 val = builtin_save_expr (val);
3526 len = builtin_save_expr (len);
3527
3528 len_rtx = expand_normal (len);
3529 determine_block_size (len, len_rtx, &min_size, &max_size,
3530 &probable_max_size);
3531 dest_mem = get_memory_rtx (dest, len);
3532 val_mode = TYPE_MODE (unsigned_char_type_node);
3533
3534 if (TREE_CODE (val) != INTEGER_CST)
3535 {
3536 rtx val_rtx;
3537
3538 val_rtx = expand_normal (val);
3539 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3540
3541 /* Assume that we can memset by pieces if we can store
3542 the coefficients by pieces (in the required modes).
3543 We can't pass builtin_memset_gen_str as that emits RTL. */
3544 c = 1;
3545 if (tree_fits_uhwi_p (len)
3546 && can_store_by_pieces (tree_to_uhwi (len),
3547 builtin_memset_read_str, &c, dest_align,
3548 true))
3549 {
3550 val_rtx = force_reg (val_mode, val_rtx);
3551 store_by_pieces (dest_mem, tree_to_uhwi (len),
3552 builtin_memset_gen_str, val_rtx, dest_align,
3553 true, 0);
3554 }
3555 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3556 dest_align, expected_align,
3557 expected_size, min_size, max_size,
3558 probable_max_size))
3559 goto do_libcall;
3560
3561 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3562 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3563 return dest_mem;
3564 }
3565
3566 if (target_char_cast (val, &c))
3567 goto do_libcall;
3568
3569 if (c)
3570 {
3571 if (tree_fits_uhwi_p (len)
3572 && can_store_by_pieces (tree_to_uhwi (len),
3573 builtin_memset_read_str, &c, dest_align,
3574 true))
3575 store_by_pieces (dest_mem, tree_to_uhwi (len),
3576 builtin_memset_read_str, &c, dest_align, true, 0);
3577 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3578 gen_int_mode (c, val_mode),
3579 dest_align, expected_align,
3580 expected_size, min_size, max_size,
3581 probable_max_size))
3582 goto do_libcall;
3583
3584 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3585 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3586 return dest_mem;
3587 }
3588
3589 set_mem_align (dest_mem, dest_align);
3590 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3591 CALL_EXPR_TAILCALL (orig_exp)
3592 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3593 expected_align, expected_size,
3594 min_size, max_size,
3595 probable_max_size);
3596
3597 if (dest_addr == 0)
3598 {
3599 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3600 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3601 }
3602
3603 return dest_addr;
3604
3605 do_libcall:
3606 fndecl = get_callee_fndecl (orig_exp);
3607 fcode = DECL_FUNCTION_CODE (fndecl);
3608 if (fcode == BUILT_IN_MEMSET
3609 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3610 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3611 dest, val, len);
3612 else if (fcode == BUILT_IN_BZERO)
3613 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3614 dest, len);
3615 else
3616 gcc_unreachable ();
3617 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3618 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3619 return expand_call (fn, target, target == const0_rtx);
3620 }
3621
3622 /* Expand expression EXP, which is a call to the bzero builtin. Return
3623 NULL_RTX if we failed and the caller should emit a normal call. */
3624
3625 static rtx
3626 expand_builtin_bzero (tree exp)
3627 {
3628 tree dest, size;
3629 location_t loc = EXPR_LOCATION (exp);
3630
3631 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3632 return NULL_RTX;
3633
3634 dest = CALL_EXPR_ARG (exp, 0);
3635 size = CALL_EXPR_ARG (exp, 1);
3636
3637 /* New argument list transforming bzero(ptr x, int y) to
3638 memset(ptr x, int 0, size_t y). This is done this way
3639 so that if it isn't expanded inline, we fall back to
3640 calling bzero instead of memset. */
3641
3642 return expand_builtin_memset_args (dest, integer_zero_node,
3643 fold_convert_loc (loc,
3644 size_type_node, size),
3645 const0_rtx, VOIDmode, exp);
3646 }
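/* Illustratively, a call such as bzero (p, n) is expanded here exactly as
   memset (p, 0, (size_t) n) would be, except that a failed inline
   expansion falls back to a library call to bzero rather than memset.  */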
3647
3648 /* Try to expand cmpstr operation ICODE with the given operands.
3649 Return the result rtx on success, otherwise return null. */
3650
3651 static rtx
3652 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3653 HOST_WIDE_INT align)
3654 {
3655 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3656
3657 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3658 target = NULL_RTX;
3659
3660 struct expand_operand ops[4];
3661 create_output_operand (&ops[0], target, insn_mode);
3662 create_fixed_operand (&ops[1], arg1_rtx);
3663 create_fixed_operand (&ops[2], arg2_rtx);
3664 create_integer_operand (&ops[3], align);
3665 if (maybe_expand_insn (icode, 4, ops))
3666 return ops[0].value;
3667 return NULL_RTX;
3668 }
3669
3670 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3671 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3672 otherwise return null. */
3673
3674 static rtx
3675 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3676 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3677 HOST_WIDE_INT align)
3678 {
3679 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3680
3681 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3682 target = NULL_RTX;
3683
3684 struct expand_operand ops[5];
3685 create_output_operand (&ops[0], target, insn_mode);
3686 create_fixed_operand (&ops[1], arg1_rtx);
3687 create_fixed_operand (&ops[2], arg2_rtx);
3688 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3689 TYPE_UNSIGNED (arg3_type));
3690 create_integer_operand (&ops[4], align);
3691 if (maybe_expand_insn (icode, 5, ops))
3692 return ops[0].value;
3693 return NULL_RTX;
3694 }
3695
3696 /* Expand expression EXP, which is a call to the memcmp built-in function.
3697 Return NULL_RTX if we failed and the caller should emit a normal call,
3698 otherwise try to get the result in TARGET, if convenient. */
3699
3700 static rtx
3701 expand_builtin_memcmp (tree exp, rtx target)
3702 {
3703 if (!validate_arglist (exp,
3704 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3705 return NULL_RTX;
3706
3707 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3708 implementing memcmp because it will stop if it encounters two
3709 zero bytes. */
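/* Concretely (an illustrative case): memcmp ("a\0b", "a\0c", 3) must
   report a difference in the third byte, but a cmpstrn-style compare
   would stop at the matching NULs in the second byte and wrongly report
   equality.  */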
3710 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3711 if (icode == CODE_FOR_nothing)
3712 return NULL_RTX;
3713
3714 tree arg1 = CALL_EXPR_ARG (exp, 0);
3715 tree arg2 = CALL_EXPR_ARG (exp, 1);
3716 tree len = CALL_EXPR_ARG (exp, 2);
3717
3718 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3719 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3720
3721 /* If we don't have POINTER_TYPE, call the function. */
3722 if (arg1_align == 0 || arg2_align == 0)
3723 return NULL_RTX;
3724
3725 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3726 location_t loc = EXPR_LOCATION (exp);
3727 rtx arg1_rtx = get_memory_rtx (arg1, len);
3728 rtx arg2_rtx = get_memory_rtx (arg2, len);
3729 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3730
3731 /* Set MEM_SIZE as appropriate. */
3732 if (CONST_INT_P (arg3_rtx))
3733 {
3734 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3735 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3736 }
3737
3738 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3739 TREE_TYPE (len), arg3_rtx,
3740 MIN (arg1_align, arg2_align));
3741 if (result)
3742 {
3743 /* Return the value in the proper mode for this function. */
3744 if (GET_MODE (result) == mode)
3745 return result;
3746
3747 if (target != 0)
3748 {
3749 convert_move (target, result, 0);
3750 return target;
3751 }
3752
3753 return convert_to_mode (mode, result, 0);
3754 }
3755
3756 result = target;
3757 if (! (result != 0
3758 && REG_P (result) && GET_MODE (result) == mode
3759 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3760 result = gen_reg_rtx (mode);
3761
3762 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3763 TYPE_MODE (integer_type_node), 3,
3764 XEXP (arg1_rtx, 0), Pmode,
3765 XEXP (arg2_rtx, 0), Pmode,
3766 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3767 TYPE_UNSIGNED (sizetype)),
3768 TYPE_MODE (sizetype));
3769 return result;
3770 }
3771
3772 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3773 if we failed and the caller should emit a normal call, otherwise try to get
3774 the result in TARGET, if convenient. */
3775
3776 static rtx
3777 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3778 {
3779 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3780 return NULL_RTX;
3781
3782 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3783 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3784 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3785 {
3786 rtx arg1_rtx, arg2_rtx;
3787 tree fndecl, fn;
3788 tree arg1 = CALL_EXPR_ARG (exp, 0);
3789 tree arg2 = CALL_EXPR_ARG (exp, 1);
3790 rtx result = NULL_RTX;
3791
3792 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3793 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3794
3795 /* If we don't have POINTER_TYPE, call the function. */
3796 if (arg1_align == 0 || arg2_align == 0)
3797 return NULL_RTX;
3798
3799 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3800 arg1 = builtin_save_expr (arg1);
3801 arg2 = builtin_save_expr (arg2);
3802
3803 arg1_rtx = get_memory_rtx (arg1, NULL);
3804 arg2_rtx = get_memory_rtx (arg2, NULL);
3805
3806 /* Try to call cmpstrsi. */
3807 if (cmpstr_icode != CODE_FOR_nothing)
3808 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3809 MIN (arg1_align, arg2_align));
3810
3811 /* Try to determine at least one length and call cmpstrnsi. */
3812 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3813 {
3814 tree len;
3815 rtx arg3_rtx;
3816
3817 tree len1 = c_strlen (arg1, 1);
3818 tree len2 = c_strlen (arg2, 1);
3819
3820 if (len1)
3821 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3822 if (len2)
3823 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3824
3825 /* If we don't have a constant length for the first, use the length
3826 of the second, if we know it. We don't require a constant for
3827 this case; some cost analysis could be done if both are available
3828 but neither is constant. For now, assume they're equally cheap,
3829 unless one has side effects. If both strings have constant lengths,
3830 use the smaller. */
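/* For instance (hypothetical operands): for strcmp ("hello", s) with S
   not a constant string, LEN1 is 6 and LEN2 is NULL, so LEN becomes 6.
   Comparing at most 6 bytes with cmpstrnsi is safe because the constant
   argument is known to contain its terminating NUL within that range.  */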
3831
3832 if (!len1)
3833 len = len2;
3834 else if (!len2)
3835 len = len1;
3836 else if (TREE_SIDE_EFFECTS (len1))
3837 len = len2;
3838 else if (TREE_SIDE_EFFECTS (len2))
3839 len = len1;
3840 else if (TREE_CODE (len1) != INTEGER_CST)
3841 len = len2;
3842 else if (TREE_CODE (len2) != INTEGER_CST)
3843 len = len1;
3844 else if (tree_int_cst_lt (len1, len2))
3845 len = len1;
3846 else
3847 len = len2;
3848
3849 /* If both arguments have side effects, we cannot optimize. */
3850 if (len && !TREE_SIDE_EFFECTS (len))
3851 {
3852 arg3_rtx = expand_normal (len);
3853 result = expand_cmpstrn_or_cmpmem
3854 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3855 arg3_rtx, MIN (arg1_align, arg2_align));
3856 }
3857 }
3858
3859 if (result)
3860 {
3861 /* Return the value in the proper mode for this function. */
3862 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3863 if (GET_MODE (result) == mode)
3864 return result;
3865 if (target == 0)
3866 return convert_to_mode (mode, result, 0);
3867 convert_move (target, result, 0);
3868 return target;
3869 }
3870
3871 /* Expand the library call ourselves using a stabilized argument
3872 list to avoid re-evaluating the function's arguments twice. */
3873 fndecl = get_callee_fndecl (exp);
3874 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3875 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3876 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3877 return expand_call (fn, target, target == const0_rtx);
3878 }
3879 return NULL_RTX;
3880 }
3881
3882 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3883 NULL_RTX if we failed and the caller should emit a normal call, otherwise try
3884 to get the result in TARGET, if convenient. */
3885
3886 static rtx
3887 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3888 ATTRIBUTE_UNUSED machine_mode mode)
3889 {
3890 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3891
3892 if (!validate_arglist (exp,
3893 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3894 return NULL_RTX;
3895
3896 /* If c_strlen can determine an expression for one of the string
3897 lengths, and it doesn't have side effects, then emit cmpstrnsi
3898 using length MIN(strlen(string)+1, arg3). */
3899 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3900 if (cmpstrn_icode != CODE_FOR_nothing)
3901 {
3902 tree len, len1, len2;
3903 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3904 rtx result;
3905 tree fndecl, fn;
3906 tree arg1 = CALL_EXPR_ARG (exp, 0);
3907 tree arg2 = CALL_EXPR_ARG (exp, 1);
3908 tree arg3 = CALL_EXPR_ARG (exp, 2);
3909
3910 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3911 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3912
3913 len1 = c_strlen (arg1, 1);
3914 len2 = c_strlen (arg2, 1);
3915
3916 if (len1)
3917 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3918 if (len2)
3919 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3920
3921 /* If we don't have a constant length for the first, use the length
3922 of the second, if we know it. We don't require a constant for
3923 this case; some cost analysis could be done if both are available
3924 but neither is constant. For now, assume they're equally cheap,
3925 unless one has side effects. If both strings have constant lengths,
3926 use the smaller. */
3927
3928 if (!len1)
3929 len = len2;
3930 else if (!len2)
3931 len = len1;
3932 else if (TREE_SIDE_EFFECTS (len1))
3933 len = len2;
3934 else if (TREE_SIDE_EFFECTS (len2))
3935 len = len1;
3936 else if (TREE_CODE (len1) != INTEGER_CST)
3937 len = len2;
3938 else if (TREE_CODE (len2) != INTEGER_CST)
3939 len = len1;
3940 else if (tree_int_cst_lt (len1, len2))
3941 len = len1;
3942 else
3943 len = len2;
3944
3945 /* If both arguments have side effects, we cannot optimize. */
3946 if (!len || TREE_SIDE_EFFECTS (len))
3947 return NULL_RTX;
3948
3949 /* The actual new length parameter is MIN(len,arg3). */
3950 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3951 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3952
3953 /* If we don't have POINTER_TYPE, call the function. */
3954 if (arg1_align == 0 || arg2_align == 0)
3955 return NULL_RTX;
3956
3957 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3958 arg1 = builtin_save_expr (arg1);
3959 arg2 = builtin_save_expr (arg2);
3960 len = builtin_save_expr (len);
3961
3962 arg1_rtx = get_memory_rtx (arg1, len);
3963 arg2_rtx = get_memory_rtx (arg2, len);
3964 arg3_rtx = expand_normal (len);
3965 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3966 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3967 MIN (arg1_align, arg2_align));
3968 if (result)
3969 {
3970 /* Return the value in the proper mode for this function. */
3971 mode = TYPE_MODE (TREE_TYPE (exp));
3972 if (GET_MODE (result) == mode)
3973 return result;
3974 if (target == 0)
3975 return convert_to_mode (mode, result, 0);
3976 convert_move (target, result, 0);
3977 return target;
3978 }
3979
3980 /* Expand the library call ourselves using a stabilized argument
3981 list to avoid re-evaluating the function's arguments twice. */
3982 fndecl = get_callee_fndecl (exp);
3983 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3984 arg1, arg2, len);
3985 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3986 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3987 return expand_call (fn, target, target == const0_rtx);
3988 }
3989 return NULL_RTX;
3990 }
3991
3992 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3993 if that's convenient. */
3994
3995 rtx
3996 expand_builtin_saveregs (void)
3997 {
3998 rtx val;
3999 rtx_insn *seq;
4000
4001 /* Don't do __builtin_saveregs more than once in a function.
4002 Save the result of the first call and reuse it. */
4003 if (saveregs_value != 0)
4004 return saveregs_value;
4005
4006 /* When this function is called, it means that registers must be
4007 saved on entry to this function. So we migrate the call to the
4008 first insn of this function. */
4009
4010 start_sequence ();
4011
4012 /* Do whatever the machine needs done in this case. */
4013 val = targetm.calls.expand_builtin_saveregs ();
4014
4015 seq = get_insns ();
4016 end_sequence ();
4017
4018 saveregs_value = val;
4019
4020 /* Put the insns after the NOTE that starts the function. If this
4021 is inside a start_sequence, make the outer-level insn chain current, so
4022 the code is placed at the start of the function. */
4023 push_topmost_sequence ();
4024 emit_insn_after (seq, entry_of_function ());
4025 pop_topmost_sequence ();
4026
4027 return val;
4028 }
4029
4030 /* Expand a call to __builtin_next_arg. */
4031
4032 static rtx
4033 expand_builtin_next_arg (void)
4034 {
4035 /* Checking arguments is already done in fold_builtin_next_arg
4036 that must be called before this function. */
4037 return expand_binop (ptr_mode, add_optab,
4038 crtl->args.internal_arg_pointer,
4039 crtl->args.arg_offset_rtx,
4040 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4041 }
4042
4043 /* Make it easier for the backends by protecting the valist argument
4044 from multiple evaluations. */
4045
4046 static tree
4047 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4048 {
4049 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4050
4051 /* The current way of determining the type of valist is completely
4052 bogus. We should have the information on the va builtin instead. */
4053 if (!vatype)
4054 vatype = targetm.fn_abi_va_list (cfun->decl);
4055
4056 if (TREE_CODE (vatype) == ARRAY_TYPE)
4057 {
4058 if (TREE_SIDE_EFFECTS (valist))
4059 valist = save_expr (valist);
4060
4061 /* For this case, the backends will be expecting a pointer to
4062 vatype, but it's possible we've actually been given an array
4063 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4064 So fix it. */
4065 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4066 {
4067 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4068 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4069 }
4070 }
4071 else
4072 {
4073 tree pt = build_pointer_type (vatype);
4074
4075 if (! needs_lvalue)
4076 {
4077 if (! TREE_SIDE_EFFECTS (valist))
4078 return valist;
4079
4080 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4081 TREE_SIDE_EFFECTS (valist) = 1;
4082 }
4083
4084 if (TREE_SIDE_EFFECTS (valist))
4085 valist = save_expr (valist);
4086 valist = fold_build2_loc (loc, MEM_REF,
4087 vatype, valist, build_int_cst (pt, 0));
4088 }
4089
4090 return valist;
4091 }
4092
4093 /* The "standard" definition of va_list is void*. */
4094
4095 tree
4096 std_build_builtin_va_list (void)
4097 {
4098 return ptr_type_node;
4099 }
4100
4101 /* The "standard" abi va_list is va_list_type_node. */
4102
4103 tree
4104 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4105 {
4106 return va_list_type_node;
4107 }
4108
4109 /* The "standard" type of va_list is va_list_type_node. */
4110
4111 tree
4112 std_canonical_va_list_type (tree type)
4113 {
4114 tree wtype, htype;
4115
4116 if (INDIRECT_REF_P (type))
4117 type = TREE_TYPE (type);
4118 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4119 type = TREE_TYPE (type);
4120 wtype = va_list_type_node;
4121 htype = type;
4122 /* Treat structure va_list types. */
4123 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4124 htype = TREE_TYPE (htype);
4125 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4126 {
4127 /* If va_list is an array type, the argument may have decayed
4128 to a pointer type, e.g. by being passed to another function.
4129 In that case, unwrap both types so that we can compare the
4130 underlying records. */
4131 if (TREE_CODE (htype) == ARRAY_TYPE
4132 || POINTER_TYPE_P (htype))
4133 {
4134 wtype = TREE_TYPE (wtype);
4135 htype = TREE_TYPE (htype);
4136 }
4137 }
4138 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4139 return va_list_type_node;
4140
4141 return NULL_TREE;
4142 }
4143
4144 /* The "standard" implementation of va_start: just assign `nextarg' to
4145 the variable. */
4146
4147 void
4148 std_expand_builtin_va_start (tree valist, rtx nextarg)
4149 {
4150 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4151 convert_move (va_r, nextarg, 0);
4152
4153 /* We do not have any valid bounds for the pointer, so
4154 just store zero bounds for it. */
4155 if (chkp_function_instrumented_p (current_function_decl))
4156 chkp_expand_bounds_reset_for_mem (valist,
4157 make_tree (TREE_TYPE (valist),
4158 nextarg));
4159 }
4160
4161 /* Expand EXP, a call to __builtin_va_start. */
4162
4163 static rtx
4164 expand_builtin_va_start (tree exp)
4165 {
4166 rtx nextarg;
4167 tree valist;
4168 location_t loc = EXPR_LOCATION (exp);
4169
4170 if (call_expr_nargs (exp) < 2)
4171 {
4172 error_at (loc, "too few arguments to function %<va_start%>");
4173 return const0_rtx;
4174 }
4175
4176 if (fold_builtin_next_arg (exp, true))
4177 return const0_rtx;
4178
4179 nextarg = expand_builtin_next_arg ();
4180 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4181
4182 if (targetm.expand_builtin_va_start)
4183 targetm.expand_builtin_va_start (valist, nextarg);
4184 else
4185 std_expand_builtin_va_start (valist, nextarg);
4186
4187 return const0_rtx;
4188 }
4189
4190 /* Expand EXP, a call to __builtin_va_end. */
4191
4192 static rtx
4193 expand_builtin_va_end (tree exp)
4194 {
4195 tree valist = CALL_EXPR_ARG (exp, 0);
4196
4197 /* Evaluate for side effects, if needed. I hate macros that don't
4198 do that. */
4199 if (TREE_SIDE_EFFECTS (valist))
4200 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4201
4202 return const0_rtx;
4203 }
4204
4205 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4206 builtin rather than just as an assignment in stdarg.h because of the
4207 nastiness of array-type va_list types. */
4208
4209 static rtx
4210 expand_builtin_va_copy (tree exp)
4211 {
4212 tree dst, src, t;
4213 location_t loc = EXPR_LOCATION (exp);
4214
4215 dst = CALL_EXPR_ARG (exp, 0);
4216 src = CALL_EXPR_ARG (exp, 1);
4217
4218 dst = stabilize_va_list_loc (loc, dst, 1);
4219 src = stabilize_va_list_loc (loc, src, 0);
4220
4221 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4222
4223 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4224 {
4225 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4226 TREE_SIDE_EFFECTS (t) = 1;
4227 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4228 }
4229 else
4230 {
4231 rtx dstb, srcb, size;
4232
4233 /* Evaluate to pointers. */
4234 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4235 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4236 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4237 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4238
4239 dstb = convert_memory_address (Pmode, dstb);
4240 srcb = convert_memory_address (Pmode, srcb);
4241
4242 /* "Dereference" to BLKmode memories. */
4243 dstb = gen_rtx_MEM (BLKmode, dstb);
4244 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4245 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4246 srcb = gen_rtx_MEM (BLKmode, srcb);
4247 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4248 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4249
4250 /* Copy. */
4251 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4252 }
4253
4254 return const0_rtx;
4255 }
4256
4257 /* Expand a call to one of the builtin functions __builtin_frame_address or
4258 __builtin_return_address. */
4259
4260 static rtx
4261 expand_builtin_frame_address (tree fndecl, tree exp)
4262 {
4263 /* The argument must be a nonnegative integer constant.
4264 It counts the number of frames to scan up the stack.
4265 The value is either the frame pointer value or the return
4266 address saved in that frame. */
4267 if (call_expr_nargs (exp) == 0)
4268 /* Warning about missing arg was already issued. */
4269 return const0_rtx;
4270 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4271 {
4272 error ("invalid argument to %qD", fndecl);
4273 return const0_rtx;
4274 }
4275 else
4276 {
4277 /* Number of frames to scan up the stack. */
4278 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4279
4280 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4281
4282 /* Some ports cannot access arbitrary stack frames. */
4283 if (tem == NULL)
4284 {
4285 warning (0, "unsupported argument to %qD", fndecl);
4286 return const0_rtx;
4287 }
4288
4289 if (count)
4290 {
4291 /* Warn since no effort is made to ensure that any frame
4292 beyond the current one exists or can be safely reached. */
4293 warning (OPT_Wframe_address, "calling %qD with "
4294 "a nonzero argument is unsafe", fndecl);
4295 }
4296
4297 /* For __builtin_frame_address, return what we've got. */
4298 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4299 return tem;
4300
4301 if (!REG_P (tem)
4302 && ! CONSTANT_P (tem))
4303 tem = copy_addr_to_reg (tem);
4304 return tem;
4305 }
4306 }
4307
4308 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4309 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4310 is the same as for allocate_dynamic_stack_space. */
4311
4312 static rtx
4313 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4314 {
4315 rtx op0;
4316 rtx result;
4317 bool valid_arglist;
4318 unsigned int align;
4319 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4320 == BUILT_IN_ALLOCA_WITH_ALIGN);
4321
4322 valid_arglist
4323 = (alloca_with_align
4324 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4325 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4326
4327 if (!valid_arglist)
4328 return NULL_RTX;
4329
4330 /* Compute the argument. */
4331 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4332
4333 /* Compute the alignment. */
4334 align = (alloca_with_align
4335 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4336 : BIGGEST_ALIGNMENT);
4337
4338 /* Allocate the desired space. */
4339 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4340 result = convert_memory_address (ptr_mode, result);
4341
4342 return result;
4343 }
4344
4345 /* Expand a call to bswap builtin in EXP.
4346 Return NULL_RTX if a normal call should be emitted rather than expanding the
4347 function in-line. If convenient, the result should be placed in TARGET.
4348 SUBTARGET may be used as the target for computing one of EXP's operands. */
4349
4350 static rtx
4351 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4352 rtx subtarget)
4353 {
4354 tree arg;
4355 rtx op0;
4356
4357 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4358 return NULL_RTX;
4359
4360 arg = CALL_EXPR_ARG (exp, 0);
4361 op0 = expand_expr (arg,
4362 subtarget && GET_MODE (subtarget) == target_mode
4363 ? subtarget : NULL_RTX,
4364 target_mode, EXPAND_NORMAL);
4365 if (GET_MODE (op0) != target_mode)
4366 op0 = convert_to_mode (target_mode, op0, 1);
4367
4368 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4369
4370 gcc_assert (target);
4371
4372 return convert_to_mode (target_mode, target, 1);
4373 }
4374
4375 /* Expand a call to a unary builtin in EXP.
4376 Return NULL_RTX if a normal call should be emitted rather than expanding the
4377 function in-line. If convenient, the result should be placed in TARGET.
4378 SUBTARGET may be used as the target for computing one of EXP's operands. */
4379
4380 static rtx
4381 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4382 rtx subtarget, optab op_optab)
4383 {
4384 rtx op0;
4385
4386 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4387 return NULL_RTX;
4388
4389 /* Compute the argument. */
4390 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4391 (subtarget
4392 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4393 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4394 VOIDmode, EXPAND_NORMAL);
4395 /* Compute op, into TARGET if possible.
4396 Set TARGET to wherever the result comes back. */
4397 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4398 op_optab, op0, target, op_optab != clrsb_optab);
4399 gcc_assert (target);
4400
4401 return convert_to_mode (target_mode, target, 0);
4402 }
4403
4404 /* Expand a call to __builtin_expect. We just return our argument
4405 as the builtin_expect semantics should already have been applied by
4406 the tree branch prediction pass. */
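/* Typical source-level use (the names are purely illustrative):

       if (__builtin_expect (ptr == NULL, 0))
         handle_rare_error ();

   marks the branch as unlikely; by the time expansion runs the hint has
   normally already been consumed, so only the first argument is returned
   here.  */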
4407
4408 static rtx
4409 expand_builtin_expect (tree exp, rtx target)
4410 {
4411 tree arg;
4412
4413 if (call_expr_nargs (exp) < 2)
4414 return const0_rtx;
4415 arg = CALL_EXPR_ARG (exp, 0);
4416
4417 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4418 /* When guessing was done, the hints should be already stripped away. */
4419 gcc_assert (!flag_guess_branch_prob
4420 || optimize == 0 || seen_error ());
4421 return target;
4422 }
4423
4424 /* Expand a call to __builtin_assume_aligned. We just return our first
4425 argument as the builtin_assume_aligned semantics should already have
4426 been applied by CCP. */
4427
4428 static rtx
4429 expand_builtin_assume_aligned (tree exp, rtx target)
4430 {
4431 if (call_expr_nargs (exp) < 2)
4432 return const0_rtx;
4433 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4434 EXPAND_NORMAL);
4435 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4436 && (call_expr_nargs (exp) < 3
4437 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4438 return target;
4439 }
4440
4441 void
4442 expand_builtin_trap (void)
4443 {
4444 if (targetm.have_trap ())
4445 {
4446 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4447 /* For trap insns when not accumulating outgoing args force
4448 REG_ARGS_SIZE note to prevent crossjumping of calls with
4449 different args sizes. */
4450 if (!ACCUMULATE_OUTGOING_ARGS)
4451 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4452 }
4453 else
4454 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4455 emit_barrier ();
4456 }
4457
4458 /* Expand a call to __builtin_unreachable. We do nothing except emit
4459 a barrier saying that control flow will not pass here.
4460
4461 It is the responsibility of the program being compiled to ensure
4462 that control flow never reaches __builtin_unreachable. */
4463 static void
4464 expand_builtin_unreachable (void)
4465 {
4466 emit_barrier ();
4467 }
4468
4469 /* Expand EXP, a call to fabs, fabsf or fabsl.
4470 Return NULL_RTX if a normal call should be emitted rather than expanding
4471 the function inline. If convenient, the result should be placed
4472 in TARGET. SUBTARGET may be used as the target for computing
4473 the operand. */
4474
4475 static rtx
4476 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4477 {
4478 machine_mode mode;
4479 tree arg;
4480 rtx op0;
4481
4482 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4483 return NULL_RTX;
4484
4485 arg = CALL_EXPR_ARG (exp, 0);
4486 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4487 mode = TYPE_MODE (TREE_TYPE (arg));
4488 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4489 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4490 }
4491
4492 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4493 Return NULL if a normal call should be emitted rather than expanding the
4494 function inline. If convenient, the result should be placed in TARGET.
4495 SUBTARGET may be used as the target for computing the operand. */
4496
4497 static rtx
4498 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4499 {
4500 rtx op0, op1;
4501 tree arg;
4502
4503 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4504 return NULL_RTX;
4505
4506 arg = CALL_EXPR_ARG (exp, 0);
4507 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4508
4509 arg = CALL_EXPR_ARG (exp, 1);
4510 op1 = expand_normal (arg);
4511
4512 return expand_copysign (op0, op1, target);
4513 }
4514
4515 /* Expand a call to __builtin___clear_cache. */
4516
4517 static rtx
4518 expand_builtin___clear_cache (tree exp)
4519 {
4520 if (!targetm.code_for_clear_cache)
4521 {
4522 #ifdef CLEAR_INSN_CACHE
4523 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4524 does something. Just do the default expansion to a call to
4525 __clear_cache(). */
4526 return NULL_RTX;
4527 #else
4528 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4529 does nothing. There is no need to call it. Do nothing. */
4530 return const0_rtx;
4531 #endif /* CLEAR_INSN_CACHE */
4532 }
4533
4534 /* We have a "clear_cache" insn, and it will handle everything. */
4535 tree begin, end;
4536 rtx begin_rtx, end_rtx;
4537
4538 /* We must not expand to a library call. If we did, any
4539 fallback library function in libgcc that might contain a call to
4540 __builtin___clear_cache() would recurse infinitely. */
4541 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4542 {
4543 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4544 return const0_rtx;
4545 }
4546
4547 if (targetm.have_clear_cache ())
4548 {
4549 struct expand_operand ops[2];
4550
4551 begin = CALL_EXPR_ARG (exp, 0);
4552 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4553
4554 end = CALL_EXPR_ARG (exp, 1);
4555 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4556
4557 create_address_operand (&ops[0], begin_rtx);
4558 create_address_operand (&ops[1], end_rtx);
4559 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4560 return const0_rtx;
4561 }
4562 return const0_rtx;
4563 }
4564
4565 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4566
4567 static rtx
4568 round_trampoline_addr (rtx tramp)
4569 {
4570 rtx temp, addend, mask;
4571
4572 /* If we don't need too much alignment, we'll have been guaranteed
4573 proper alignment by get_trampoline_type. */
4574 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4575 return tramp;
4576
4577 /* Round address up to desired boundary. */
4578 temp = gen_reg_rtx (Pmode);
4579 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4580 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4581
4582 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4583 temp, 0, OPTAB_LIB_WIDEN);
4584 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4585 temp, 0, OPTAB_LIB_WIDEN);
4586
4587 return tramp;
4588 }
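/* The rounding above is the usual (addr + unit - 1) & -unit idiom.  For
   example (hypothetical numbers): with a TRAMPOLINE_ALIGNMENT of 64 bits
   the unit is 8 bytes, so an address of 0x1003 becomes
   (0x1003 + 7) & -8 == 0x1008.  */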
4589
4590 static rtx
4591 expand_builtin_init_trampoline (tree exp, bool onstack)
4592 {
4593 tree t_tramp, t_func, t_chain;
4594 rtx m_tramp, r_tramp, r_chain, tmp;
4595
4596 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4597 POINTER_TYPE, VOID_TYPE))
4598 return NULL_RTX;
4599
4600 t_tramp = CALL_EXPR_ARG (exp, 0);
4601 t_func = CALL_EXPR_ARG (exp, 1);
4602 t_chain = CALL_EXPR_ARG (exp, 2);
4603
4604 r_tramp = expand_normal (t_tramp);
4605 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4606 MEM_NOTRAP_P (m_tramp) = 1;
4607
4608 /* If ONSTACK, the TRAMP argument should be the address of a field
4609 within the local function's FRAME decl. Either way, let's see if
4610 we can fill in the MEM_ATTRs for this memory. */
4611 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4612 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4613
4614 /* Creator of a heap trampoline is responsible for making sure the
4615 address is aligned to at least STACK_BOUNDARY. Normally malloc
4616 will ensure this anyhow. */
4617 tmp = round_trampoline_addr (r_tramp);
4618 if (tmp != r_tramp)
4619 {
4620 m_tramp = change_address (m_tramp, BLKmode, tmp);
4621 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4622 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4623 }
4624
4625 /* The FUNC argument should be the address of the nested function.
4626 Extract the actual function decl to pass to the hook. */
4627 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4628 t_func = TREE_OPERAND (t_func, 0);
4629 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4630
4631 r_chain = expand_normal (t_chain);
4632
4633 /* Generate insns to initialize the trampoline. */
4634 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4635
4636 if (onstack)
4637 {
4638 trampolines_created = 1;
4639
4640 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4641 "trampoline generated for nested function %qD", t_func);
4642 }
4643
4644 return const0_rtx;
4645 }
4646
4647 static rtx
4648 expand_builtin_adjust_trampoline (tree exp)
4649 {
4650 rtx tramp;
4651
4652 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4653 return NULL_RTX;
4654
4655 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4656 tramp = round_trampoline_addr (tramp);
4657 if (targetm.calls.trampoline_adjust_address)
4658 tramp = targetm.calls.trampoline_adjust_address (tramp);
4659
4660 return tramp;
4661 }
4662
4663 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4664 function. The function first checks whether the back end provides
4665 an insn to implement signbit for the respective mode. If not, it
4666 checks whether the floating point format of the value is such that
4667 the sign bit can be extracted. If that is not the case, error out.
4668 EXP is the expression that is a call to the builtin function; if
4669 convenient, the result should be placed in TARGET. */
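/* A sketch of the common case (IEEE single precision, assuming a 32-bit
   integer result mode): the sign bit sits at position 31, which is within
   the lowpart of the result mode, so signbit (x) reduces to

       (bits_of_x & 0x80000000)

   i.e. a single bitwise AND of the reinterpreted argument with a mask of
   the sign bit.  */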
4670 static rtx
4671 expand_builtin_signbit (tree exp, rtx target)
4672 {
4673 const struct real_format *fmt;
4674 machine_mode fmode, imode, rmode;
4675 tree arg;
4676 int word, bitpos;
4677 enum insn_code icode;
4678 rtx temp;
4679 location_t loc = EXPR_LOCATION (exp);
4680
4681 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4682 return NULL_RTX;
4683
4684 arg = CALL_EXPR_ARG (exp, 0);
4685 fmode = TYPE_MODE (TREE_TYPE (arg));
4686 rmode = TYPE_MODE (TREE_TYPE (exp));
4687 fmt = REAL_MODE_FORMAT (fmode);
4688
4689 arg = builtin_save_expr (arg);
4690
4691 /* Expand the argument yielding a RTX expression. */
4692 temp = expand_normal (arg);
4693
4694 /* Check if the back end provides an insn that handles signbit for the
4695 argument's mode. */
4696 icode = optab_handler (signbit_optab, fmode);
4697 if (icode != CODE_FOR_nothing)
4698 {
4699 rtx_insn *last = get_last_insn ();
4700 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4701 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4702 return target;
4703 delete_insns_since (last);
4704 }
4705
4706 /* For floating point formats without a sign bit, implement signbit
4707 as "ARG < 0.0". */
4708 bitpos = fmt->signbit_ro;
4709 if (bitpos < 0)
4710 {
4711 /* But we can't do this if the format supports signed zero. */
4712 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4713
4714 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4715 build_real (TREE_TYPE (arg), dconst0));
4716 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4717 }
4718
4719 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4720 {
4721 imode = int_mode_for_mode (fmode);
4722 gcc_assert (imode != BLKmode);
4723 temp = gen_lowpart (imode, temp);
4724 }
4725 else
4726 {
4727 imode = word_mode;
4728 /* Handle targets with different FP word orders. */
4729 if (FLOAT_WORDS_BIG_ENDIAN)
4730 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4731 else
4732 word = bitpos / BITS_PER_WORD;
4733 temp = operand_subword_force (temp, word, fmode);
4734 bitpos = bitpos % BITS_PER_WORD;
4735 }
4736
4737 /* Force the intermediate word_mode (or narrower) result into a
4738 register. This avoids attempting to create paradoxical SUBREGs
4739 of floating point modes below. */
4740 temp = force_reg (imode, temp);
4741
4742 /* If the bitpos is within the "result mode" lowpart, the operation
4743 can be implemented with a single bitwise AND. Otherwise, we need
4744 a right shift and an AND. */
4745
4746 if (bitpos < GET_MODE_BITSIZE (rmode))
4747 {
4748 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4749
4750 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4751 temp = gen_lowpart (rmode, temp);
4752 temp = expand_binop (rmode, and_optab, temp,
4753 immed_wide_int_const (mask, rmode),
4754 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4755 }
4756 else
4757 {
4758 /* Perform a logical right shift to place the signbit in the least
4759 significant bit, then truncate the result to the desired mode
4760 and mask just this bit. */
4761 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4762 temp = gen_lowpart (rmode, temp);
4763 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4764 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4765 }
4766
4767 return temp;
4768 }
4769
4770 /* Expand fork or exec calls. TARGET is the desired target of the
4771 call. EXP is the call. FN is the
4772 identifier of the actual function. IGNORE is nonzero if the
4773 value is to be ignored. */
4774
4775 static rtx
4776 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4777 {
4778 tree id, decl;
4779 tree call;
4780
4781 /* If we are not profiling, just call the function. */
4782 if (!profile_arc_flag)
4783 return NULL_RTX;
4784
4785 /* Otherwise call the wrapper. This should be equivalent for the rest of
4786 the compiler, so the code does not diverge, and the wrapper may run the
4787 code necessary for keeping the profiling sane. */
4788
4789 switch (DECL_FUNCTION_CODE (fn))
4790 {
4791 case BUILT_IN_FORK:
4792 id = get_identifier ("__gcov_fork");
4793 break;
4794
4795 case BUILT_IN_EXECL:
4796 id = get_identifier ("__gcov_execl");
4797 break;
4798
4799 case BUILT_IN_EXECV:
4800 id = get_identifier ("__gcov_execv");
4801 break;
4802
4803 case BUILT_IN_EXECLP:
4804 id = get_identifier ("__gcov_execlp");
4805 break;
4806
4807 case BUILT_IN_EXECLE:
4808 id = get_identifier ("__gcov_execle");
4809 break;
4810
4811 case BUILT_IN_EXECVP:
4812 id = get_identifier ("__gcov_execvp");
4813 break;
4814
4815 case BUILT_IN_EXECVE:
4816 id = get_identifier ("__gcov_execve");
4817 break;
4818
4819 default:
4820 gcc_unreachable ();
4821 }
4822
4823 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4824 FUNCTION_DECL, id, TREE_TYPE (fn));
4825 DECL_EXTERNAL (decl) = 1;
4826 TREE_PUBLIC (decl) = 1;
4827 DECL_ARTIFICIAL (decl) = 1;
4828 TREE_NOTHROW (decl) = 1;
4829 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4830 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4831 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4832 return expand_call (call, target, ignore);
4833 }
4834
4835
4836 \f
4837 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4838 the pointer in these functions is void*, the tree optimizers may remove
4839 casts. The mode computed in expand_builtin isn't reliable either, due
4840 to __sync_bool_compare_and_swap.
4841
4842 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4843 group of builtins. This gives us log2 of the mode size. */
4844
4845 static inline machine_mode
4846 get_builtin_sync_mode (int fcode_diff)
4847 {
4848 /* The size is not negotiable, so ask not to get BLKmode in return
4849 if the target indicates that a smaller size would be better. */
4850 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4851 }
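/* Example (assuming the usual 8-bit BITS_PER_UNIT): the _1, _2, _4, _8
   and _16 members of a group have consecutive codes, so FCODE_DIFF of 2
   names the 4-byte variant and BITS_PER_UNIT << 2 == 32 requests a
   32-bit integer mode such as SImode.  */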
4852
4853 /* Expand the memory expression LOC and return the appropriate memory operand
4854 for the builtin_sync operations. */
4855
4856 static rtx
4857 get_builtin_sync_mem (tree loc, machine_mode mode)
4858 {
4859 rtx addr, mem;
4860
4861 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4862 addr = convert_memory_address (Pmode, addr);
4863
4864 /* Note that we explicitly do not want any alias information for this
4865 memory, so that we kill all other live memories. Otherwise we don't
4866 satisfy the full barrier semantics of the intrinsic. */
4867 mem = validize_mem (gen_rtx_MEM (mode, addr));
4868
4869 /* The alignment must be at least that of the mode. */
4870 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4871 get_pointer_alignment (loc)));
4872 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4873 MEM_VOLATILE_P (mem) = 1;
4874
4875 return mem;
4876 }
4877
4878 /* Make sure an argument is in the right mode.
4879 EXP is the tree argument.
4880 MODE is the mode it should be in. */
4881
4882 static rtx
4883 expand_expr_force_mode (tree exp, machine_mode mode)
4884 {
4885 rtx val;
4886 machine_mode old_mode;
4887
4888 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4889 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4890 of CONST_INTs, where we know the old_mode only from the call argument. */
4891
4892 old_mode = GET_MODE (val);
4893 if (old_mode == VOIDmode)
4894 old_mode = TYPE_MODE (TREE_TYPE (exp));
4895 val = convert_modes (mode, old_mode, val, 1);
4896 return val;
4897 }
4898
4899
4900 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4901 EXP is the CALL_EXPR. CODE is the rtx code
4902 that corresponds to the arithmetic or logical operation from the name;
4903 an exception here is that NOT actually means NAND. TARGET is an optional
4904 place for us to store the results; AFTER is true if this is the
4905 fetch_and_xxx form. */
4906
4907 static rtx
4908 expand_builtin_sync_operation (machine_mode mode, tree exp,
4909 enum rtx_code code, bool after,
4910 rtx target)
4911 {
4912 rtx val, mem;
4913 location_t loc = EXPR_LOCATION (exp);
4914
4915 if (code == NOT && warn_sync_nand)
4916 {
4917 tree fndecl = get_callee_fndecl (exp);
4918 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4919
4920 static bool warned_f_a_n, warned_n_a_f;
4921
4922 switch (fcode)
4923 {
4924 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4925 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4926 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4927 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4928 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4929 if (warned_f_a_n)
4930 break;
4931
4932 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4933 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4934 warned_f_a_n = true;
4935 break;
4936
4937 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4938 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4939 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4940 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4941 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4942 if (warned_n_a_f)
4943 break;
4944
4945 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4946 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4947 warned_n_a_f = true;
4948 break;
4949
4950 default:
4951 gcc_unreachable ();
4952 }
4953 }
4954
4955 /* Expand the operands. */
4956 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4957 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4958
4959 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4960 after);
4961 }
4962
4963 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4964 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4965 true if this is the boolean form. TARGET is a place for us to store the
4966 results; this is NOT optional if IS_BOOL is true. */
4967
4968 static rtx
4969 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4970 bool is_bool, rtx target)
4971 {
4972 rtx old_val, new_val, mem;
4973 rtx *pbool, *poval;
4974
4975 /* Expand the operands. */
4976 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4977 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4978 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4979
4980 pbool = poval = NULL;
4981 if (target != const0_rtx)
4982 {
4983 if (is_bool)
4984 pbool = &target;
4985 else
4986 poval = &target;
4987 }
4988 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4989 false, MEMMODEL_SYNC_SEQ_CST,
4990 MEMMODEL_SYNC_SEQ_CST))
4991 return NULL_RTX;
4992
4993 return target;
4994 }
4995
4996 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4997 general form is actually an atomic exchange, and some targets only
4998 support a reduced form with the second argument being a constant 1.
4999 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5000 the results. */
5001
5002 static rtx
5003 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5004 rtx target)
5005 {
5006 rtx val, mem;
5007
5008 /* Expand the operands. */
5009 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5010 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5011
5012 return expand_sync_lock_test_and_set (target, mem, val);
5013 }
5014
5015 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5016
5017 static void
5018 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5019 {
5020 rtx mem;
5021
5022 /* Expand the operands. */
5023 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5024
5025 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5026 }
5027
5028 /* Given an integer representing an ``enum memmodel'', verify its
5029 correctness and return the memory model enum. */
5030
5031 static enum memmodel
5032 get_memmodel (tree exp)
5033 {
5034 rtx op;
5035 unsigned HOST_WIDE_INT val;
5036
5037 /* If the parameter is not a constant, it's a run time value so we'll just
5038 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5039 if (TREE_CODE (exp) != INTEGER_CST)
5040 return MEMMODEL_SEQ_CST;
5041
5042 op = expand_normal (exp);
5043
5044 val = INTVAL (op);
5045 if (targetm.memmodel_check)
5046 val = targetm.memmodel_check (val);
5047 else if (val & ~MEMMODEL_MASK)
5048 {
5049 warning (OPT_Winvalid_memory_model,
5050 "Unknown architecture specifier in memory model to builtin.");
5051 return MEMMODEL_SEQ_CST;
5052 }
5053
5054 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5055 if (memmodel_base (val) >= MEMMODEL_LAST)
5056 {
5057 warning (OPT_Winvalid_memory_model,
5058 "invalid memory model argument to builtin");
5059 return MEMMODEL_SEQ_CST;
5060 }
5061
5062 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5063 be conservative and promote consume to acquire. */
5064 if (val == MEMMODEL_CONSUME)
5065 val = MEMMODEL_ACQUIRE;
5066
5067 return (enum memmodel) val;
5068 }
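/* For example: a constant __ATOMIC_CONSUME argument is promoted to
   MEMMODEL_ACQUIRE here, and a model argument that is not a compile-time
   constant is simply treated as MEMMODEL_SEQ_CST.  */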
5069
5070 /* Expand the __atomic_exchange intrinsic:
5071 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5072 EXP is the CALL_EXPR.
5073 TARGET is an optional place for us to store the results. */
5074
5075 static rtx
5076 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5077 {
5078 rtx val, mem;
5079 enum memmodel model;
5080
5081 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5082
5083 if (!flag_inline_atomics)
5084 return NULL_RTX;
5085
5086 /* Expand the operands. */
5087 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5088 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5089
5090 return expand_atomic_exchange (target, mem, val, model);
5091 }
5092
5093 /* Expand the __atomic_compare_exchange intrinsic:
5094 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5095 TYPE desired, BOOL weak,
5096 enum memmodel success,
5097 enum memmodel failure)
5098 EXP is the CALL_EXPR.
5099 TARGET is an optional place for us to store the results. */
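/* A typical source-level use is a compare-and-swap loop; an illustrative
   sketch, with P a hypothetical pointer to int:

     int expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   On failure the builtin writes the current value of *P back into
   EXPECTED, which is why the expansion below conditionally stores
   OLDVAL into the EXPECT memory.  */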
5100
5101 static rtx
5102 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5103 rtx target)
5104 {
5105 rtx expect, desired, mem, oldval;
5106 rtx_code_label *label;
5107 enum memmodel success, failure;
5108 tree weak;
5109 bool is_weak;
5110
5111 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5112 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5113
5114 if (failure > success)
5115 {
5116 warning (OPT_Winvalid_memory_model,
5117 "failure memory model cannot be stronger than success memory "
5118 "model for %<__atomic_compare_exchange%>");
5119 success = MEMMODEL_SEQ_CST;
5120 }
5121
5122 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5123 {
5124 warning (OPT_Winvalid_memory_model,
5125 "invalid failure memory model for "
5126 "%<__atomic_compare_exchange%>");
5127 failure = MEMMODEL_SEQ_CST;
5128 success = MEMMODEL_SEQ_CST;
5129 }
5130
5131
5132 if (!flag_inline_atomics)
5133 return NULL_RTX;
5134
5135 /* Expand the operands. */
5136 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5137
5138 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5139 expect = convert_memory_address (Pmode, expect);
5140 expect = gen_rtx_MEM (mode, expect);
5141 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5142
5143 weak = CALL_EXPR_ARG (exp, 3);
5144 is_weak = false;
5145 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5146 is_weak = true;
5147
5148 if (target == const0_rtx)
5149 target = NULL;
5150
5151 /* Lest the rtl backend create a race condition with an improper store
5152 to memory, always create a new pseudo for OLDVAL. */
5153 oldval = NULL;
5154
5155 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5156 is_weak, success, failure))
5157 return NULL_RTX;
5158
5159 /* Conditionally store back to EXPECT, lest we create a race condition
5160 with an improper store to memory. */
5161 /* ??? With a rearrangement of atomics at the gimple level, we could handle
5162 the normal case where EXPECT is totally private, i.e. a register, at
5163 which point the store could be unconditional. */
5164 label = gen_label_rtx ();
5165 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5166 GET_MODE (target), 1, label);
5167 emit_move_insn (expect, oldval);
5168 emit_label (label);
5169
5170 return target;
5171 }
5172
5173 /* Expand the __atomic_load intrinsic:
5174 TYPE __atomic_load (TYPE *object, enum memmodel)
5175 EXP is the CALL_EXPR.
5176 TARGET is an optional place for us to store the results. */
5177
5178 static rtx
5179 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5180 {
5181 rtx mem;
5182 enum memmodel model;
5183
5184 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5185 if (is_mm_release (model) || is_mm_acq_rel (model))
5186 {
5187 warning (OPT_Winvalid_memory_model,
5188 "invalid memory model for %<__atomic_load%>");
5189 model = MEMMODEL_SEQ_CST;
5190 }
5191
5192 if (!flag_inline_atomics)
5193 return NULL_RTX;
5194
5195 /* Expand the operand. */
5196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5197
5198 return expand_atomic_load (target, mem, model);
5199 }
5200
5201
5202 /* Expand the __atomic_store intrinsic:
5203 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5204 EXP is the CALL_EXPR. The store produces no result; NULL_RTX is
5205 returned if the store cannot be expanded inline. */
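/* Only relaxed, release and seq_cst stores are valid; for example

     __atomic_store_n (&x, 0, __ATOMIC_RELEASE);

   is accepted, while an __ATOMIC_ACQUIRE store is diagnosed below and
   replaced by seq_cst (X is just an illustrative variable).  */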
5206
5207 static rtx
5208 expand_builtin_atomic_store (machine_mode mode, tree exp)
5209 {
5210 rtx mem, val;
5211 enum memmodel model;
5212
5213 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5214 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5215 || is_mm_release (model)))
5216 {
5217 warning (OPT_Winvalid_memory_model,
5218 "invalid memory model for %<__atomic_store%>");
5219 model = MEMMODEL_SEQ_CST;
5220 }
5221
5222 if (!flag_inline_atomics)
5223 return NULL_RTX;
5224
5225 /* Expand the operands. */
5226 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5227 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5228
5229 return expand_atomic_store (mem, val, model, false);
5230 }
5231
5232 /* Expand the __atomic_fetch_XXX intrinsic:
5233 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5234 EXP is the CALL_EXPR.
5235 TARGET is an optional place for us to store the results.
5236 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for the NAND forms).
5237 FETCH_AFTER is true if returning the result of the operation.
5238 FETCH_AFTER is false if returning the value before the operation.
5239 IGNORE is true if the result is not used.
5240 EXT_CALL is the correct builtin for an external call if this cannot be
5241 resolved to an instruction sequence. */
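/* For example, __atomic_add_fetch (p, v, m) arrives here with CODE == PLUS,
   FETCH_AFTER == true and the corresponding BUILT_IN_ATOMIC_FETCH_ADD_* as
   EXT_CALL.  If only the external fetch-and-add routine can be used, it
   returns the old value, so the trailing code below computes

     result = old <op> v        for PLUS, MINUS, AND, XOR and IOR
     result = ~(old & v)        for NOT (the *_nand_fetch forms)

   to recover the post-operation value (an illustrative sketch; OLD and V
   stand for the fetched value and the operand).  */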
5242
5243 static rtx
5244 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5245 enum rtx_code code, bool fetch_after,
5246 bool ignore, enum built_in_function ext_call)
5247 {
5248 rtx val, mem, ret;
5249 enum memmodel model;
5250 tree fndecl;
5251 tree addr;
5252
5253 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5254
5255 /* Expand the operands. */
5256 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5257 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5258
5259 /* Only try generating instructions if inlining is turned on. */
5260 if (flag_inline_atomics)
5261 {
5262 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5263 if (ret)
5264 return ret;
5265 }
5266
5267 /* If no alternative library routine is needed, return and let the caller emit the normal library call. */
5268 if (ext_call == BUILT_IN_NONE)
5269 return NULL_RTX;
5270
5271 /* Change the call to the specified function. */
5272 fndecl = get_callee_fndecl (exp);
5273 addr = CALL_EXPR_FN (exp);
5274 STRIP_NOPS (addr);
5275
5276 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5277 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5278
5279 /* Expand the call here so we can emit trailing code. */
5280 ret = expand_call (exp, target, ignore);
5281
5282 /* Restore the original function, just in case it matters. */
5283 TREE_OPERAND (addr, 0) = fndecl;
5284
5285 /* Then issue the arithmetic correction to return the right result. */
5286 if (!ignore)
5287 {
5288 if (code == NOT)
5289 {
5290 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5291 OPTAB_LIB_WIDEN);
5292 ret = expand_simple_unop (mode, NOT, ret, target, true);
5293 }
5294 else
5295 ret = expand_simple_binop (mode, code, ret, val, target, true,
5296 OPTAB_LIB_WIDEN);
5297 }
5298 return ret;
5299 }
5300
5301 /* Expand an atomic clear operation.
5302 void __atomic_clear (BOOL *obj, enum memmodel)
5303 EXP is the call expression. */
5304
5305 static rtx
5306 expand_builtin_atomic_clear (tree exp)
5307 {
5308 machine_mode mode;
5309 rtx mem, ret;
5310 enum memmodel model;
5311
5312 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5313 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5314 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5315
5316 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5317 {
5318 warning (OPT_Winvalid_memory_model,
5319 "invalid memory model for %<__atomic_store%>");
5320 model = MEMMODEL_SEQ_CST;
5321 }
5322
5323 /* Try issuing an __atomic_store, allowing a fallback to a __sync_lock_release
5324 pattern; if both fail, emit a plain store here. The only way this can
5325 fail is if the bool type is larger than a word size. Unlikely, but
5326 handle it anyway for completeness. Assume a single threaded model since
5327 there is no atomic support in this case, and no barriers are required. */
5328 ret = expand_atomic_store (mem, const0_rtx, model, true);
5329 if (!ret)
5330 emit_move_insn (mem, const0_rtx);
5331 return const0_rtx;
5332 }
5333
5334 /* Expand an atomic test_and_set operation.
5335 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5336 EXP is the call expression. */
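/* These two builtins pair up as a minimal spin flag; an illustrative
   sketch of the intended source-level use (BUSY is hypothetical):

     static bool busy;
     while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&busy, __ATOMIC_RELEASE);
   */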
5337
5338 static rtx
5339 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5340 {
5341 rtx mem;
5342 enum memmodel model;
5343 machine_mode mode;
5344
5345 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5346 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5347 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5348
5349 return expand_atomic_test_and_set (target, mem, model);
5350 }
5351
5352
5353 /* Return boolean_true_node if an object of size ARG0, optionally at address ARG1, is
5354 always lock free on this architecture; otherwise boolean_false_node, or NULL_TREE if ARG0 is not constant. If ARG1 is NULL, use typical alignment for size ARG0. */
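/* For instance, __atomic_always_lock_free (sizeof (int), 0) folds to true
   on targets that provide a compare-and-swap pattern for the corresponding
   integer mode, while passing the address of an under-aligned object makes
   the result false (illustrative only; the outcome is target dependent).  */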
5355
5356 static tree
5357 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5358 {
5359 int size;
5360 machine_mode mode;
5361 unsigned int mode_align, type_align;
5362
5363 if (TREE_CODE (arg0) != INTEGER_CST)
5364 return NULL_TREE;
5365
5366 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5367 mode = mode_for_size (size, MODE_INT, 0);
5368 mode_align = GET_MODE_ALIGNMENT (mode);
5369
5370 if (TREE_CODE (arg1) == INTEGER_CST)
5371 {
5372 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5373
5374 /* Either this argument is null, or it's a fake pointer encoding
5375 the alignment of the object. */
5376 val = val & -val;
5377 val *= BITS_PER_UNIT;
5378
5379 if (val == 0 || mode_align < val)
5380 type_align = mode_align;
5381 else
5382 type_align = val;
5383 }
5384 else
5385 {
5386 tree ttype = TREE_TYPE (arg1);
5387
5388 /* This function is usually invoked and folded immediately by the front
5389 end before anything else has a chance to look at it. The pointer
5390 parameter at this point is usually cast to a void *, so check for that
5391 and look past the cast. */
5392 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5393 && VOID_TYPE_P (TREE_TYPE (ttype)))
5394 arg1 = TREE_OPERAND (arg1, 0);
5395
5396 ttype = TREE_TYPE (arg1);
5397 gcc_assert (POINTER_TYPE_P (ttype));
5398
5399 /* Get the underlying type of the object. */
5400 ttype = TREE_TYPE (ttype);
5401 type_align = TYPE_ALIGN (ttype);
5402 }
5403
5404 /* If the object has smaller alignment, the lock free routines cannot
5405 be used. */
5406 if (type_align < mode_align)
5407 return boolean_false_node;
5408
5409 /* Check if a compare_and_swap pattern exists for the mode which represents
5410 the required size. The pattern is not allowed to fail, so the existence
5411 of the pattern indicates support is present. */
5412 if (can_compare_and_swap_p (mode, true))
5413 return boolean_true_node;
5414 else
5415 return boolean_false_node;
5416 }
5417
5418 /* Expand a call EXP to __atomic_always_lock_free. Return const1_rtx if the
5419 object described by the call will always generate lock free instructions.
5420 The first argument is the size of the object, and the second is a pointer
5421 to the object itself. If NULL is passed for the object, the result is
5422 based on typical alignment for an object of the specified size. Otherwise
5423 return const0_rtx. */
5424
5425 static rtx
5426 expand_builtin_atomic_always_lock_free (tree exp)
5427 {
5428 tree size;
5429 tree arg0 = CALL_EXPR_ARG (exp, 0);
5430 tree arg1 = CALL_EXPR_ARG (exp, 1);
5431
5432 if (TREE_CODE (arg0) != INTEGER_CST)
5433 {
5434 error ("non-constant argument 1 to __atomic_always_lock_free");
5435 return const0_rtx;
5436 }
5437
5438 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5439 if (size == boolean_true_node)
5440 return const1_rtx;
5441 return const0_rtx;
5442 }
5443
5444 /* Return boolean_true_node if it can be determined that the object ARG1 of size
5445 ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5446
5447 static tree
5448 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5449 {
5450 if (!flag_inline_atomics)
5451 return NULL_TREE;
5452
5453 /* If it isn't always lock free, don't generate a result. */
5454 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5455 return boolean_true_node;
5456
5457 return NULL_TREE;
5458 }
5459
5460 /* Expand a call EXP to __atomic_is_lock_free. Return const1_rtx if the object
5461 described by the call is known to be lock free at compile time. The first
5462 argument is the size of the object, and the second is a pointer to the
5463 object itself. If NULL is passed for the object, the result is based on
5464 typical alignment for an object of the specified size. Otherwise return
5465 NULL_RTX, so that a runtime library call is emitted. */
5466
5467 static rtx
5468 expand_builtin_atomic_is_lock_free (tree exp)
5469 {
5470 tree size;
5471 tree arg0 = CALL_EXPR_ARG (exp, 0);
5472 tree arg1 = CALL_EXPR_ARG (exp, 1);
5473
5474 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5475 {
5476 error ("non-integer argument 1 to __atomic_is_lock_free");
5477 return NULL_RTX;
5478 }
5479
5480 if (!flag_inline_atomics)
5481 return NULL_RTX;
5482
5483 /* If the value is known at compile time, return the RTX for it. */
5484 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5485 if (size == boolean_true_node)
5486 return const1_rtx;
5487
5488 return NULL_RTX;
5489 }
5490
5491 /* Expand the __atomic_thread_fence intrinsic:
5492 void __atomic_thread_fence (enum memmodel)
5493 EXP is the CALL_EXPR. */
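/* For example, __atomic_thread_fence (__ATOMIC_ACQ_REL) simply becomes a
   call to expand_mem_thread_fence with MEMMODEL_ACQ_REL; there is no
   result value.  */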
5494
5495 static void
5496 expand_builtin_atomic_thread_fence (tree exp)
5497 {
5498 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5499 expand_mem_thread_fence (model);
5500 }
5501
5502 /* Expand the __atomic_signal_fence intrinsic:
5503 void __atomic_signal_fence (enum memmodel)
5504 EXP is the CALL_EXPR. */
5505
5506 static void
5507 expand_builtin_atomic_signal_fence (tree exp)
5508 {
5509 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5510 expand_mem_signal_fence (model);
5511 }
5512
5513 /* Expand the __sync_synchronize intrinsic. */
5514
5515 static void
5516 expand_builtin_sync_synchronize (void)
5517 {
5518 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5519 }
5520
5521 static rtx
5522 expand_builtin_thread_pointer (tree exp, rtx target)
5523 {
5524 enum insn_code icode;
5525 if (!validate_arglist (exp, VOID_TYPE))
5526 return const0_rtx;
5527 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5528 if (icode != CODE_FOR_nothing)
5529 {
5530 struct expand_operand op;
5531 /* If the target is not suitable then create a new target. */
5532 if (target == NULL_RTX
5533 || !REG_P (target)
5534 || GET_MODE (target) != Pmode)
5535 target = gen_reg_rtx (Pmode);
5536 create_output_operand (&op, target, Pmode);
5537 expand_insn (icode, 1, &op);
5538 return target;
5539 }
5540 error ("__builtin_thread_pointer is not supported on this target");
5541 return const0_rtx;
5542 }
5543
5544 static void
5545 expand_builtin_set_thread_pointer (tree exp)
5546 {
5547 enum insn_code icode;
5548 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5549 return;
5550 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5551 if (icode != CODE_FOR_nothing)
5552 {
5553 struct expand_operand op;
5554 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5555 Pmode, EXPAND_NORMAL);
5556 create_input_operand (&op, val, Pmode);
5557 expand_insn (icode, 1, &op);
5558 return;
5559 }
5560 error ("__builtin_set_thread_pointer is not supported on this target");
5561 }
5562
5563 \f
5564 /* Emit code to restore the stack pointer from the value saved in VAR. */
5565
5566 static void
5567 expand_stack_restore (tree var)
5568 {
5569 rtx_insn *prev;
5570 rtx sa = expand_normal (var);
5571
5572 sa = convert_memory_address (Pmode, sa);
5573
5574 prev = get_last_insn ();
5575 emit_stack_restore (SAVE_BLOCK, sa);
5576
5577 record_new_stack_level ();
5578
5579 fixup_args_size_notes (prev, get_last_insn (), 0);
5580 }
5581
5582 /* Emit code to save the current value of the stack pointer. */
5583
5584 static rtx
5585 expand_stack_save (void)
5586 {
5587 rtx ret = NULL_RTX;
5588
5589 emit_stack_save (SAVE_BLOCK, &ret);
5590 return ret;
5591 }
5592
5593
5594 /* Expand an expression EXP that calls a built-in function,
5595 with result going to TARGET if that's convenient
5596 (and in mode MODE if that's convenient).
5597 SUBTARGET may be used as the target for computing one of EXP's operands.
5598 IGNORE is nonzero if the value is to be ignored. */
5599
5600 rtx
5601 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5602 int ignore)
5603 {
5604 tree fndecl = get_callee_fndecl (exp);
5605 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5606 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5607 int flags;
5608
5609 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5610 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5611
5612 /* When ASan is enabled, we don't want to expand some memory/string
5613 builtins; we rely on libsanitizer's hooks instead. This allows us to
5614 avoid redundant checks and be sure that possible overflows will be
5615 detected by ASan. */
5616
5617 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5618 return expand_call (exp, target, ignore);
5619
5620 /* When not optimizing, generate calls to library functions for a certain
5621 set of builtins. */
5622 if (!optimize
5623 && !called_as_built_in (fndecl)
5624 && fcode != BUILT_IN_FORK
5625 && fcode != BUILT_IN_EXECL
5626 && fcode != BUILT_IN_EXECV
5627 && fcode != BUILT_IN_EXECLP
5628 && fcode != BUILT_IN_EXECLE
5629 && fcode != BUILT_IN_EXECVP
5630 && fcode != BUILT_IN_EXECVE
5631 && fcode != BUILT_IN_ALLOCA
5632 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5633 && fcode != BUILT_IN_FREE
5634 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5635 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5636 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5637 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5638 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5639 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5640 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5641 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5642 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5643 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5644 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5645 && fcode != BUILT_IN_CHKP_BNDRET)
5646 return expand_call (exp, target, ignore);
5647
5648 /* The built-in function expanders test for target == const0_rtx
5649 to determine whether the function's result will be ignored. */
5650 if (ignore)
5651 target = const0_rtx;
5652
5653 /* If the result of a pure or const built-in function is ignored, and
5654 none of its arguments are volatile, we can avoid expanding the
5655 built-in call and just evaluate the arguments for side-effects. */
5656 if (target == const0_rtx
5657 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5658 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5659 {
5660 bool volatilep = false;
5661 tree arg;
5662 call_expr_arg_iterator iter;
5663
5664 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5665 if (TREE_THIS_VOLATILE (arg))
5666 {
5667 volatilep = true;
5668 break;
5669 }
5670
5671 if (! volatilep)
5672 {
5673 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5674 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5675 return const0_rtx;
5676 }
5677 }
5678
5679 /* expand_builtin_with_bounds is supposed to be used for
5680 instrumented builtin calls. */
5681 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5682
5683 switch (fcode)
5684 {
5685 CASE_FLT_FN (BUILT_IN_FABS):
5686 case BUILT_IN_FABSD32:
5687 case BUILT_IN_FABSD64:
5688 case BUILT_IN_FABSD128:
5689 target = expand_builtin_fabs (exp, target, subtarget);
5690 if (target)
5691 return target;
5692 break;
5693
5694 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5695 target = expand_builtin_copysign (exp, target, subtarget);
5696 if (target)
5697 return target;
5698 break;
5699
5700 /* Just do a normal library call if we were unable to fold
5701 the values. */
5702 CASE_FLT_FN (BUILT_IN_CABS):
5703 break;
5704
5705 CASE_FLT_FN (BUILT_IN_FMA):
5706 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5707 if (target)
5708 return target;
5709 break;
5710
5711 CASE_FLT_FN (BUILT_IN_ILOGB):
5712 if (! flag_unsafe_math_optimizations)
5713 break;
5714 CASE_FLT_FN (BUILT_IN_ISINF):
5715 CASE_FLT_FN (BUILT_IN_FINITE):
5716 case BUILT_IN_ISFINITE:
5717 case BUILT_IN_ISNORMAL:
5718 target = expand_builtin_interclass_mathfn (exp, target);
5719 if (target)
5720 return target;
5721 break;
5722
5723 CASE_FLT_FN (BUILT_IN_ICEIL):
5724 CASE_FLT_FN (BUILT_IN_LCEIL):
5725 CASE_FLT_FN (BUILT_IN_LLCEIL):
5726 CASE_FLT_FN (BUILT_IN_LFLOOR):
5727 CASE_FLT_FN (BUILT_IN_IFLOOR):
5728 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5729 target = expand_builtin_int_roundingfn (exp, target);
5730 if (target)
5731 return target;
5732 break;
5733
5734 CASE_FLT_FN (BUILT_IN_IRINT):
5735 CASE_FLT_FN (BUILT_IN_LRINT):
5736 CASE_FLT_FN (BUILT_IN_LLRINT):
5737 CASE_FLT_FN (BUILT_IN_IROUND):
5738 CASE_FLT_FN (BUILT_IN_LROUND):
5739 CASE_FLT_FN (BUILT_IN_LLROUND):
5740 target = expand_builtin_int_roundingfn_2 (exp, target);
5741 if (target)
5742 return target;
5743 break;
5744
5745 CASE_FLT_FN (BUILT_IN_POWI):
5746 target = expand_builtin_powi (exp, target);
5747 if (target)
5748 return target;
5749 break;
5750
5751 CASE_FLT_FN (BUILT_IN_CEXPI):
5752 target = expand_builtin_cexpi (exp, target);
5753 gcc_assert (target);
5754 return target;
5755
5756 CASE_FLT_FN (BUILT_IN_SIN):
5757 CASE_FLT_FN (BUILT_IN_COS):
5758 if (! flag_unsafe_math_optimizations)
5759 break;
5760 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5761 if (target)
5762 return target;
5763 break;
5764
5765 CASE_FLT_FN (BUILT_IN_SINCOS):
5766 if (! flag_unsafe_math_optimizations)
5767 break;
5768 target = expand_builtin_sincos (exp);
5769 if (target)
5770 return target;
5771 break;
5772
5773 case BUILT_IN_APPLY_ARGS:
5774 return expand_builtin_apply_args ();
5775
5776 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5777 FUNCTION with a copy of the parameters described by
5778 ARGUMENTS, and ARGSIZE. It returns a block of memory
5779 allocated on the stack into which is stored all the registers
5780 that might possibly be used for returning the result of a
5781 function. ARGUMENTS is the value returned by
5782 __builtin_apply_args. ARGSIZE is the number of bytes of
5783 arguments that must be copied. ??? How should this value be
5784 computed? We'll also need a safe worst case value for varargs
5785 functions. */
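/* An illustrative, deliberately simplified use of this family of builtins
   is a transparent forwarding wrapper (TARGET_FN and the worst-case
   argument size 64 are hypothetical placeholders):

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }

   As the ??? note above says, choosing ARGSIZE safely is the hard part.  */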
5786 case BUILT_IN_APPLY:
5787 if (!validate_arglist (exp, POINTER_TYPE,
5788 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5789 && !validate_arglist (exp, REFERENCE_TYPE,
5790 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5791 return const0_rtx;
5792 else
5793 {
5794 rtx ops[3];
5795
5796 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5797 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5798 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5799
5800 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5801 }
5802
5803 /* __builtin_return (RESULT) causes the function to return the
5804 value described by RESULT. RESULT is address of the block of
5805 memory returned by __builtin_apply. */
5806 case BUILT_IN_RETURN:
5807 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5808 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5809 return const0_rtx;
5810
5811 case BUILT_IN_SAVEREGS:
5812 return expand_builtin_saveregs ();
5813
5814 case BUILT_IN_VA_ARG_PACK:
5815 /* All valid uses of __builtin_va_arg_pack () are removed during
5816 inlining. */
5817 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5818 return const0_rtx;
5819
5820 case BUILT_IN_VA_ARG_PACK_LEN:
5821 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5822 inlining. */
5823 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5824 return const0_rtx;
5825
5826 /* Return the address of the first anonymous stack arg. */
5827 case BUILT_IN_NEXT_ARG:
5828 if (fold_builtin_next_arg (exp, false))
5829 return const0_rtx;
5830 return expand_builtin_next_arg ();
5831
5832 case BUILT_IN_CLEAR_CACHE:
5833 target = expand_builtin___clear_cache (exp);
5834 if (target)
5835 return target;
5836 break;
5837
5838 case BUILT_IN_CLASSIFY_TYPE:
5839 return expand_builtin_classify_type (exp);
5840
5841 case BUILT_IN_CONSTANT_P:
5842 return const0_rtx;
5843
5844 case BUILT_IN_FRAME_ADDRESS:
5845 case BUILT_IN_RETURN_ADDRESS:
5846 return expand_builtin_frame_address (fndecl, exp);
5847
5848 /* Returns the address of the area where the structure is returned.
5849 0 otherwise. */
5850 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5851 if (call_expr_nargs (exp) != 0
5852 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5853 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5854 return const0_rtx;
5855 else
5856 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5857
5858 case BUILT_IN_ALLOCA:
5859 case BUILT_IN_ALLOCA_WITH_ALIGN:
5860 /* If the allocation stems from the declaration of a variable-sized
5861 object, it cannot accumulate. */
5862 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5863 if (target)
5864 return target;
5865 break;
5866
5867 case BUILT_IN_STACK_SAVE:
5868 return expand_stack_save ();
5869
5870 case BUILT_IN_STACK_RESTORE:
5871 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5872 return const0_rtx;
5873
5874 case BUILT_IN_BSWAP16:
5875 case BUILT_IN_BSWAP32:
5876 case BUILT_IN_BSWAP64:
5877 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5878 if (target)
5879 return target;
5880 break;
5881
5882 CASE_INT_FN (BUILT_IN_FFS):
5883 target = expand_builtin_unop (target_mode, exp, target,
5884 subtarget, ffs_optab);
5885 if (target)
5886 return target;
5887 break;
5888
5889 CASE_INT_FN (BUILT_IN_CLZ):
5890 target = expand_builtin_unop (target_mode, exp, target,
5891 subtarget, clz_optab);
5892 if (target)
5893 return target;
5894 break;
5895
5896 CASE_INT_FN (BUILT_IN_CTZ):
5897 target = expand_builtin_unop (target_mode, exp, target,
5898 subtarget, ctz_optab);
5899 if (target)
5900 return target;
5901 break;
5902
5903 CASE_INT_FN (BUILT_IN_CLRSB):
5904 target = expand_builtin_unop (target_mode, exp, target,
5905 subtarget, clrsb_optab);
5906 if (target)
5907 return target;
5908 break;
5909
5910 CASE_INT_FN (BUILT_IN_POPCOUNT):
5911 target = expand_builtin_unop (target_mode, exp, target,
5912 subtarget, popcount_optab);
5913 if (target)
5914 return target;
5915 break;
5916
5917 CASE_INT_FN (BUILT_IN_PARITY):
5918 target = expand_builtin_unop (target_mode, exp, target,
5919 subtarget, parity_optab);
5920 if (target)
5921 return target;
5922 break;
5923
5924 case BUILT_IN_STRLEN:
5925 target = expand_builtin_strlen (exp, target, target_mode);
5926 if (target)
5927 return target;
5928 break;
5929
5930 case BUILT_IN_STRCPY:
5931 target = expand_builtin_strcpy (exp, target);
5932 if (target)
5933 return target;
5934 break;
5935
5936 case BUILT_IN_STRNCPY:
5937 target = expand_builtin_strncpy (exp, target);
5938 if (target)
5939 return target;
5940 break;
5941
5942 case BUILT_IN_STPCPY:
5943 target = expand_builtin_stpcpy (exp, target, mode);
5944 if (target)
5945 return target;
5946 break;
5947
5948 case BUILT_IN_MEMCPY:
5949 target = expand_builtin_memcpy (exp, target);
5950 if (target)
5951 return target;
5952 break;
5953
5954 case BUILT_IN_MEMPCPY:
5955 target = expand_builtin_mempcpy (exp, target, mode);
5956 if (target)
5957 return target;
5958 break;
5959
5960 case BUILT_IN_MEMSET:
5961 target = expand_builtin_memset (exp, target, mode);
5962 if (target)
5963 return target;
5964 break;
5965
5966 case BUILT_IN_BZERO:
5967 target = expand_builtin_bzero (exp);
5968 if (target)
5969 return target;
5970 break;
5971
5972 case BUILT_IN_STRCMP:
5973 target = expand_builtin_strcmp (exp, target);
5974 if (target)
5975 return target;
5976 break;
5977
5978 case BUILT_IN_STRNCMP:
5979 target = expand_builtin_strncmp (exp, target, mode);
5980 if (target)
5981 return target;
5982 break;
5983
5984 case BUILT_IN_BCMP:
5985 case BUILT_IN_MEMCMP:
5986 target = expand_builtin_memcmp (exp, target);
5987 if (target)
5988 return target;
5989 break;
5990
5991 case BUILT_IN_SETJMP:
5992 /* This should have been lowered to the builtins below. */
5993 gcc_unreachable ();
5994
5995 case BUILT_IN_SETJMP_SETUP:
5996 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5997 and the receiver label. */
5998 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5999 {
6000 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6001 VOIDmode, EXPAND_NORMAL);
6002 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6003 rtx_insn *label_r = label_rtx (label);
6004
6005 /* This is copied from the handling of non-local gotos. */
6006 expand_builtin_setjmp_setup (buf_addr, label_r);
6007 nonlocal_goto_handler_labels
6008 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6009 nonlocal_goto_handler_labels);
6010 /* ??? Do not let expand_label treat us as such since we would
6011 not want to be both on the list of non-local labels and on
6012 the list of forced labels. */
6013 FORCED_LABEL (label) = 0;
6014 return const0_rtx;
6015 }
6016 break;
6017
6018 case BUILT_IN_SETJMP_RECEIVER:
6019 /* __builtin_setjmp_receiver is passed the receiver label. */
6020 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6021 {
6022 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6023 rtx_insn *label_r = label_rtx (label);
6024
6025 expand_builtin_setjmp_receiver (label_r);
6026 return const0_rtx;
6027 }
6028 break;
6029
6030 /* __builtin_longjmp is passed a pointer to an array of five words.
6031 It's similar to the C library longjmp function but works with
6032 __builtin_setjmp above. */
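/* Sketch of the intended pairing (BUF is the five-word buffer both
   builtins share):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ... normal path, which may call __builtin_longjmp (buf, 1) ...
     else
       ... control resumes here after the longjmp ...

   The second argument of __builtin_longjmp must be the constant 1,
   which is enforced just below.  */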
6033 case BUILT_IN_LONGJMP:
6034 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6035 {
6036 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6037 VOIDmode, EXPAND_NORMAL);
6038 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6039
6040 if (value != const1_rtx)
6041 {
6042 error ("%<__builtin_longjmp%> second argument must be 1");
6043 return const0_rtx;
6044 }
6045
6046 expand_builtin_longjmp (buf_addr, value);
6047 return const0_rtx;
6048 }
6049 break;
6050
6051 case BUILT_IN_NONLOCAL_GOTO:
6052 target = expand_builtin_nonlocal_goto (exp);
6053 if (target)
6054 return target;
6055 break;
6056
6057 /* This updates the setjmp buffer that is its argument with the value
6058 of the current stack pointer. */
6059 case BUILT_IN_UPDATE_SETJMP_BUF:
6060 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6061 {
6062 rtx buf_addr
6063 = expand_normal (CALL_EXPR_ARG (exp, 0));
6064
6065 expand_builtin_update_setjmp_buf (buf_addr);
6066 return const0_rtx;
6067 }
6068 break;
6069
6070 case BUILT_IN_TRAP:
6071 expand_builtin_trap ();
6072 return const0_rtx;
6073
6074 case BUILT_IN_UNREACHABLE:
6075 expand_builtin_unreachable ();
6076 return const0_rtx;
6077
6078 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6079 case BUILT_IN_SIGNBITD32:
6080 case BUILT_IN_SIGNBITD64:
6081 case BUILT_IN_SIGNBITD128:
6082 target = expand_builtin_signbit (exp, target);
6083 if (target)
6084 return target;
6085 break;
6086
6087 /* Various hooks for the DWARF 2 __throw routine. */
6088 case BUILT_IN_UNWIND_INIT:
6089 expand_builtin_unwind_init ();
6090 return const0_rtx;
6091 case BUILT_IN_DWARF_CFA:
6092 return virtual_cfa_rtx;
6093 #ifdef DWARF2_UNWIND_INFO
6094 case BUILT_IN_DWARF_SP_COLUMN:
6095 return expand_builtin_dwarf_sp_column ();
6096 case BUILT_IN_INIT_DWARF_REG_SIZES:
6097 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6098 return const0_rtx;
6099 #endif
6100 case BUILT_IN_FROB_RETURN_ADDR:
6101 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6102 case BUILT_IN_EXTRACT_RETURN_ADDR:
6103 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6104 case BUILT_IN_EH_RETURN:
6105 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6106 CALL_EXPR_ARG (exp, 1));
6107 return const0_rtx;
6108 case BUILT_IN_EH_RETURN_DATA_REGNO:
6109 return expand_builtin_eh_return_data_regno (exp);
6110 case BUILT_IN_EXTEND_POINTER:
6111 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6112 case BUILT_IN_EH_POINTER:
6113 return expand_builtin_eh_pointer (exp);
6114 case BUILT_IN_EH_FILTER:
6115 return expand_builtin_eh_filter (exp);
6116 case BUILT_IN_EH_COPY_VALUES:
6117 return expand_builtin_eh_copy_values (exp);
6118
6119 case BUILT_IN_VA_START:
6120 return expand_builtin_va_start (exp);
6121 case BUILT_IN_VA_END:
6122 return expand_builtin_va_end (exp);
6123 case BUILT_IN_VA_COPY:
6124 return expand_builtin_va_copy (exp);
6125 case BUILT_IN_EXPECT:
6126 return expand_builtin_expect (exp, target);
6127 case BUILT_IN_ASSUME_ALIGNED:
6128 return expand_builtin_assume_aligned (exp, target);
6129 case BUILT_IN_PREFETCH:
6130 expand_builtin_prefetch (exp);
6131 return const0_rtx;
6132
6133 case BUILT_IN_INIT_TRAMPOLINE:
6134 return expand_builtin_init_trampoline (exp, true);
6135 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6136 return expand_builtin_init_trampoline (exp, false);
6137 case BUILT_IN_ADJUST_TRAMPOLINE:
6138 return expand_builtin_adjust_trampoline (exp);
6139
6140 case BUILT_IN_FORK:
6141 case BUILT_IN_EXECL:
6142 case BUILT_IN_EXECV:
6143 case BUILT_IN_EXECLP:
6144 case BUILT_IN_EXECLE:
6145 case BUILT_IN_EXECVP:
6146 case BUILT_IN_EXECVE:
6147 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6148 if (target)
6149 return target;
6150 break;
6151
6152 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6153 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6154 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6155 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6156 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6157 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6158 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6159 if (target)
6160 return target;
6161 break;
6162
6163 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6164 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6165 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6166 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6167 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6168 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6169 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6175 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6176 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6177 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6178 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6179 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6180 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6181 if (target)
6182 return target;
6183 break;
6184
6185 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6186 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6187 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6188 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6189 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6190 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6191 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6192 if (target)
6193 return target;
6194 break;
6195
6196 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6197 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6198 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6199 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6200 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6201 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6202 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6203 if (target)
6204 return target;
6205 break;
6206
6207 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6208 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6209 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6210 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6211 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6212 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6213 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6214 if (target)
6215 return target;
6216 break;
6217
6218 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6219 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6220 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6221 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6222 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6223 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6224 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6225 if (target)
6226 return target;
6227 break;
6228
6229 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6230 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6231 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6232 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6233 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6234 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6235 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6236 if (target)
6237 return target;
6238 break;
6239
6240 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6241 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6242 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6243 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6244 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6245 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6246 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6247 if (target)
6248 return target;
6249 break;
6250
6251 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6252 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6253 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6254 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6255 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6256 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6257 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6258 if (target)
6259 return target;
6260 break;
6261
6262 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6263 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6264 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6265 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6266 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6267 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6268 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6269 if (target)
6270 return target;
6271 break;
6272
6273 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6274 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6275 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6276 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6277 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6278 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6279 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6280 if (target)
6281 return target;
6282 break;
6283
6284 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6285 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6286 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6287 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6288 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6289 if (mode == VOIDmode)
6290 mode = TYPE_MODE (boolean_type_node);
6291 if (!target || !register_operand (target, mode))
6292 target = gen_reg_rtx (mode);
6293
6294 mode = get_builtin_sync_mode
6295 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6296 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6297 if (target)
6298 return target;
6299 break;
6300
6301 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6302 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6303 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6304 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6305 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6306 mode = get_builtin_sync_mode
6307 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6308 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6309 if (target)
6310 return target;
6311 break;
6312
6313 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6314 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6315 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6316 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6317 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6318 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6319 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6320 if (target)
6321 return target;
6322 break;
6323
6324 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6325 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6326 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6327 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6328 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6329 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6330 expand_builtin_sync_lock_release (mode, exp);
6331 return const0_rtx;
6332
6333 case BUILT_IN_SYNC_SYNCHRONIZE:
6334 expand_builtin_sync_synchronize ();
6335 return const0_rtx;
6336
6337 case BUILT_IN_ATOMIC_EXCHANGE_1:
6338 case BUILT_IN_ATOMIC_EXCHANGE_2:
6339 case BUILT_IN_ATOMIC_EXCHANGE_4:
6340 case BUILT_IN_ATOMIC_EXCHANGE_8:
6341 case BUILT_IN_ATOMIC_EXCHANGE_16:
6342 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6343 target = expand_builtin_atomic_exchange (mode, exp, target);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6349 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6350 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6351 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6352 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6353 {
6354 unsigned int nargs, z;
6355 vec<tree, va_gc> *vec;
6356
6357 mode =
6358 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6359 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6360 if (target)
6361 return target;
6362
6363 /* If this is turned into an external library call, the weak parameter
6364 must be dropped to match the expected parameter list. */
6365 nargs = call_expr_nargs (exp);
6366 vec_alloc (vec, nargs - 1);
6367 for (z = 0; z < 3; z++)
6368 vec->quick_push (CALL_EXPR_ARG (exp, z));
6369 /* Skip the boolean weak parameter. */
6370 for (z = 4; z < 6; z++)
6371 vec->quick_push (CALL_EXPR_ARG (exp, z));
6372 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6373 break;
6374 }
6375
6376 case BUILT_IN_ATOMIC_LOAD_1:
6377 case BUILT_IN_ATOMIC_LOAD_2:
6378 case BUILT_IN_ATOMIC_LOAD_4:
6379 case BUILT_IN_ATOMIC_LOAD_8:
6380 case BUILT_IN_ATOMIC_LOAD_16:
6381 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6382 target = expand_builtin_atomic_load (mode, exp, target);
6383 if (target)
6384 return target;
6385 break;
6386
6387 case BUILT_IN_ATOMIC_STORE_1:
6388 case BUILT_IN_ATOMIC_STORE_2:
6389 case BUILT_IN_ATOMIC_STORE_4:
6390 case BUILT_IN_ATOMIC_STORE_8:
6391 case BUILT_IN_ATOMIC_STORE_16:
6392 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6393 target = expand_builtin_atomic_store (mode, exp);
6394 if (target)
6395 return const0_rtx;
6396 break;
6397
6398 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6399 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6400 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6401 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6402 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6403 {
6404 enum built_in_function lib;
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6406 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6407 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6408 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6409 ignore, lib);
6410 if (target)
6411 return target;
6412 break;
6413 }
6414 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6415 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6416 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6417 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6418 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6419 {
6420 enum built_in_function lib;
6421 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6422 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6423 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6424 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6425 ignore, lib);
6426 if (target)
6427 return target;
6428 break;
6429 }
6430 case BUILT_IN_ATOMIC_AND_FETCH_1:
6431 case BUILT_IN_ATOMIC_AND_FETCH_2:
6432 case BUILT_IN_ATOMIC_AND_FETCH_4:
6433 case BUILT_IN_ATOMIC_AND_FETCH_8:
6434 case BUILT_IN_ATOMIC_AND_FETCH_16:
6435 {
6436 enum built_in_function lib;
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6438 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6439 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6440 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6441 ignore, lib);
6442 if (target)
6443 return target;
6444 break;
6445 }
6446 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6447 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6448 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6449 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6450 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6451 {
6452 enum built_in_function lib;
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6454 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6455 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6456 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6457 ignore, lib);
6458 if (target)
6459 return target;
6460 break;
6461 }
6462 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6463 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6464 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6465 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6466 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6467 {
6468 enum built_in_function lib;
6469 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6470 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6471 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6472 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6473 ignore, lib);
6474 if (target)
6475 return target;
6476 break;
6477 }
6478 case BUILT_IN_ATOMIC_OR_FETCH_1:
6479 case BUILT_IN_ATOMIC_OR_FETCH_2:
6480 case BUILT_IN_ATOMIC_OR_FETCH_4:
6481 case BUILT_IN_ATOMIC_OR_FETCH_8:
6482 case BUILT_IN_ATOMIC_OR_FETCH_16:
6483 {
6484 enum built_in_function lib;
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6486 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6487 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6488 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6489 ignore, lib);
6490 if (target)
6491 return target;
6492 break;
6493 }
6494 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6495 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6496 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6497 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6498 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6500 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6501 ignore, BUILT_IN_NONE);
6502 if (target)
6503 return target;
6504 break;
6505
6506 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6507 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6508 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6509 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6510 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6511 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6512 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6513 ignore, BUILT_IN_NONE);
6514 if (target)
6515 return target;
6516 break;
6517
6518 case BUILT_IN_ATOMIC_FETCH_AND_1:
6519 case BUILT_IN_ATOMIC_FETCH_AND_2:
6520 case BUILT_IN_ATOMIC_FETCH_AND_4:
6521 case BUILT_IN_ATOMIC_FETCH_AND_8:
6522 case BUILT_IN_ATOMIC_FETCH_AND_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6524 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6525 ignore, BUILT_IN_NONE);
6526 if (target)
6527 return target;
6528 break;
6529
6530 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6531 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6532 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6533 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6534 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6536 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6537 ignore, BUILT_IN_NONE);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6543 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6544 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6545 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6546 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6547 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6548 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6549 ignore, BUILT_IN_NONE);
6550 if (target)
6551 return target;
6552 break;
6553
6554 case BUILT_IN_ATOMIC_FETCH_OR_1:
6555 case BUILT_IN_ATOMIC_FETCH_OR_2:
6556 case BUILT_IN_ATOMIC_FETCH_OR_4:
6557 case BUILT_IN_ATOMIC_FETCH_OR_8:
6558 case BUILT_IN_ATOMIC_FETCH_OR_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6560 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6561 ignore, BUILT_IN_NONE);
6562 if (target)
6563 return target;
6564 break;
6565
6566 case BUILT_IN_ATOMIC_TEST_AND_SET:
6567 return expand_builtin_atomic_test_and_set (exp, target);
6568
6569 case BUILT_IN_ATOMIC_CLEAR:
6570 return expand_builtin_atomic_clear (exp);
6571
6572 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6573 return expand_builtin_atomic_always_lock_free (exp);
6574
6575 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6576 target = expand_builtin_atomic_is_lock_free (exp);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_ATOMIC_THREAD_FENCE:
6582 expand_builtin_atomic_thread_fence (exp);
6583 return const0_rtx;
6584
6585 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6586 expand_builtin_atomic_signal_fence (exp);
6587 return const0_rtx;
6588
6589 case BUILT_IN_OBJECT_SIZE:
6590 return expand_builtin_object_size (exp);
6591
6592 case BUILT_IN_MEMCPY_CHK:
6593 case BUILT_IN_MEMPCPY_CHK:
6594 case BUILT_IN_MEMMOVE_CHK:
6595 case BUILT_IN_MEMSET_CHK:
6596 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_STRCPY_CHK:
6602 case BUILT_IN_STPCPY_CHK:
6603 case BUILT_IN_STRNCPY_CHK:
6604 case BUILT_IN_STPNCPY_CHK:
6605 case BUILT_IN_STRCAT_CHK:
6606 case BUILT_IN_STRNCAT_CHK:
6607 case BUILT_IN_SNPRINTF_CHK:
6608 case BUILT_IN_VSNPRINTF_CHK:
6609 maybe_emit_chk_warning (exp, fcode);
6610 break;
6611
6612 case BUILT_IN_SPRINTF_CHK:
6613 case BUILT_IN_VSPRINTF_CHK:
6614 maybe_emit_sprintf_chk_warning (exp, fcode);
6615 break;
6616
6617 case BUILT_IN_FREE:
6618 if (warn_free_nonheap_object)
6619 maybe_emit_free_warning (exp);
6620 break;
6621
6622 case BUILT_IN_THREAD_POINTER:
6623 return expand_builtin_thread_pointer (exp, target);
6624
6625 case BUILT_IN_SET_THREAD_POINTER:
6626 expand_builtin_set_thread_pointer (exp);
6627 return const0_rtx;
6628
6629 case BUILT_IN_CILK_DETACH:
6630 expand_builtin_cilk_detach (exp);
6631 return const0_rtx;
6632
6633 case BUILT_IN_CILK_POP_FRAME:
6634 expand_builtin_cilk_pop_frame (exp);
6635 return const0_rtx;
6636
6637 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6638 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6639 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6640 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6641 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6642 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6643 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6644 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6645 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6646 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6647 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6648 /* We allow user CHKP builtins if the Pointer Bounds
6649 Checker is off. */
6650 if (!chkp_function_instrumented_p (current_function_decl))
6651 {
6652 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6653 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6654 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6655 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6656 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6657 return expand_normal (CALL_EXPR_ARG (exp, 0));
6658 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6659 return expand_normal (size_zero_node);
6660 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6661 return expand_normal (size_int (-1));
6662 else
6663 return const0_rtx;
6664 }
6665 /* FALLTHROUGH */
6666
6667 case BUILT_IN_CHKP_BNDMK:
6668 case BUILT_IN_CHKP_BNDSTX:
6669 case BUILT_IN_CHKP_BNDCL:
6670 case BUILT_IN_CHKP_BNDCU:
6671 case BUILT_IN_CHKP_BNDLDX:
6672 case BUILT_IN_CHKP_BNDRET:
6673 case BUILT_IN_CHKP_INTERSECT:
6674 case BUILT_IN_CHKP_NARROW:
6675 case BUILT_IN_CHKP_EXTRACT_LOWER:
6676 case BUILT_IN_CHKP_EXTRACT_UPPER:
6677 /* A software implementation of the Pointer Bounds Checker is not yet
6678 implemented; target support is required. */
6679 error ("Your target platform does not support -fcheck-pointer-bounds");
6680 break;
6681
6682 case BUILT_IN_ACC_ON_DEVICE:
6683 /* Do a library call if we failed to expand the builtin when
6684 folding. */
6685 break;
6686
6687 default: /* Just do a library call if this is an unknown builtin. */
6688 break;
6689 }
6690
6691 /* The switch statement above can drop through to cause the function
6692 to be called normally. */
6693 return expand_call (exp, target, ignore);
6694 }
6695
6696 /* Similar to expand_builtin but is used for instrumented calls. */
6697
6698 rtx
6699 expand_builtin_with_bounds (tree exp, rtx target,
6700 rtx subtarget ATTRIBUTE_UNUSED,
6701 machine_mode mode, int ignore)
6702 {
6703 tree fndecl = get_callee_fndecl (exp);
6704 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6705
6706 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6707
6708 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6709 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6710
6711 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6712 && fcode < END_CHKP_BUILTINS);
6713
6714 switch (fcode)
6715 {
6716 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6717 target = expand_builtin_memcpy_with_bounds (exp, target);
6718 if (target)
6719 return target;
6720 break;
6721
6722 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6723 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6724 if (target)
6725 return target;
6726 break;
6727
6728 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6729 target = expand_builtin_memset_with_bounds (exp, target, mode);
6730 if (target)
6731 return target;
6732 break;
6733
6734 default:
6735 break;
6736 }
6737
6738 /* The switch statement above can drop through to cause the function
6739 to be called normally. */
6740 return expand_call (exp, target, ignore);
6741 }
6742
6743 /* Determine whether a tree node represents a call to a built-in
6744 function. If the tree T is a call to a built-in function with
6745 the right number of arguments of the appropriate types, return
6746 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6747 Otherwise the return value is END_BUILTINS. */
6748
6749 enum built_in_function
6750 builtin_mathfn_code (const_tree t)
6751 {
6752 const_tree fndecl, arg, parmlist;
6753 const_tree argtype, parmtype;
6754 const_call_expr_arg_iterator iter;
6755
6756 if (TREE_CODE (t) != CALL_EXPR
6757 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6758 return END_BUILTINS;
6759
6760 fndecl = get_callee_fndecl (t);
6761 if (fndecl == NULL_TREE
6762 || TREE_CODE (fndecl) != FUNCTION_DECL
6763 || ! DECL_BUILT_IN (fndecl)
6764 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6765 return END_BUILTINS;
6766
6767 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6768 init_const_call_expr_arg_iterator (t, &iter);
6769 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6770 {
6771 /* If a function doesn't take a variable number of arguments,
6772 the last element in the list will have type `void'. */
6773 parmtype = TREE_VALUE (parmlist);
6774 if (VOID_TYPE_P (parmtype))
6775 {
6776 if (more_const_call_expr_args_p (&iter))
6777 return END_BUILTINS;
6778 return DECL_FUNCTION_CODE (fndecl);
6779 }
6780
6781 if (! more_const_call_expr_args_p (&iter))
6782 return END_BUILTINS;
6783
6784 arg = next_const_call_expr_arg (&iter);
6785 argtype = TREE_TYPE (arg);
6786
6787 if (SCALAR_FLOAT_TYPE_P (parmtype))
6788 {
6789 if (! SCALAR_FLOAT_TYPE_P (argtype))
6790 return END_BUILTINS;
6791 }
6792 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6793 {
6794 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6795 return END_BUILTINS;
6796 }
6797 else if (POINTER_TYPE_P (parmtype))
6798 {
6799 if (! POINTER_TYPE_P (argtype))
6800 return END_BUILTINS;
6801 }
6802 else if (INTEGRAL_TYPE_P (parmtype))
6803 {
6804 if (! INTEGRAL_TYPE_P (argtype))
6805 return END_BUILTINS;
6806 }
6807 else
6808 return END_BUILTINS;
6809 }
6810
6811 /* Variable-length argument list. */
6812 return DECL_FUNCTION_CODE (fndecl);
6813 }
6814
6815 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6816 evaluate to a constant. */
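/* For example, __builtin_constant_p (3) and __builtin_constant_p ("abc")
   fold to 1 here, a call whose argument has side effects folds to 0, and
   anything still undecided is left as NULL_TREE so that later passes can
   try again.  */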
6817
6818 static tree
6819 fold_builtin_constant_p (tree arg)
6820 {
6821 /* We return 1 for a numeric type that's known to be a constant
6822 value at compile-time or for an aggregate type that's a
6823 literal constant. */
6824 STRIP_NOPS (arg);
6825
6826 /* If we know this is a constant, return the constant one. */
6827 if (CONSTANT_CLASS_P (arg)
6828 || (TREE_CODE (arg) == CONSTRUCTOR
6829 && TREE_CONSTANT (arg)))
6830 return integer_one_node;
6831 if (TREE_CODE (arg) == ADDR_EXPR)
6832 {
6833 tree op = TREE_OPERAND (arg, 0);
6834 if (TREE_CODE (op) == STRING_CST
6835 || (TREE_CODE (op) == ARRAY_REF
6836 && integer_zerop (TREE_OPERAND (op, 1))
6837 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6838 return integer_one_node;
6839 }
6840
6841 /* If this expression has side effects, show we don't know it to be a
6842 constant. Likewise if it's a pointer or aggregate type, since in
6843 those cases we only want literals, as those are only optimized
6844 when generating RTL, not later.
6845 And finally, if we are compiling an initializer, not code, we
6846 need to return a definite result now; there's not going to be any
6847 more optimization done. */
6848 if (TREE_SIDE_EFFECTS (arg)
6849 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6850 || POINTER_TYPE_P (TREE_TYPE (arg))
6851 || cfun == 0
6852 || folding_initializer
6853 || force_folding_builtin_constant_p)
6854 return integer_zero_node;
6855
6856 return NULL_TREE;
6857 }
6858
6859 /* Create a call to builtin_expect with PRED and EXPECTED (and the optional
6860 PREDICTOR) as its arguments and return it as a truthvalue. */
6861
6862 static tree
6863 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6864 tree predictor)
6865 {
6866 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6867
6868 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6869 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6870 ret_type = TREE_TYPE (TREE_TYPE (fn));
6871 pred_type = TREE_VALUE (arg_types);
6872 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6873
6874 pred = fold_convert_loc (loc, pred_type, pred);
6875 expected = fold_convert_loc (loc, expected_type, expected);
6876 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6877 predictor);
6878
6879 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6880 build_int_cst (ret_type, 0));
6881 }
6882
6883 /* Fold a call to builtin_expect with arguments ARG0 and ARG1 (and an
6884 optional predictor ARG2). Return NULL_TREE if no simplification is possible. */
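/* Illustrative example: for a short-circuit condition such as
     __builtin_expect (a > 0 && b > 0, 1)
   the expected value is distributed over both operands, giving roughly
     __builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1)
   so that each conditional jump carries the prediction hint.  */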
6885
6886 tree
6887 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6888 {
6889 tree inner, fndecl, inner_arg0;
6890 enum tree_code code;
6891
6892 /* Distribute the expected value over short-circuiting operators.
6893 See through the cast from truthvalue_type_node to long. */
6894 inner_arg0 = arg0;
6895 while (CONVERT_EXPR_P (inner_arg0)
6896 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6897 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6898 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6899
6900 /* If this is a builtin_expect within a builtin_expect keep the
6901 inner one. See through a comparison against a constant. It
6902 might have been added to create a truthvalue. */
6903 inner = inner_arg0;
6904
6905 if (COMPARISON_CLASS_P (inner)
6906 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6907 inner = TREE_OPERAND (inner, 0);
6908
6909 if (TREE_CODE (inner) == CALL_EXPR
6910 && (fndecl = get_callee_fndecl (inner))
6911 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6912 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6913 return arg0;
6914
6915 inner = inner_arg0;
6916 code = TREE_CODE (inner);
6917 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6918 {
6919 tree op0 = TREE_OPERAND (inner, 0);
6920 tree op1 = TREE_OPERAND (inner, 1);
6921
6922 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
6923 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
6924 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6925
6926 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6927 }
6928
6929 /* If the argument isn't invariant then there's nothing else we can do. */
6930 if (!TREE_CONSTANT (inner_arg0))
6931 return NULL_TREE;
6932
6933 /* If we expect that a comparison against the argument will fold to
6934 a constant return the constant. In practice, this means a true
6935 constant or the address of a non-weak symbol. */
6936 inner = inner_arg0;
6937 STRIP_NOPS (inner);
6938 if (TREE_CODE (inner) == ADDR_EXPR)
6939 {
6940 do
6941 {
6942 inner = TREE_OPERAND (inner, 0);
6943 }
6944 while (TREE_CODE (inner) == COMPONENT_REF
6945 || TREE_CODE (inner) == ARRAY_REF);
6946 if ((TREE_CODE (inner) == VAR_DECL
6947 || TREE_CODE (inner) == FUNCTION_DECL)
6948 && DECL_WEAK (inner))
6949 return NULL_TREE;
6950 }
6951
6952 /* Otherwise, ARG0 already has the proper type for the return value. */
6953 return arg0;
6954 }
6955
6956 /* Fold a call to __builtin_classify_type with argument ARG. */
6957
6958 static tree
6959 fold_builtin_classify_type (tree arg)
6960 {
6961 if (arg == 0)
6962 return build_int_cst (integer_type_node, no_type_class);
6963
6964 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6965 }
6966
6967 /* Fold a call to __builtin_strlen with argument ARG. */
6968
6969 static tree
6970 fold_builtin_strlen (location_t loc, tree type, tree arg)
6971 {
6972 if (!validate_arg (arg, POINTER_TYPE))
6973 return NULL_TREE;
6974 else
6975 {
6976 tree len = c_strlen (arg, 0);
6977
6978 if (len)
6979 return fold_convert_loc (loc, type, len);
6980
6981 return NULL_TREE;
6982 }
6983 }
6984
6985 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6986
6987 static tree
6988 fold_builtin_inf (location_t loc, tree type, int warn)
6989 {
6990 REAL_VALUE_TYPE real;
6991
6992 /* __builtin_inff is intended to be usable to define INFINITY on all
6993 targets. If an infinity is not available, INFINITY expands "to a
6994 positive constant of type float that overflows at translation
6995 time", footnote "In this case, using INFINITY will violate the
6996 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6997 Thus we pedwarn to ensure this constraint violation is
6998 diagnosed. */
6999 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7000 pedwarn (loc, 0, "target format does not support infinity");
7001
7002 real_inf (&real);
7003 return build_real (type, real);
7004 }
7005
7006 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7007 NULL_TREE if no simplification can be made. */
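/* Illustrative sketch of the folded form (T is only for exposition):
     sincos (x, &s, &c)
   becomes roughly
     T = cexpi (x), *(&s) = IMAGPART_EXPR <T>, *(&c) = REALPART_EXPR <T>
   with the cexpi call replaced by a constant when X is a REAL_CST.  */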
7008
7009 static tree
7010 fold_builtin_sincos (location_t loc,
7011 tree arg0, tree arg1, tree arg2)
7012 {
7013 tree type;
7014 tree fndecl, call = NULL_TREE;
7015
7016 if (!validate_arg (arg0, REAL_TYPE)
7017 || !validate_arg (arg1, POINTER_TYPE)
7018 || !validate_arg (arg2, POINTER_TYPE))
7019 return NULL_TREE;
7020
7021 type = TREE_TYPE (arg0);
7022
7023 /* We can only fold this when a cexpi builtin exists for TYPE. */
7024 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7025 if (fn == END_BUILTINS)
7026 return NULL_TREE;
7027
7028 /* Calculate the result when the argument is a constant. */
7029 if (TREE_CODE (arg0) == REAL_CST)
7030 {
7031 tree complex_type = build_complex_type (type);
7032 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7033 }
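  /* If we could not fold to a constant, canonicalize sincos to a call
     to cexpi instead.  */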
7034 if (!call)
7035 {
7036 if (!targetm.libc_has_function (function_c99_math_complex)
7037 || !builtin_decl_implicit_p (fn))
7038 return NULL_TREE;
7039 fndecl = builtin_decl_explicit (fn);
7040 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7041 call = builtin_save_expr (call);
7042 }
7043
7044 return build2 (COMPOUND_EXPR, void_type_node,
7045 build2 (MODIFY_EXPR, void_type_node,
7046 build_fold_indirect_ref_loc (loc, arg1),
7047 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7048 build2 (MODIFY_EXPR, void_type_node,
7049 build_fold_indirect_ref_loc (loc, arg2),
7050 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7051 }
7052
7053 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7054 arguments to the call, and TYPE is its return type.
7055 Return NULL_TREE if no simplification can be made. */
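/* Illustrative examples: memchr ("hello", 'l', 5) folds to &"hello"[2]
   and memchr ("hello", 'x', 5) folds to a null pointer, provided LEN does
   not exceed the string length plus one.  */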
7056
7057 static tree
7058 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7059 {
7060 if (!validate_arg (arg1, POINTER_TYPE)
7061 || !validate_arg (arg2, INTEGER_TYPE)
7062 || !validate_arg (len, INTEGER_TYPE))
7063 return NULL_TREE;
7064 else
7065 {
7066 const char *p1;
7067
7068 if (TREE_CODE (arg2) != INTEGER_CST
7069 || !tree_fits_uhwi_p (len))
7070 return NULL_TREE;
7071
7072 p1 = c_getstr (arg1);
7073 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7074 {
7075 char c;
7076 const char *r;
7077 tree tem;
7078
7079 if (target_char_cast (arg2, &c))
7080 return NULL_TREE;
7081
7082 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7083
7084 if (r == NULL)
7085 return build_int_cst (TREE_TYPE (arg1), 0);
7086
7087 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7088 return fold_convert_loc (loc, type, tem);
7089 }
7090 return NULL_TREE;
7091 }
7092 }
7093
7094 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7095 Return NULL_TREE if no simplification can be made. */
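/* Illustrative examples: memcmp (p, q, 0) folds to 0 while still
   evaluating P and Q for side effects, and memcmp (p, q, 1) folds to
   *(const unsigned char *) p - *(const unsigned char *) q.  */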
7096
7097 static tree
7098 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7099 {
7100 if (!validate_arg (arg1, POINTER_TYPE)
7101 || !validate_arg (arg2, POINTER_TYPE)
7102 || !validate_arg (len, INTEGER_TYPE))
7103 return NULL_TREE;
7104
7105 /* If the LEN parameter is zero, return zero. */
7106 if (integer_zerop (len))
7107 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7108 arg1, arg2);
7109
7110 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7111 if (operand_equal_p (arg1, arg2, 0))
7112 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7113
7114 /* If the LEN parameter is one, return an expression corresponding to
7115 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7116 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7117 {
7118 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7119 tree cst_uchar_ptr_node
7120 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7121
7122 tree ind1
7123 = fold_convert_loc (loc, integer_type_node,
7124 build1 (INDIRECT_REF, cst_uchar_node,
7125 fold_convert_loc (loc,
7126 cst_uchar_ptr_node,
7127 arg1)));
7128 tree ind2
7129 = fold_convert_loc (loc, integer_type_node,
7130 build1 (INDIRECT_REF, cst_uchar_node,
7131 fold_convert_loc (loc,
7132 cst_uchar_ptr_node,
7133 arg2)));
7134 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7135 }
7136
7137 return NULL_TREE;
7138 }
7139
7140 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7141 Return NULL_TREE if no simplification can be made. */
7142
7143 static tree
7144 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7145 {
7146 if (!validate_arg (arg1, POINTER_TYPE)
7147 || !validate_arg (arg2, POINTER_TYPE))
7148 return NULL_TREE;
7149
7150 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7151 if (operand_equal_p (arg1, arg2, 0))
7152 return integer_zero_node;
7153
7154 /* If the second arg is "", return *(const unsigned char*)arg1. */
7155 const char *p2 = c_getstr (arg2);
7156 if (p2 && *p2 == '\0')
7157 {
7158 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7159 tree cst_uchar_ptr_node
7160 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7161
7162 return fold_convert_loc (loc, integer_type_node,
7163 build1 (INDIRECT_REF, cst_uchar_node,
7164 fold_convert_loc (loc,
7165 cst_uchar_ptr_node,
7166 arg1)));
7167 }
7168
7169 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7170 const char *p1 = c_getstr (arg1);
7171 if (p1 && *p1 == '\0')
7172 {
7173 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7174 tree cst_uchar_ptr_node
7175 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7176
7177 tree temp
7178 = fold_convert_loc (loc, integer_type_node,
7179 build1 (INDIRECT_REF, cst_uchar_node,
7180 fold_convert_loc (loc,
7181 cst_uchar_ptr_node,
7182 arg2)));
7183 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7184 }
7185
7186 return NULL_TREE;
7187 }
7188
7189 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7190 Return NULL_TREE if no simplification can be made. */
7191
7192 static tree
7193 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7194 {
7195 if (!validate_arg (arg1, POINTER_TYPE)
7196 || !validate_arg (arg2, POINTER_TYPE)
7197 || !validate_arg (len, INTEGER_TYPE))
7198 return NULL_TREE;
7199
7200 /* If the LEN parameter is zero, return zero. */
7201 if (integer_zerop (len))
7202 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7203 arg1, arg2);
7204
7205 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7206 if (operand_equal_p (arg1, arg2, 0))
7207 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7208
7209 /* If the second arg is "", and the length is greater than zero,
7210 return *(const unsigned char*)arg1. */
7211 const char *p2 = c_getstr (arg2);
7212 if (p2 && *p2 == '\0'
7213 && TREE_CODE (len) == INTEGER_CST
7214 && tree_int_cst_sgn (len) == 1)
7215 {
7216 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7217 tree cst_uchar_ptr_node
7218 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7219
7220 return fold_convert_loc (loc, integer_type_node,
7221 build1 (INDIRECT_REF, cst_uchar_node,
7222 fold_convert_loc (loc,
7223 cst_uchar_ptr_node,
7224 arg1)));
7225 }
7226
7227 /* If the first arg is "", and the length is greater than zero,
7228 return -*(const unsigned char*)arg2. */
7229 const char *p1 = c_getstr (arg1);
7230 if (p1 && *p1 == '\0'
7231 && TREE_CODE (len) == INTEGER_CST
7232 && tree_int_cst_sgn (len) == 1)
7233 {
7234 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7235 tree cst_uchar_ptr_node
7236 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7237
7238 tree temp = fold_convert_loc (loc, integer_type_node,
7239 build1 (INDIRECT_REF, cst_uchar_node,
7240 fold_convert_loc (loc,
7241 cst_uchar_ptr_node,
7242 arg2)));
7243 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7244 }
7245
7246 /* If the LEN parameter is one, return an expression corresponding to
7247 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7248 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7249 {
7250 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7251 tree cst_uchar_ptr_node
7252 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7253
7254 tree ind1 = fold_convert_loc (loc, integer_type_node,
7255 build1 (INDIRECT_REF, cst_uchar_node,
7256 fold_convert_loc (loc,
7257 cst_uchar_ptr_node,
7258 arg1)));
7259 tree ind2 = fold_convert_loc (loc, integer_type_node,
7260 build1 (INDIRECT_REF, cst_uchar_node,
7261 fold_convert_loc (loc,
7262 cst_uchar_ptr_node,
7263 arg2)));
7264 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7265 }
7266
7267 return NULL_TREE;
7268 }
7269
7270 /* Fold a call to builtin isascii with argument ARG. */
7271
7272 static tree
7273 fold_builtin_isascii (location_t loc, tree arg)
7274 {
7275 if (!validate_arg (arg, INTEGER_TYPE))
7276 return NULL_TREE;
7277 else
7278 {
7279 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7280 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7281 build_int_cst (integer_type_node,
7282 ~ (unsigned HOST_WIDE_INT) 0x7f));
7283 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7284 arg, integer_zero_node);
7285 }
7286 }
7287
7288 /* Fold a call to builtin toascii with argument ARG. */
7289
7290 static tree
7291 fold_builtin_toascii (location_t loc, tree arg)
7292 {
7293 if (!validate_arg (arg, INTEGER_TYPE))
7294 return NULL_TREE;
7295
7296 /* Transform toascii(c) -> (c & 0x7f). */
7297 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7298 build_int_cst (integer_type_node, 0x7f));
7299 }
7300
7301 /* Fold a call to builtin isdigit with argument ARG. */
7302
7303 static tree
7304 fold_builtin_isdigit (location_t loc, tree arg)
7305 {
7306 if (!validate_arg (arg, INTEGER_TYPE))
7307 return NULL_TREE;
7308 else
7309 {
7310 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7311 /* According to the C standard, isdigit is unaffected by locale.
7312 However, it definitely is affected by the target character set. */
7313 unsigned HOST_WIDE_INT target_digit0
7314 = lang_hooks.to_target_charset ('0');
7315
7316 if (target_digit0 == 0)
7317 return NULL_TREE;
7318
7319 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7320 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7321 build_int_cst (unsigned_type_node, target_digit0));
7322 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7323 build_int_cst (unsigned_type_node, 9));
7324 }
7325 }
7326
7327 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7328
7329 static tree
7330 fold_builtin_fabs (location_t loc, tree arg, tree type)
7331 {
7332 if (!validate_arg (arg, REAL_TYPE))
7333 return NULL_TREE;
7334
7335 arg = fold_convert_loc (loc, type, arg);
7336 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7337 }
7338
7339 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7340
7341 static tree
7342 fold_builtin_abs (location_t loc, tree arg, tree type)
7343 {
7344 if (!validate_arg (arg, INTEGER_TYPE))
7345 return NULL_TREE;
7346
7347 arg = fold_convert_loc (loc, type, arg);
7348 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7349 }
7350
7351 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7352
7353 static tree
7354 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7355 {
7356 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7357 if (validate_arg (arg0, REAL_TYPE)
7358 && validate_arg (arg1, REAL_TYPE)
7359 && validate_arg (arg2, REAL_TYPE)
7360 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7361 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7362
7363 return NULL_TREE;
7364 }
7365
7366 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7367
7368 static tree
7369 fold_builtin_carg (location_t loc, tree arg, tree type)
7370 {
7371 if (validate_arg (arg, COMPLEX_TYPE)
7372 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7373 {
7374 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7375
7376 if (atan2_fn)
7377 {
7378 tree new_arg = builtin_save_expr (arg);
7379 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7380 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7381 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7382 }
7383 }
7384
7385 return NULL_TREE;
7386 }
7387
7388 /* Fold a call to builtin frexp; we can assume the base is 2. */
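/* Worked example (illustrative): since 12.0 == 0.75 * 2**4, the call
   frexp (12.0, &e) folds to the compound expression (*(&e) = 4, 0.75).  */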
7389
7390 static tree
7391 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7392 {
7393 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7394 return NULL_TREE;
7395
7396 STRIP_NOPS (arg0);
7397
7398 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7399 return NULL_TREE;
7400
7401 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7402
7403 /* Proceed if a valid pointer type was passed in. */
7404 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7405 {
7406 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7407 tree frac, exp;
7408
7409 switch (value->cl)
7410 {
7411 case rvc_zero:
7412 /* For +-0, return (*exp = 0, +-0). */
7413 exp = integer_zero_node;
7414 frac = arg0;
7415 break;
7416 case rvc_nan:
7417 case rvc_inf:
7418 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7419 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7420 case rvc_normal:
7421 {
7422 /* Since the frexp function always expects base 2, and in
7423 GCC normalized significands are already in the range
7424 [0.5, 1.0), we have exactly what frexp wants. */
7425 REAL_VALUE_TYPE frac_rvt = *value;
7426 SET_REAL_EXP (&frac_rvt, 0);
7427 frac = build_real (rettype, frac_rvt);
7428 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7429 }
7430 break;
7431 default:
7432 gcc_unreachable ();
7433 }
7434
7435 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7436 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7437 TREE_SIDE_EFFECTS (arg1) = 1;
7438 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7439 }
7440
7441 return NULL_TREE;
7442 }
7443
7444 /* Fold a call to builtin modf. */
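/* Worked example (illustrative): modf (2.5, &iptr) folds to the compound
   expression (*(&iptr) = 2.0, 0.5), and for a negative integral argument
   such as -3.0 the fractional part keeps the sign: (*(&iptr) = -3.0, -0.0).  */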
7445
7446 static tree
7447 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7448 {
7449 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7450 return NULL_TREE;
7451
7452 STRIP_NOPS (arg0);
7453
7454 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7455 return NULL_TREE;
7456
7457 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7458
7459 /* Proceed if a valid pointer type was passed in. */
7460 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7461 {
7462 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7463 REAL_VALUE_TYPE trunc, frac;
7464
7465 switch (value->cl)
7466 {
7467 case rvc_nan:
7468 case rvc_zero:
7469 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7470 trunc = frac = *value;
7471 break;
7472 case rvc_inf:
7473 /* For +-Inf, return (*arg1 = arg0, +-0). */
7474 frac = dconst0;
7475 frac.sign = value->sign;
7476 trunc = *value;
7477 break;
7478 case rvc_normal:
7479 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7480 real_trunc (&trunc, VOIDmode, value);
7481 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7482 /* If the original number was negative and already
7483 integral, then the fractional part is -0.0. */
7484 if (value->sign && frac.cl == rvc_zero)
7485 frac.sign = value->sign;
7486 break;
7487 }
7488
7489 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7490 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7491 build_real (rettype, trunc));
7492 TREE_SIDE_EFFECTS (arg1) = 1;
7493 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7494 build_real (rettype, frac));
7495 }
7496
7497 return NULL_TREE;
7498 }
7499
7500 /* Given a location LOC, an interclass builtin function decl FNDECL
7501 and its single argument ARG, return a folded expression computing
7502 the same, or NULL_TREE if we either couldn't or didn't want to fold
7503 (the latter happens if there's an RTL instruction available). */
7504
7505 static tree
7506 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7507 {
7508 machine_mode mode;
7509
7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
7512
7513 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7514 return NULL_TREE;
7515
7516 mode = TYPE_MODE (TREE_TYPE (arg));
7517
7518 /* If there is no optab, try generic code. */
7519 switch (DECL_FUNCTION_CODE (fndecl))
7520 {
7521 tree result;
7522
7523 CASE_FLT_FN (BUILT_IN_ISINF):
7524 {
7525 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7526 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7527 tree const type = TREE_TYPE (arg);
7528 REAL_VALUE_TYPE r;
7529 char buf[128];
7530
7531 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7532 real_from_string (&r, buf);
7533 result = build_call_expr (isgr_fn, 2,
7534 fold_build1_loc (loc, ABS_EXPR, type, arg),
7535 build_real (type, r));
7536 return result;
7537 }
7538 CASE_FLT_FN (BUILT_IN_FINITE):
7539 case BUILT_IN_ISFINITE:
7540 {
7541 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7542 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7543 tree const type = TREE_TYPE (arg);
7544 REAL_VALUE_TYPE r;
7545 char buf[128];
7546
7547 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7548 real_from_string (&r, buf);
7549 result = build_call_expr (isle_fn, 2,
7550 fold_build1_loc (loc, ABS_EXPR, type, arg),
7551 build_real (type, r));
7552 /*result = fold_build2_loc (loc, UNGT_EXPR,
7553 TREE_TYPE (TREE_TYPE (fndecl)),
7554 fold_build1_loc (loc, ABS_EXPR, type, arg),
7555 build_real (type, r));
7556 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7557 TREE_TYPE (TREE_TYPE (fndecl)),
7558 result);*/
7559 return result;
7560 }
7561 case BUILT_IN_ISNORMAL:
7562 {
7563 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7564 islessequal(fabs(x),DBL_MAX). */
7565 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7566 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7567 tree const type = TREE_TYPE (arg);
7568 REAL_VALUE_TYPE rmax, rmin;
7569 char buf[128];
7570
7571 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7572 real_from_string (&rmax, buf);
7573 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7574 real_from_string (&rmin, buf);
7575 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7576 result = build_call_expr (isle_fn, 2, arg,
7577 build_real (type, rmax));
7578 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
7579 build_call_expr (isge_fn, 2, arg,
7580 build_real (type, rmin)));
7581 return result;
7582 }
7583 default:
7584 break;
7585 }
7586
7587 return NULL_TREE;
7588 }
7589
7590 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign
7591 or __builtin_isfinite. ARG is the argument for the call. */
7592
7593 static tree
7594 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7595 {
7596 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7597
7598 if (!validate_arg (arg, REAL_TYPE))
7599 return NULL_TREE;
7600
7601 switch (builtin_index)
7602 {
7603 case BUILT_IN_ISINF:
7604 if (!HONOR_INFINITIES (arg))
7605 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7606
7607 return NULL_TREE;
7608
7609 case BUILT_IN_ISINF_SIGN:
7610 {
7611 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7612 /* In a boolean context, GCC will fold the inner COND_EXPR to
7613 1. So e.g. "if (isinf_sign(x))" would be folded to just
7614 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7615 tree signbit_fn = mathfn_built_in_1
7616 (TREE_TYPE (arg), CFN_BUILT_IN_SIGNBIT, 0);
7617 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7618 tree tmp = NULL_TREE;
7619
7620 arg = builtin_save_expr (arg);
7621
7622 if (signbit_fn && isinf_fn)
7623 {
7624 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7625 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7626
7627 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7628 signbit_call, integer_zero_node);
7629 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7630 isinf_call, integer_zero_node);
7631
7632 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7633 integer_minus_one_node, integer_one_node);
7634 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7635 isinf_call, tmp,
7636 integer_zero_node);
7637 }
7638
7639 return tmp;
7640 }
7641
7642 case BUILT_IN_ISFINITE:
7643 if (!HONOR_NANS (arg)
7644 && !HONOR_INFINITIES (arg))
7645 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7646
7647 return NULL_TREE;
7648
7649 case BUILT_IN_ISNAN:
7650 if (!HONOR_NANS (arg))
7651 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7652
7653 arg = builtin_save_expr (arg);
7654 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7655
7656 default:
7657 gcc_unreachable ();
7658 }
7659 }
7660
7661 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7662 This builtin will generate code to return the appropriate floating
7663 point classification depending on the value of the floating point
7664 number passed in. The possible return values must be supplied as
7665 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7666 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7667 one floating point argument which is "type generic". */
7668
7669 static tree
7670 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7671 {
7672 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7673 arg, type, res, tmp;
7674 machine_mode mode;
7675 REAL_VALUE_TYPE r;
7676 char buf[128];
7677
7678 /* Verify the required arguments in the original call. */
7679 if (nargs != 6
7680 || !validate_arg (args[0], INTEGER_TYPE)
7681 || !validate_arg (args[1], INTEGER_TYPE)
7682 || !validate_arg (args[2], INTEGER_TYPE)
7683 || !validate_arg (args[3], INTEGER_TYPE)
7684 || !validate_arg (args[4], INTEGER_TYPE)
7685 || !validate_arg (args[5], REAL_TYPE))
7686 return NULL_TREE;
7687
7688 fp_nan = args[0];
7689 fp_infinite = args[1];
7690 fp_normal = args[2];
7691 fp_subnormal = args[3];
7692 fp_zero = args[4];
7693 arg = args[5];
7694 type = TREE_TYPE (arg);
7695 mode = TYPE_MODE (type);
7696 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7697
7698 /* fpclassify(x) ->
7699 isnan(x) ? FP_NAN :
7700 (fabs(x) == Inf ? FP_INFINITE :
7701 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7702 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7703
7704 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7705 build_real (type, dconst0));
7706 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7707 tmp, fp_zero, fp_subnormal);
7708
7709 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7710 real_from_string (&r, buf);
7711 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7712 arg, build_real (type, r));
7713 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7714
7715 if (HONOR_INFINITIES (mode))
7716 {
7717 real_inf (&r);
7718 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7719 build_real (type, r));
7720 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7721 fp_infinite, res);
7722 }
7723
7724 if (HONOR_NANS (mode))
7725 {
7726 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7727 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7728 }
7729
7730 return res;
7731 }
7732
7733 /* Fold a call to an unordered comparison function such as
7734 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7735 being called and ARG0 and ARG1 are the arguments for the call.
7736 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7737 the opposite of the desired result. UNORDERED_CODE is used
7738 for modes that can hold NaNs and ORDERED_CODE is used for
7739 the rest. */
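/* Illustrative example: __builtin_isgreater (x, y) is folded to the
   negation of an UNLE_EXPR, i.e. roughly ! (unordered (x, y) || x <= y),
   when NaNs are honored, and to ! (x <= y) otherwise; the unordered form
   avoids raising an "invalid" exception on quiet NaNs.  */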
7740
7741 static tree
7742 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7743 enum tree_code unordered_code,
7744 enum tree_code ordered_code)
7745 {
7746 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7747 enum tree_code code;
7748 tree type0, type1;
7749 enum tree_code code0, code1;
7750 tree cmp_type = NULL_TREE;
7751
7752 type0 = TREE_TYPE (arg0);
7753 type1 = TREE_TYPE (arg1);
7754
7755 code0 = TREE_CODE (type0);
7756 code1 = TREE_CODE (type1);
7757
7758 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7759 /* Choose the wider of two real types. */
7760 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7761 ? type0 : type1;
7762 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7763 cmp_type = type0;
7764 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7765 cmp_type = type1;
7766
7767 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7768 arg1 = fold_convert_loc (loc, cmp_type, arg1);
7769
7770 if (unordered_code == UNORDERED_EXPR)
7771 {
7772 if (!HONOR_NANS (arg0))
7773 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7774 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7775 }
7776
7777 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7778 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7779 fold_build2_loc (loc, code, type, arg0, arg1));
7780 }
7781
7782 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7783 arithmetic if it can never overflow, or into internal functions that
7784 return both the arithmetic result and an overflow flag packed into
7785 a complex integer result, or some other check for overflow. */
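/* Illustrative sketch of the folded form (T is only for exposition):
     __builtin_add_overflow (a, b, &r)
   becomes roughly
     T = .ADD_OVERFLOW (a, b), *(&r) = REALPART_EXPR <T>,
     (_Bool) IMAGPART_EXPR <T>
   where T is a complex integer carrying both the result and the flag.  */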
7786
7787 static tree
7788 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7789 tree arg0, tree arg1, tree arg2)
7790 {
7791 enum internal_fn ifn = IFN_LAST;
7792 tree type = TREE_TYPE (TREE_TYPE (arg2));
7793 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
7794 switch (fcode)
7795 {
7796 case BUILT_IN_ADD_OVERFLOW:
7797 case BUILT_IN_SADD_OVERFLOW:
7798 case BUILT_IN_SADDL_OVERFLOW:
7799 case BUILT_IN_SADDLL_OVERFLOW:
7800 case BUILT_IN_UADD_OVERFLOW:
7801 case BUILT_IN_UADDL_OVERFLOW:
7802 case BUILT_IN_UADDLL_OVERFLOW:
7803 ifn = IFN_ADD_OVERFLOW;
7804 break;
7805 case BUILT_IN_SUB_OVERFLOW:
7806 case BUILT_IN_SSUB_OVERFLOW:
7807 case BUILT_IN_SSUBL_OVERFLOW:
7808 case BUILT_IN_SSUBLL_OVERFLOW:
7809 case BUILT_IN_USUB_OVERFLOW:
7810 case BUILT_IN_USUBL_OVERFLOW:
7811 case BUILT_IN_USUBLL_OVERFLOW:
7812 ifn = IFN_SUB_OVERFLOW;
7813 break;
7814 case BUILT_IN_MUL_OVERFLOW:
7815 case BUILT_IN_SMUL_OVERFLOW:
7816 case BUILT_IN_SMULL_OVERFLOW:
7817 case BUILT_IN_SMULLL_OVERFLOW:
7818 case BUILT_IN_UMUL_OVERFLOW:
7819 case BUILT_IN_UMULL_OVERFLOW:
7820 case BUILT_IN_UMULLL_OVERFLOW:
7821 ifn = IFN_MUL_OVERFLOW;
7822 break;
7823 default:
7824 gcc_unreachable ();
7825 }
7826 tree ctype = build_complex_type (type);
7827 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
7828 2, arg0, arg1);
7829 tree tgt = save_expr (call);
7830 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
7831 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
7832 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
7833 tree store
7834 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
7835 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
7836 }
7837
7838 /* Fold a call to built-in function FNDECL with 0 arguments.
7839 This function returns NULL_TREE if no simplification was possible. */
7840
7841 static tree
7842 fold_builtin_0 (location_t loc, tree fndecl)
7843 {
7844 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7845 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7846 switch (fcode)
7847 {
7848 CASE_FLT_FN (BUILT_IN_INF):
7849 case BUILT_IN_INFD32:
7850 case BUILT_IN_INFD64:
7851 case BUILT_IN_INFD128:
7852 return fold_builtin_inf (loc, type, true);
7853
7854 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
7855 return fold_builtin_inf (loc, type, false);
7856
7857 case BUILT_IN_CLASSIFY_TYPE:
7858 return fold_builtin_classify_type (NULL_TREE);
7859
7860 default:
7861 break;
7862 }
7863 return NULL_TREE;
7864 }
7865
7866 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
7867 This function returns NULL_TREE if no simplification was possible. */
7868
7869 static tree
7870 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
7871 {
7872 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7873 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7874
7875 if (TREE_CODE (arg0) == ERROR_MARK)
7876 return NULL_TREE;
7877
7878 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
7879 return ret;
7880
7881 switch (fcode)
7882 {
7883 case BUILT_IN_CONSTANT_P:
7884 {
7885 tree val = fold_builtin_constant_p (arg0);
7886
7887 /* Gimplification will pull the CALL_EXPR for the builtin out of
7888 an if condition. When not optimizing, we'll not CSE it back.
7889 To avoid regressions in the form of link errors, return false now. */
7890 if (!val && !optimize)
7891 val = integer_zero_node;
7892
7893 return val;
7894 }
7895
7896 case BUILT_IN_CLASSIFY_TYPE:
7897 return fold_builtin_classify_type (arg0);
7898
7899 case BUILT_IN_STRLEN:
7900 return fold_builtin_strlen (loc, type, arg0);
7901
7902 CASE_FLT_FN (BUILT_IN_FABS):
7903 case BUILT_IN_FABSD32:
7904 case BUILT_IN_FABSD64:
7905 case BUILT_IN_FABSD128:
7906 return fold_builtin_fabs (loc, arg0, type);
7907
7908 case BUILT_IN_ABS:
7909 case BUILT_IN_LABS:
7910 case BUILT_IN_LLABS:
7911 case BUILT_IN_IMAXABS:
7912 return fold_builtin_abs (loc, arg0, type);
7913
7914 CASE_FLT_FN (BUILT_IN_CONJ):
7915 if (validate_arg (arg0, COMPLEX_TYPE)
7916 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7917 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
7918 break;
7919
7920 CASE_FLT_FN (BUILT_IN_CREAL):
7921 if (validate_arg (arg0, COMPLEX_TYPE)
7922 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7923 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
7924 break;
7925
7926 CASE_FLT_FN (BUILT_IN_CIMAG):
7927 if (validate_arg (arg0, COMPLEX_TYPE)
7928 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7929 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
7930 break;
7931
7932 CASE_FLT_FN (BUILT_IN_CARG):
7933 return fold_builtin_carg (loc, arg0, type);
7934
7935 case BUILT_IN_ISASCII:
7936 return fold_builtin_isascii (loc, arg0);
7937
7938 case BUILT_IN_TOASCII:
7939 return fold_builtin_toascii (loc, arg0);
7940
7941 case BUILT_IN_ISDIGIT:
7942 return fold_builtin_isdigit (loc, arg0);
7943
7944 CASE_FLT_FN (BUILT_IN_FINITE):
7945 case BUILT_IN_FINITED32:
7946 case BUILT_IN_FINITED64:
7947 case BUILT_IN_FINITED128:
7948 case BUILT_IN_ISFINITE:
7949 {
7950 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
7951 if (ret)
7952 return ret;
7953 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
7954 }
7955
7956 CASE_FLT_FN (BUILT_IN_ISINF):
7957 case BUILT_IN_ISINFD32:
7958 case BUILT_IN_ISINFD64:
7959 case BUILT_IN_ISINFD128:
7960 {
7961 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
7962 if (ret)
7963 return ret;
7964 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
7965 }
7966
7967 case BUILT_IN_ISNORMAL:
7968 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
7969
7970 case BUILT_IN_ISINF_SIGN:
7971 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
7972
7973 CASE_FLT_FN (BUILT_IN_ISNAN):
7974 case BUILT_IN_ISNAND32:
7975 case BUILT_IN_ISNAND64:
7976 case BUILT_IN_ISNAND128:
7977 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
7978
7979 case BUILT_IN_FREE:
7980 if (integer_zerop (arg0))
7981 return build_empty_stmt (loc);
7982 break;
7983
7984 default:
7985 break;
7986 }
7987
7988 return NULL_TREE;
7989
7990 }
7991
7992 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
7993 This function returns NULL_TREE if no simplification was possible. */
7994
7995 static tree
7996 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
7997 {
7998 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7999 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8000
8001 if (TREE_CODE (arg0) == ERROR_MARK
8002 || TREE_CODE (arg1) == ERROR_MARK)
8003 return NULL_TREE;
8004
8005 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8006 return ret;
8007
8008 switch (fcode)
8009 {
8010 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8011 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8012 if (validate_arg (arg0, REAL_TYPE)
8013 && validate_arg (arg1, POINTER_TYPE))
8014 return do_mpfr_lgamma_r (arg0, arg1, type);
8015 break;
8016
8017 CASE_FLT_FN (BUILT_IN_FREXP):
8018 return fold_builtin_frexp (loc, arg0, arg1, type);
8019
8020 CASE_FLT_FN (BUILT_IN_MODF):
8021 return fold_builtin_modf (loc, arg0, arg1, type);
8022
8023 case BUILT_IN_STRSTR:
8024 return fold_builtin_strstr (loc, arg0, arg1, type);
8025
8026 case BUILT_IN_STRSPN:
8027 return fold_builtin_strspn (loc, arg0, arg1);
8028
8029 case BUILT_IN_STRCSPN:
8030 return fold_builtin_strcspn (loc, arg0, arg1);
8031
8032 case BUILT_IN_STRCHR:
8033 case BUILT_IN_INDEX:
8034 return fold_builtin_strchr (loc, arg0, arg1, type);
8035
8036 case BUILT_IN_STRRCHR:
8037 case BUILT_IN_RINDEX:
8038 return fold_builtin_strrchr (loc, arg0, arg1, type);
8039
8040 case BUILT_IN_STRCMP:
8041 return fold_builtin_strcmp (loc, arg0, arg1);
8042
8043 case BUILT_IN_STRPBRK:
8044 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8045
8046 case BUILT_IN_EXPECT:
8047 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8048
8049 case BUILT_IN_ISGREATER:
8050 return fold_builtin_unordered_cmp (loc, fndecl,
8051 arg0, arg1, UNLE_EXPR, LE_EXPR);
8052 case BUILT_IN_ISGREATEREQUAL:
8053 return fold_builtin_unordered_cmp (loc, fndecl,
8054 arg0, arg1, UNLT_EXPR, LT_EXPR);
8055 case BUILT_IN_ISLESS:
8056 return fold_builtin_unordered_cmp (loc, fndecl,
8057 arg0, arg1, UNGE_EXPR, GE_EXPR);
8058 case BUILT_IN_ISLESSEQUAL:
8059 return fold_builtin_unordered_cmp (loc, fndecl,
8060 arg0, arg1, UNGT_EXPR, GT_EXPR);
8061 case BUILT_IN_ISLESSGREATER:
8062 return fold_builtin_unordered_cmp (loc, fndecl,
8063 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8064 case BUILT_IN_ISUNORDERED:
8065 return fold_builtin_unordered_cmp (loc, fndecl,
8066 arg0, arg1, UNORDERED_EXPR,
8067 NOP_EXPR);
8068
8069 /* We do the folding for va_start in the expander. */
8070 case BUILT_IN_VA_START:
8071 break;
8072
8073 case BUILT_IN_OBJECT_SIZE:
8074 return fold_builtin_object_size (arg0, arg1);
8075
8076 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8077 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8078
8079 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8080 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8081
8082 default:
8083 break;
8084 }
8085 return NULL_TREE;
8086 }
8087
8088 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8089 and ARG2.
8090 This function returns NULL_TREE if no simplification was possible. */
8091
8092 static tree
8093 fold_builtin_3 (location_t loc, tree fndecl,
8094 tree arg0, tree arg1, tree arg2)
8095 {
8096 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8098
8099 if (TREE_CODE (arg0) == ERROR_MARK
8100 || TREE_CODE (arg1) == ERROR_MARK
8101 || TREE_CODE (arg2) == ERROR_MARK)
8102 return NULL_TREE;
8103
8104 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8105 arg0, arg1, arg2))
8106 return ret;
8107
8108 switch (fcode)
8109 {
8110
8111 CASE_FLT_FN (BUILT_IN_SINCOS):
8112 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8113
8114 CASE_FLT_FN (BUILT_IN_FMA):
8115 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8116
8117 CASE_FLT_FN (BUILT_IN_REMQUO):
8118 if (validate_arg (arg0, REAL_TYPE)
8119 && validate_arg (arg1, REAL_TYPE)
8120 && validate_arg (arg2, POINTER_TYPE))
8121 return do_mpfr_remquo (arg0, arg1, arg2);
8122 break;
8123
8124 case BUILT_IN_STRNCMP:
8125 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8126
8127 case BUILT_IN_MEMCHR:
8128 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8129
8130 case BUILT_IN_BCMP:
8131 case BUILT_IN_MEMCMP:
8132 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8133
8134 case BUILT_IN_EXPECT:
8135 return fold_builtin_expect (loc, arg0, arg1, arg2);
8136
8137 case BUILT_IN_ADD_OVERFLOW:
8138 case BUILT_IN_SUB_OVERFLOW:
8139 case BUILT_IN_MUL_OVERFLOW:
8140 case BUILT_IN_SADD_OVERFLOW:
8141 case BUILT_IN_SADDL_OVERFLOW:
8142 case BUILT_IN_SADDLL_OVERFLOW:
8143 case BUILT_IN_SSUB_OVERFLOW:
8144 case BUILT_IN_SSUBL_OVERFLOW:
8145 case BUILT_IN_SSUBLL_OVERFLOW:
8146 case BUILT_IN_SMUL_OVERFLOW:
8147 case BUILT_IN_SMULL_OVERFLOW:
8148 case BUILT_IN_SMULLL_OVERFLOW:
8149 case BUILT_IN_UADD_OVERFLOW:
8150 case BUILT_IN_UADDL_OVERFLOW:
8151 case BUILT_IN_UADDLL_OVERFLOW:
8152 case BUILT_IN_USUB_OVERFLOW:
8153 case BUILT_IN_USUBL_OVERFLOW:
8154 case BUILT_IN_USUBLL_OVERFLOW:
8155 case BUILT_IN_UMUL_OVERFLOW:
8156 case BUILT_IN_UMULL_OVERFLOW:
8157 case BUILT_IN_UMULLL_OVERFLOW:
8158 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8159
8160 default:
8161 break;
8162 }
8163 return NULL_TREE;
8164 }
8165
8166 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8167 arguments. IGNORE is true if the result of the
8168 function call is ignored. This function returns NULL_TREE if no
8169 simplification was possible. */
8170
8171 tree
8172 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8173 {
8174 tree ret = NULL_TREE;
8175
8176 switch (nargs)
8177 {
8178 case 0:
8179 ret = fold_builtin_0 (loc, fndecl);
8180 break;
8181 case 1:
8182 ret = fold_builtin_1 (loc, fndecl, args[0]);
8183 break;
8184 case 2:
8185 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8186 break;
8187 case 3:
8188 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8189 break;
8190 default:
8191 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8192 break;
8193 }
8194 if (ret)
8195 {
8196 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8197 SET_EXPR_LOCATION (ret, loc);
8198 TREE_NO_WARNING (ret) = 1;
8199 return ret;
8200 }
8201 return NULL_TREE;
8202 }
8203
8204 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8205 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8206 of arguments in ARGS to be omitted. OLDNARGS is the number of
8207 elements in ARGS. */
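/* Illustrative example: with OLDNARGS == 3, SKIP == 1 and N == 2, the new
   call is FNDECL (new1, new2, args[1], args[2]); the N new arguments come
   first and the first SKIP old arguments are dropped.  */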
8208
8209 static tree
8210 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8211 int skip, tree fndecl, int n, va_list newargs)
8212 {
8213 int nargs = oldnargs - skip + n;
8214 tree *buffer;
8215
8216 if (n > 0)
8217 {
8218 int i, j;
8219
8220 buffer = XALLOCAVEC (tree, nargs);
8221 for (i = 0; i < n; i++)
8222 buffer[i] = va_arg (newargs, tree);
8223 for (j = skip; j < oldnargs; j++, i++)
8224 buffer[i] = args[j];
8225 }
8226 else
8227 buffer = args + skip;
8228
8229 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8230 }
8231
8232 /* Return true if FNDECL shouldn't be folded right now.
8233 If a built-in function has an inline attribute always_inline
8234 wrapper, defer folding it until after always_inline functions have
8235 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8236 might not be performed. */
8237
8238 bool
8239 avoid_folding_inline_builtin (tree fndecl)
8240 {
8241 return (DECL_DECLARED_INLINE_P (fndecl)
8242 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8243 && cfun
8244 && !cfun->always_inline_functions_inlined
8245 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8246 }
8247
8248 /* A wrapper function for builtin folding that prevents warnings for
8249 "statement without effect" and the like, caused by removing the
8250 call node earlier than the warning is generated. */
8251
8252 tree
8253 fold_call_expr (location_t loc, tree exp, bool ignore)
8254 {
8255 tree ret = NULL_TREE;
8256 tree fndecl = get_callee_fndecl (exp);
8257 if (fndecl
8258 && TREE_CODE (fndecl) == FUNCTION_DECL
8259 && DECL_BUILT_IN (fndecl)
8260 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8261 yet. Defer folding until we see all the arguments
8262 (after inlining). */
8263 && !CALL_EXPR_VA_ARG_PACK (exp))
8264 {
8265 int nargs = call_expr_nargs (exp);
8266
8267 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8268 instead last argument is __builtin_va_arg_pack (). Defer folding
8269 even in that case, until arguments are finalized. */
8270 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8271 {
8272 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8273 if (fndecl2
8274 && TREE_CODE (fndecl2) == FUNCTION_DECL
8275 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8276 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8277 return NULL_TREE;
8278 }
8279
8280 if (avoid_folding_inline_builtin (fndecl))
8281 return NULL_TREE;
8282
8283 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8284 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8285 CALL_EXPR_ARGP (exp), ignore);
8286 else
8287 {
8288 tree *args = CALL_EXPR_ARGP (exp);
8289 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8290 if (ret)
8291 return ret;
8292 }
8293 }
8294 return NULL_TREE;
8295 }
8296
8297 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8298 N arguments are passed in the array ARGARRAY. Return a folded
8299 expression or NULL_TREE if no simplification was possible. */
8300
8301 tree
8302 fold_builtin_call_array (location_t loc, tree,
8303 tree fn,
8304 int n,
8305 tree *argarray)
8306 {
8307 if (TREE_CODE (fn) != ADDR_EXPR)
8308 return NULL_TREE;
8309
8310 tree fndecl = TREE_OPERAND (fn, 0);
8311 if (TREE_CODE (fndecl) == FUNCTION_DECL
8312 && DECL_BUILT_IN (fndecl))
8313 {
8314 /* If last argument is __builtin_va_arg_pack (), arguments to this
8315 function are not finalized yet. Defer folding until they are. */
8316 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8317 {
8318 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8319 if (fndecl2
8320 && TREE_CODE (fndecl2) == FUNCTION_DECL
8321 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8322 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8323 return NULL_TREE;
8324 }
8325 if (avoid_folding_inline_builtin (fndecl))
8326 return NULL_TREE;
8327 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8328 return targetm.fold_builtin (fndecl, n, argarray, false);
8329 else
8330 return fold_builtin_n (loc, fndecl, argarray, n, false);
8331 }
8332
8333 return NULL_TREE;
8334 }
8335
8336 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8337 along with N new arguments specified as the "..." parameters. SKIP
8338 is the number of arguments in EXP to be omitted. This function is used
8339 to do varargs-to-varargs transformations. */
8340
8341 static tree
8342 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8343 {
8344 va_list ap;
8345 tree t;
8346
8347 va_start (ap, n);
8348 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8349 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8350 va_end (ap);
8351
8352 return t;
8353 }
8354
8355 /* Validate a single argument ARG against a tree code CODE representing
8356 a type. */
8357
8358 static bool
8359 validate_arg (const_tree arg, enum tree_code code)
8360 {
8361 if (!arg)
8362 return false;
8363 else if (code == POINTER_TYPE)
8364 return POINTER_TYPE_P (TREE_TYPE (arg));
8365 else if (code == INTEGER_TYPE)
8366 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8367 return code == TREE_CODE (TREE_TYPE (arg));
8368 }
8369
8370 /* This function validates the types of a function call argument list
8371 against a specified list of tree_codes. If the last specifier is a 0,
8372 that represents an ellipsis, otherwise the last specifier must be a
8373 VOID_TYPE.
8374
8375 This is the GIMPLE version of validate_arglist. Eventually we want to
8376 completely convert builtins.c to work from GIMPLEs and the tree based
8377 validate_arglist will then be removed. */
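/* Illustrative example: a typical use in this file is
     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   which accepts exactly a pointer argument followed by an integral one,
   whereas a trailing 0 instead of VOID_TYPE would allow any extra
   arguments after the listed ones.  */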
8378
8379 bool
8380 validate_gimple_arglist (const gcall *call, ...)
8381 {
8382 enum tree_code code;
8383 bool res = false;
8384 va_list ap;
8385 const_tree arg;
8386 size_t i;
8387
8388 va_start (ap, call);
8389 i = 0;
8390
8391 do
8392 {
8393 code = (enum tree_code) va_arg (ap, int);
8394 switch (code)
8395 {
8396 case 0:
8397 /* This signifies an ellipsis; any further arguments are all ok. */
8398 res = true;
8399 goto end;
8400 case VOID_TYPE:
8401 /* This signifies an endlink, if no arguments remain, return
8402 true, otherwise return false. */
8403 res = (i == gimple_call_num_args (call));
8404 goto end;
8405 default:
8406 /* If no parameters remain or the parameter's code does not
8407 match the specified code, return false. Otherwise continue
8408 checking any remaining arguments. */
8409 arg = gimple_call_arg (call, i++);
8410 if (!validate_arg (arg, code))
8411 goto end;
8412 break;
8413 }
8414 }
8415 while (1);
8416
8417 /* We need gotos here since we can only have one va_end in a
8418 function. */
8419 end: ;
8420 va_end (ap);
8421
8422 return res;
8423 }
8424
8425 /* Default target-specific builtin expander that does nothing. */
8426
8427 rtx
8428 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8429 rtx target ATTRIBUTE_UNUSED,
8430 rtx subtarget ATTRIBUTE_UNUSED,
8431 machine_mode mode ATTRIBUTE_UNUSED,
8432 int ignore ATTRIBUTE_UNUSED)
8433 {
8434 return NULL_RTX;
8435 }
8436
8437 /* Returns true if EXP represents data that would potentially reside
8438 in a readonly section. */
8439
8440 bool
8441 readonly_data_expr (tree exp)
8442 {
8443 STRIP_NOPS (exp);
8444
8445 if (TREE_CODE (exp) != ADDR_EXPR)
8446 return false;
8447
8448 exp = get_base_address (TREE_OPERAND (exp, 0));
8449 if (!exp)
8450 return false;
8451
8452 /* Make sure we call decl_readonly_section only for trees it
8453 can handle (since it returns true for everything it doesn't
8454 understand). */
8455 if (TREE_CODE (exp) == STRING_CST
8456 || TREE_CODE (exp) == CONSTRUCTOR
8457 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8458 return decl_readonly_section (exp, 0);
8459 else
8460 return false;
8461 }
8462
8463 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8464 to the call, and TYPE is its return type.
8465
8466 Return NULL_TREE if no simplification was possible, otherwise return the
8467 simplified form of the call as a tree.
8468
8469 The simplified form may be a constant or other expression which
8470 computes the same value, but in a more efficient manner (including
8471 calls to other builtin functions).
8472
8473 The call may contain arguments which need to be evaluated, but
8474 which are not useful to determine the result of the call. In
8475 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8476 COMPOUND_EXPR will be an argument which must be evaluated.
8477 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8478 COMPOUND_EXPR in the chain will contain the tree for the simplified
8479 form of the builtin function call. */
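/* Illustrative examples: strstr (s, "") folds to (char *) s,
   strstr (s, "a") folds to strchr (s, 'a'), and when both arguments are
   constant, e.g. strstr ("hello", "ll"), the result is an offset into
   the first string.  */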
8480
8481 static tree
8482 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8483 {
8484 if (!validate_arg (s1, POINTER_TYPE)
8485 || !validate_arg (s2, POINTER_TYPE))
8486 return NULL_TREE;
8487 else
8488 {
8489 tree fn;
8490 const char *p1, *p2;
8491
8492 p2 = c_getstr (s2);
8493 if (p2 == NULL)
8494 return NULL_TREE;
8495
8496 p1 = c_getstr (s1);
8497 if (p1 != NULL)
8498 {
8499 const char *r = strstr (p1, p2);
8500 tree tem;
8501
8502 if (r == NULL)
8503 return build_int_cst (TREE_TYPE (s1), 0);
8504
8505 /* Return an offset into the constant string argument. */
8506 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8507 return fold_convert_loc (loc, type, tem);
8508 }
8509
8510 /* The argument is const char *, and the result is char *, so we need
8511 a type conversion here to avoid a warning. */
8512 if (p2[0] == '\0')
8513 return fold_convert_loc (loc, type, s1);
8514
8515 if (p2[1] != '\0')
8516 return NULL_TREE;
8517
8518 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8519 if (!fn)
8520 return NULL_TREE;
8521
8522 /* New argument list transforming strstr(s1, s2) to
8523 strchr(s1, s2[0]). */
8524 return build_call_expr_loc (loc, fn, 2, s1,
8525 build_int_cst (integer_type_node, p2[0]));
8526 }
8527 }
8528
8529 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8530 the call, and TYPE is its return type.
8531
8532 Return NULL_TREE if no simplification was possible, otherwise return the
8533 simplified form of the call as a tree.
8534
8535 The simplified form may be a constant or other expression which
8536 computes the same value, but in a more efficient manner (including
8537 calls to other builtin functions).
8538
8539 The call may contain arguments which need to be evaluated, but
8540 which are not useful to determine the result of the call. In
8541 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8542 COMPOUND_EXPR will be an argument which must be evaluated.
8543 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8544 COMPOUND_EXPR in the chain will contain the tree for the simplified
8545 form of the builtin function call. */
8546
8547 static tree
8548 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8549 {
8550 if (!validate_arg (s1, POINTER_TYPE)
8551 || !validate_arg (s2, INTEGER_TYPE))
8552 return NULL_TREE;
8553 else
8554 {
8555 const char *p1;
8556
8557 if (TREE_CODE (s2) != INTEGER_CST)
8558 return NULL_TREE;
8559
8560 p1 = c_getstr (s1);
8561 if (p1 != NULL)
8562 {
8563 char c;
8564 const char *r;
8565 tree tem;
8566
8567 if (target_char_cast (s2, &c))
8568 return NULL_TREE;
8569
8570 r = strchr (p1, c);
8571
8572 if (r == NULL)
8573 return build_int_cst (TREE_TYPE (s1), 0);
8574
8575 /* Return an offset into the constant string argument. */
8576 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8577 return fold_convert_loc (loc, type, tem);
8578 }
8579 return NULL_TREE;
8580 }
8581 }
8582
8583 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8584 the call, and TYPE is its return type.
8585
8586 Return NULL_TREE if no simplification was possible, otherwise return the
8587 simplified form of the call as a tree.
8588
8589 The simplified form may be a constant or other expression which
8590 computes the same value, but in a more efficient manner (including
8591 calls to other builtin functions).
8592
8593 The call may contain arguments which need to be evaluated, but
8594 which are not useful to determine the result of the call. In
8595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8596 COMPOUND_EXPR will be an argument which must be evaluated.
8597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8598 COMPOUND_EXPR in the chain will contain the tree for the simplified
8599 form of the builtin function call. */
8600
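/* For illustration, a sketch of the folds performed below (added example):

     __builtin_strrchr ("hello", 'l')   ->  "hello" + 3
     __builtin_strrchr ("hello", 'x')   ->  (char *) 0
     __builtin_strrchr (s, '\0')        ->  __builtin_strchr (s, '\0')

   The last form relies on the terminating NUL being found at the same
   position whether the string is searched from the front or the back.  */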
8601 static tree
8602 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8603 {
8604 if (!validate_arg (s1, POINTER_TYPE)
8605 || !validate_arg (s2, INTEGER_TYPE))
8606 return NULL_TREE;
8607 else
8608 {
8609 tree fn;
8610 const char *p1;
8611
8612 if (TREE_CODE (s2) != INTEGER_CST)
8613 return NULL_TREE;
8614
8615 p1 = c_getstr (s1);
8616 if (p1 != NULL)
8617 {
8618 char c;
8619 const char *r;
8620 tree tem;
8621
8622 if (target_char_cast (s2, &c))
8623 return NULL_TREE;
8624
8625 r = strrchr (p1, c);
8626
8627 if (r == NULL)
8628 return build_int_cst (TREE_TYPE (s1), 0);
8629
8630 /* Return an offset into the constant string argument. */
8631 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8632 return fold_convert_loc (loc, type, tem);
8633 }
8634
8635 if (! integer_zerop (s2))
8636 return NULL_TREE;
8637
8638 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8639 if (!fn)
8640 return NULL_TREE;
8641
8642 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8643 return build_call_expr_loc (loc, fn, 2, s1, s2);
8644 }
8645 }
8646
8647 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8648 to the call, and TYPE is its return type.
8649
8650 Return NULL_TREE if no simplification was possible, otherwise return the
8651 simplified form of the call as a tree.
8652
8653 The simplified form may be a constant or other expression which
8654 computes the same value, but in a more efficient manner (including
8655 calls to other builtin functions).
8656
8657 The call may contain arguments which need to be evaluated, but
8658 which are not useful to determine the result of the call. In
8659 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8660 COMPOUND_EXPR will be an argument which must be evaluated.
8661 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8662 COMPOUND_EXPR in the chain will contain the tree for the simplified
8663 form of the builtin function call. */
8664
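/* For illustration, a sketch of the folds performed below (added example):

     __builtin_strpbrk (s, "")          ->  (char *) 0, S still evaluated
     __builtin_strpbrk (s, "a")         ->  __builtin_strchr (s, 'a')
     __builtin_strpbrk ("hello", "lo")  ->  "hello" + 2

   With a non-constant S1 and an accept set of two or more characters the
   call is left for the library routine.  */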
8665 static tree
8666 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8667 {
8668 if (!validate_arg (s1, POINTER_TYPE)
8669 || !validate_arg (s2, POINTER_TYPE))
8670 return NULL_TREE;
8671 else
8672 {
8673 tree fn;
8674 const char *p1, *p2;
8675
8676 p2 = c_getstr (s2);
8677 if (p2 == NULL)
8678 return NULL_TREE;
8679
8680 p1 = c_getstr (s1);
8681 if (p1 != NULL)
8682 {
8683 const char *r = strpbrk (p1, p2);
8684 tree tem;
8685
8686 if (r == NULL)
8687 return build_int_cst (TREE_TYPE (s1), 0);
8688
8689 /* Return an offset into the constant string argument. */
8690 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8691 return fold_convert_loc (loc, type, tem);
8692 }
8693
8694 if (p2[0] == '\0')
8695 /* strpbrk(x, "") == NULL.
8696 Evaluate and ignore s1 in case it had side-effects. */
8697 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8698
8699 if (p2[1] != '\0')
8700 return NULL_TREE; /* Really call strpbrk. */
8701
8702 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8703 if (!fn)
8704 return NULL_TREE;
8705
8706 /* New argument list transforming strpbrk(s1, s2) to
8707 strchr(s1, s2[0]). */
8708 return build_call_expr_loc (loc, fn, 2, s1,
8709 build_int_cst (integer_type_node, p2[0]));
8710 }
8711 }
8712
8713 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8714 to the call.
8715
8716 Return NULL_TREE if no simplification was possible, otherwise return the
8717 simplified form of the call as a tree.
8718
8719 The simplified form may be a constant or other expression which
8720 computes the same value, but in a more efficient manner (including
8721 calls to other builtin functions).
8722
8723 The call may contain arguments which need to be evaluated, but
8724 which are not useful to determine the result of the call. In
8725 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8726 COMPOUND_EXPR will be an argument which must be evaluated.
8727 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8728 COMPOUND_EXPR in the chain will contain the tree for the simplified
8729 form of the builtin function call. */
8730
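/* For illustration, the only fold performed below (added example): if
   either argument is known to be the empty string the result is zero,

     __builtin_strspn (s, "")   ->  (size_t) 0
     __builtin_strspn ("", s)   ->  (size_t) 0

   with both arguments still evaluated for their side effects.  */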
8731 static tree
8732 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8733 {
8734 if (!validate_arg (s1, POINTER_TYPE)
8735 || !validate_arg (s2, POINTER_TYPE))
8736 return NULL_TREE;
8737 else
8738 {
8739 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8740
8741 /* If either argument is "", return NULL_TREE. */
8742 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8743 /* Evaluate and ignore both arguments in case either one has
8744 side-effects. */
8745 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8746 s1, s2);
8747 return NULL_TREE;
8748 }
8749 }
8750
8751 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8752 to the call.
8753
8754 Return NULL_TREE if no simplification was possible, otherwise return the
8755 simplified form of the call as a tree.
8756
8757 The simplified form may be a constant or other expression which
8758 computes the same value, but in a more efficient manner (including
8759 calls to other builtin functions).
8760
8761 The call may contain arguments which need to be evaluated, but
8762 which are not useful to determine the result of the call. In
8763 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8764 COMPOUND_EXPR will be an argument which must be evaluated.
8765 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8766 COMPOUND_EXPR in the chain will contain the tree for the simplified
8767 form of the builtin function call. */
8768
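/* For illustration, a sketch of the folds performed below (added example):

     __builtin_strcspn ("", s)   ->  (size_t) 0, S still evaluated
     __builtin_strcspn (s, "")   ->  __builtin_strlen (s)

   Other calls are not simplified here.  */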
8769 static tree
8770 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8771 {
8772 if (!validate_arg (s1, POINTER_TYPE)
8773 || !validate_arg (s2, POINTER_TYPE))
8774 return NULL_TREE;
8775 else
8776 {
8777 /* If the first argument is "", return NULL_TREE. */
8778 const char *p1 = c_getstr (s1);
8779 if (p1 && *p1 == '\0')
8780 {
8781 /* Evaluate and ignore argument s2 in case it has
8782 side-effects. */
8783 return omit_one_operand_loc (loc, size_type_node,
8784 size_zero_node, s2);
8785 }
8786
8787 /* If the second argument is "", return __builtin_strlen(s1). */
8788 const char *p2 = c_getstr (s2);
8789 if (p2 && *p2 == '\0')
8790 {
8791 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8792
8793 /* If the replacement _DECL isn't initialized, don't do the
8794 transformation. */
8795 if (!fn)
8796 return NULL_TREE;
8797
8798 return build_call_expr_loc (loc, fn, 1, s1);
8799 }
8800 return NULL_TREE;
8801 }
8802 }
8803
8804 /* Fold the next_arg or va_start call EXP. Return true if an error was
8805 produced, false otherwise. This is done so that we don't output the error
8806 or warning twice or three times. */
8807
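/* For illustration, a hedged sketch of what is diagnosed below, assuming
   <stdarg.h> has been included:

     void ok (int a, int b, ...)
     { va_list ap; va_start (ap, b); va_end (ap); }

     void bad (int a, int b, ...)
     { va_list ap; va_start (ap, a); va_end (ap); }

   OK names the last fixed parameter and is accepted quietly; BAD draws
   the -Wvarargs warning about the second parameter not being the last
   named argument; and using va_start in a function with a fixed argument
   list is rejected with an error.  */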
8808 bool
8809 fold_builtin_next_arg (tree exp, bool va_start_p)
8810 {
8811 tree fntype = TREE_TYPE (current_function_decl);
8812 int nargs = call_expr_nargs (exp);
8813 tree arg;
8814 /* There is a good chance the current input_location points inside the
8815 definition of the va_start macro (perhaps on the token for the
8816 builtin) in a system header, so warnings would not be emitted.
8817 Use the location in real source code instead. */
8818 source_location current_location =
8819 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8820 NULL);
8821
8822 if (!stdarg_p (fntype))
8823 {
8824 error ("%<va_start%> used in function with fixed args");
8825 return true;
8826 }
8827
8828 if (va_start_p)
8829 {
8830 if (va_start_p && (nargs != 2))
8831 {
8832 error ("wrong number of arguments to function %<va_start%>");
8833 return true;
8834 }
8835 arg = CALL_EXPR_ARG (exp, 1);
8836 }
8837 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
8838 once we have checked the arguments and, if needed, issued a warning. */
8839 else
8840 {
8841 if (nargs == 0)
8842 {
8843 /* Evidently an out of date version of <stdarg.h>; can't validate
8844 va_start's second argument, but can still work as intended. */
8845 warning_at (current_location,
8846 OPT_Wvarargs,
8847 "%<__builtin_next_arg%> called without an argument");
8848 return true;
8849 }
8850 else if (nargs > 1)
8851 {
8852 error ("wrong number of arguments to function %<__builtin_next_arg%>");
8853 return true;
8854 }
8855 arg = CALL_EXPR_ARG (exp, 0);
8856 }
8857
8858 if (TREE_CODE (arg) == SSA_NAME)
8859 arg = SSA_NAME_VAR (arg);
8860
8861 /* We destructively modify the call to be __builtin_va_start (ap, 0)
8862 or __builtin_next_arg (0) the first time we see it, after checking
8863 the arguments and if needed issuing a warning. */
8864 if (!integer_zerop (arg))
8865 {
8866 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8867
8868 /* Strip off all nops for the sake of the comparison. This
8869 is not quite the same as STRIP_NOPS. It does more.
8870 We must also strip off INDIRECT_REF for C++ reference
8871 parameters. */
8872 while (CONVERT_EXPR_P (arg)
8873 || TREE_CODE (arg) == INDIRECT_REF)
8874 arg = TREE_OPERAND (arg, 0);
8875 if (arg != last_parm)
8876 {
8877 /* FIXME: Sometimes with the tree optimizers we can end up with
8878 something that is not the last argument even though the user
8879 did use the last argument. We just warn here and carry on,
8880 so wrong code may be generated because of
8881 it. */
8882 warning_at (current_location,
8883 OPT_Wvarargs,
8884 "second parameter of %<va_start%> not last named argument");
8885 }
8886
8887 /* Undefined by C99 7.15.1.4p4 (va_start):
8888 "If the parameter parmN is declared with the register storage
8889 class, with a function or array type, or with a type that is
8890 not compatible with the type that results after application of
8891 the default argument promotions, the behavior is undefined."
8892 */
8893 else if (DECL_REGISTER (arg))
8894 {
8895 warning_at (current_location,
8896 OPT_Wvarargs,
8897 "undefined behaviour when second parameter of "
8898 "%<va_start%> is declared with %<register%> storage");
8899 }
8900
8901 /* We want to verify the second parameter just once before the tree
8902 optimizers are run and then avoid keeping it in the tree,
8903 as otherwise we could warn even for correct code like:
8904 void foo (int i, ...)
8905 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
8906 if (va_start_p)
8907 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
8908 else
8909 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
8910 }
8911 return false;
8912 }
8913
8914
8915 /* Expand a call EXP to __builtin_object_size. */
8916
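/* For illustration (added sketch): by the time expansion is reached the
   object size could not be folded to a constant, so the call expands to
   the documented "unknown" values,

     __builtin_object_size (p, 0)   ->  (size_t) -1
     __builtin_object_size (p, 2)   ->  (size_t) 0

   matching the constm1_rtx / const0_rtx returned at the end below.  */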
8917 static rtx
8918 expand_builtin_object_size (tree exp)
8919 {
8920 tree ost;
8921 int object_size_type;
8922 tree fndecl = get_callee_fndecl (exp);
8923
8924 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8925 {
8926 error ("%Kfirst argument of %D must be a pointer, second integer constant",
8927 exp, fndecl);
8928 expand_builtin_trap ();
8929 return const0_rtx;
8930 }
8931
8932 ost = CALL_EXPR_ARG (exp, 1);
8933 STRIP_NOPS (ost);
8934
8935 if (TREE_CODE (ost) != INTEGER_CST
8936 || tree_int_cst_sgn (ost) < 0
8937 || compare_tree_int (ost, 3) > 0)
8938 {
8939 error ("%Klast argument of %D is not integer constant between 0 and 3",
8940 exp, fndecl);
8941 expand_builtin_trap ();
8942 return const0_rtx;
8943 }
8944
8945 object_size_type = tree_to_shwi (ost);
8946
8947 return object_size_type < 2 ? constm1_rtx : const0_rtx;
8948 }
8949
8950 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
8951 FCODE is the BUILT_IN_* to use.
8952 Return NULL_RTX if we failed; the caller should emit a normal call,
8953 otherwise try to get the result in TARGET, if convenient (and in
8954 mode MODE if that's convenient). */
8955
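/* For illustration, a hedged sketch of the main cases handled below,
   writing BOS for the compile-time object size argument:

     __builtin___memcpy_chk (d, s, 10, 32)   ->  memcpy (d, s, 10)
     __builtin___memcpy_chk (d, s, 64, 32)   ->  warning, call kept
     __builtin___memcpy_chk (d, s, n, -1)    ->  memcpy (d, s, n)

   i.e. a constant length no larger than BOS, or a BOS of (size_t) -1
   meaning "unknown", lets the checking call decay to the plain
   built-in.  */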
8956 static rtx
8957 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
8958 enum built_in_function fcode)
8959 {
8960 tree dest, src, len, size;
8961
8962 if (!validate_arglist (exp,
8963 POINTER_TYPE,
8964 fcode == BUILT_IN_MEMSET_CHK
8965 ? INTEGER_TYPE : POINTER_TYPE,
8966 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
8967 return NULL_RTX;
8968
8969 dest = CALL_EXPR_ARG (exp, 0);
8970 src = CALL_EXPR_ARG (exp, 1);
8971 len = CALL_EXPR_ARG (exp, 2);
8972 size = CALL_EXPR_ARG (exp, 3);
8973
8974 if (! tree_fits_uhwi_p (size))
8975 return NULL_RTX;
8976
8977 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
8978 {
8979 tree fn;
8980
8981 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
8982 {
8983 warning_at (tree_nonartificial_location (exp),
8984 0, "%Kcall to %D will always overflow destination buffer",
8985 exp, get_callee_fndecl (exp));
8986 return NULL_RTX;
8987 }
8988
8989 fn = NULL_TREE;
8990 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
8991 mem{cpy,pcpy,move,set} is available. */
8992 switch (fcode)
8993 {
8994 case BUILT_IN_MEMCPY_CHK:
8995 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
8996 break;
8997 case BUILT_IN_MEMPCPY_CHK:
8998 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
8999 break;
9000 case BUILT_IN_MEMMOVE_CHK:
9001 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9002 break;
9003 case BUILT_IN_MEMSET_CHK:
9004 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9005 break;
9006 default:
9007 break;
9008 }
9009
9010 if (! fn)
9011 return NULL_RTX;
9012
9013 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9014 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9015 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9016 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9017 }
9018 else if (fcode == BUILT_IN_MEMSET_CHK)
9019 return NULL_RTX;
9020 else
9021 {
9022 unsigned int dest_align = get_pointer_alignment (dest);
9023
9024 /* If DEST is not a pointer type, call the normal function. */
9025 if (dest_align == 0)
9026 return NULL_RTX;
9027
9028 /* If SRC and DEST are the same (and not volatile), do nothing. */
9029 if (operand_equal_p (src, dest, 0))
9030 {
9031 tree expr;
9032
9033 if (fcode != BUILT_IN_MEMPCPY_CHK)
9034 {
9035 /* Evaluate and ignore LEN in case it has side-effects. */
9036 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9037 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9038 }
9039
9040 expr = fold_build_pointer_plus (dest, len);
9041 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9042 }
9043
9044 /* __memmove_chk special case. */
9045 if (fcode == BUILT_IN_MEMMOVE_CHK)
9046 {
9047 unsigned int src_align = get_pointer_alignment (src);
9048
9049 if (src_align == 0)
9050 return NULL_RTX;
9051
9052 /* If src is categorized for a readonly section we can use
9053 normal __memcpy_chk. */
9054 if (readonly_data_expr (src))
9055 {
9056 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9057 if (!fn)
9058 return NULL_RTX;
9059 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9060 dest, src, len, size);
9061 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9062 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9063 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9064 }
9065 }
9066 return NULL_RTX;
9067 }
9068 }
9069
9070 /* Emit warning if a buffer overflow is detected at compile time. */
9071
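/* For illustration, a hedged sketch of a case this catches, assuming the
   checking call was produced by -D_FORTIFY_SOURCE:

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", 4);

   The source needs six bytes but the object size is four, so the "will
   always overflow destination buffer" warning is emitted at compile
   time.  */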
9072 static void
9073 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9074 {
9075 int is_strlen = 0;
9076 tree len, size;
9077 location_t loc = tree_nonartificial_location (exp);
9078
9079 switch (fcode)
9080 {
9081 case BUILT_IN_STRCPY_CHK:
9082 case BUILT_IN_STPCPY_CHK:
9083 /* For __strcat_chk the warning will be emitted only if overflowing
9084 by at least strlen (dest) + 1 bytes. */
9085 case BUILT_IN_STRCAT_CHK:
9086 len = CALL_EXPR_ARG (exp, 1);
9087 size = CALL_EXPR_ARG (exp, 2);
9088 is_strlen = 1;
9089 break;
9090 case BUILT_IN_STRNCAT_CHK:
9091 case BUILT_IN_STRNCPY_CHK:
9092 case BUILT_IN_STPNCPY_CHK:
9093 len = CALL_EXPR_ARG (exp, 2);
9094 size = CALL_EXPR_ARG (exp, 3);
9095 break;
9096 case BUILT_IN_SNPRINTF_CHK:
9097 case BUILT_IN_VSNPRINTF_CHK:
9098 len = CALL_EXPR_ARG (exp, 1);
9099 size = CALL_EXPR_ARG (exp, 3);
9100 break;
9101 default:
9102 gcc_unreachable ();
9103 }
9104
9105 if (!len || !size)
9106 return;
9107
9108 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9109 return;
9110
9111 if (is_strlen)
9112 {
9113 len = c_strlen (len, 1);
9114 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9115 return;
9116 }
9117 else if (fcode == BUILT_IN_STRNCAT_CHK)
9118 {
9119 tree src = CALL_EXPR_ARG (exp, 1);
9120 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9121 return;
9122 src = c_strlen (src, 1);
9123 if (! src || ! tree_fits_uhwi_p (src))
9124 {
9125 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9126 exp, get_callee_fndecl (exp));
9127 return;
9128 }
9129 else if (tree_int_cst_lt (src, size))
9130 return;
9131 }
9132 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9133 return;
9134
9135 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9136 exp, get_callee_fndecl (exp));
9137 }
9138
9139 /* Emit warning if a buffer overflow is detected at compile time
9140 in __sprintf_chk/__vsprintf_chk calls. */
9141
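/* For illustration, a hedged sketch of the two format shapes understood
   below:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, 4, "hello");
     __builtin___sprintf_chk (buf, 0, 4, "%s", "hello");

   In both cases the output length 5 is not smaller than the object size
   4, so the "will always overflow destination buffer" warning is given;
   any other % directive makes the length unknown and no warning is
   emitted here.  */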
9142 static void
9143 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9144 {
9145 tree size, len, fmt;
9146 const char *fmt_str;
9147 int nargs = call_expr_nargs (exp);
9148
9149 /* Verify the required arguments in the original call. */
9150
9151 if (nargs < 4)
9152 return;
9153 size = CALL_EXPR_ARG (exp, 2);
9154 fmt = CALL_EXPR_ARG (exp, 3);
9155
9156 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9157 return;
9158
9159 /* Check whether the format is a literal string constant. */
9160 fmt_str = c_getstr (fmt);
9161 if (fmt_str == NULL)
9162 return;
9163
9164 if (!init_target_chars ())
9165 return;
9166
9167 /* If the format doesn't contain % args or %%, we know its size. */
9168 if (strchr (fmt_str, target_percent) == 0)
9169 len = build_int_cstu (size_type_node, strlen (fmt_str));
9170 /* If the format is "%s" and the first ... argument is a string literal,
9171 we know its size too. */
9172 else if (fcode == BUILT_IN_SPRINTF_CHK
9173 && strcmp (fmt_str, target_percent_s) == 0)
9174 {
9175 tree arg;
9176
9177 if (nargs < 5)
9178 return;
9179 arg = CALL_EXPR_ARG (exp, 4);
9180 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9181 return;
9182
9183 len = c_strlen (arg, 1);
9184 if (!len || ! tree_fits_uhwi_p (len))
9185 return;
9186 }
9187 else
9188 return;
9189
9190 if (! tree_int_cst_lt (len, size))
9191 warning_at (tree_nonartificial_location (exp),
9192 0, "%Kcall to %D will always overflow destination buffer",
9193 exp, get_callee_fndecl (exp));
9194 }
9195
9196 /* Emit warning if a free is called with address of a variable. */
9197
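/* For illustration, a hedged sketch of what is diagnosed below:

     int x;
     __builtin_free (&x);

   draws the -Wfree-nonheap-object warning naming X, while freeing a
   pointer whose base object cannot be identified (for instance one
   loaded through another pointer) is left alone.  */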
9198 static void
9199 maybe_emit_free_warning (tree exp)
9200 {
9201 tree arg = CALL_EXPR_ARG (exp, 0);
9202
9203 STRIP_NOPS (arg);
9204 if (TREE_CODE (arg) != ADDR_EXPR)
9205 return;
9206
9207 arg = get_base_address (TREE_OPERAND (arg, 0));
9208 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9209 return;
9210
9211 if (SSA_VAR_P (arg))
9212 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9213 "%Kattempt to free a non-heap object %qD", exp, arg);
9214 else
9215 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9216 "%Kattempt to free a non-heap object", exp);
9217 }
9218
9219 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9220 if possible. */
9221
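/* For illustration, a hedged sketch of the folding done below:

     char a[16];
     __builtin_object_size (&a[4], 0)   ->  (size_t) 12
     __builtin_object_size (f (), 0)    ->  (size_t) -1, since the
                                            argument has side effects

   SSA_NAME pointers whose size is not yet known are deliberately left
   unfolded so that a later pass can try again.  */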
9222 static tree
9223 fold_builtin_object_size (tree ptr, tree ost)
9224 {
9225 unsigned HOST_WIDE_INT bytes;
9226 int object_size_type;
9227
9228 if (!validate_arg (ptr, POINTER_TYPE)
9229 || !validate_arg (ost, INTEGER_TYPE))
9230 return NULL_TREE;
9231
9232 STRIP_NOPS (ost);
9233
9234 if (TREE_CODE (ost) != INTEGER_CST
9235 || tree_int_cst_sgn (ost) < 0
9236 || compare_tree_int (ost, 3) > 0)
9237 return NULL_TREE;
9238
9239 object_size_type = tree_to_shwi (ost);
9240
9241 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9242 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9243 and (size_t) 0 for types 2 and 3. */
9244 if (TREE_SIDE_EFFECTS (ptr))
9245 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9246
9247 if (TREE_CODE (ptr) == ADDR_EXPR)
9248 {
9249 bytes = compute_builtin_object_size (ptr, object_size_type);
9250 if (wi::fits_to_tree_p (bytes, size_type_node))
9251 return build_int_cstu (size_type_node, bytes);
9252 }
9253 else if (TREE_CODE (ptr) == SSA_NAME)
9254 {
9255 /* If object size is not known yet, delay folding until
9256 later. Maybe subsequent passes will help determining
9257 it. */
9258 bytes = compute_builtin_object_size (ptr, object_size_type);
9259 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9260 && wi::fits_to_tree_p (bytes, size_type_node))
9261 return build_int_cstu (size_type_node, bytes);
9262 }
9263
9264 return NULL_TREE;
9265 }
9266
9267 /* Builtins with folding operations that operate on "..." arguments
9268 need special handling; we need to store the arguments in a convenient
9269 data structure before attempting any folding. Fortunately there are
9270 only a few builtins that fall into this category. FNDECL is the
9271 function, EXP is the CALL_EXPR for the call. */
9272
9273 static tree
9274 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9275 {
9276 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9277 tree ret = NULL_TREE;
9278
9279 switch (fcode)
9280 {
9281 case BUILT_IN_FPCLASSIFY:
9282 ret = fold_builtin_fpclassify (loc, args, nargs);
9283 break;
9284
9285 default:
9286 break;
9287 }
9288 if (ret)
9289 {
9290 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9291 SET_EXPR_LOCATION (ret, loc);
9292 TREE_NO_WARNING (ret) = 1;
9293 return ret;
9294 }
9295 return NULL_TREE;
9296 }
9297
9298 /* Initialize format string characters in the target charset. */
9299
9300 bool
9301 init_target_chars (void)
9302 {
9303 static bool init;
9304 if (!init)
9305 {
9306 target_newline = lang_hooks.to_target_charset ('\n');
9307 target_percent = lang_hooks.to_target_charset ('%');
9308 target_c = lang_hooks.to_target_charset ('c');
9309 target_s = lang_hooks.to_target_charset ('s');
9310 if (target_newline == 0 || target_percent == 0 || target_c == 0
9311 || target_s == 0)
9312 return false;
9313
9314 target_percent_c[0] = target_percent;
9315 target_percent_c[1] = target_c;
9316 target_percent_c[2] = '\0';
9317
9318 target_percent_s[0] = target_percent;
9319 target_percent_s[1] = target_s;
9320 target_percent_s[2] = '\0';
9321
9322 target_percent_s_newline[0] = target_percent;
9323 target_percent_s_newline[1] = target_s;
9324 target_percent_s_newline[2] = target_newline;
9325 target_percent_s_newline[3] = '\0';
9326
9327 init = true;
9328 }
9329 return true;
9330 }
9331
9332 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9333 and no overflow/underflow occurred. INEXACT is true if M was not
9334 exactly calculated. TYPE is the tree type for the result. This
9335 function assumes that you cleared the MPFR flags and then
9336 calculated M to see if anything subsequently set a flag prior to
9337 entering this function. Return NULL_TREE if any checks fail. */
9338
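/* For illustration, the calling pattern expected by this helper (a
   sketch of how the callers below use it):

     mpfr_clear_flags ();
     inexact = mpfr_lgamma (m, &sg, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);

   so the exception flags tested here describe exactly the one operation
   performed since the flags were cleared.  */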
9339 static tree
9340 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9341 {
9342 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9343 overflow/underflow occurred. If -frounding-math, proceed iff the
9344 result of calling FUNC was exact. */
9345 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9346 && (!flag_rounding_math || !inexact))
9347 {
9348 REAL_VALUE_TYPE rr;
9349
9350 real_from_mpfr (&rr, m, type, GMP_RNDN);
9351 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9352 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9353 but the mpfr_t is not, then we underflowed in the
9354 conversion. */
9355 if (real_isfinite (&rr)
9356 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9357 {
9358 REAL_VALUE_TYPE rmode;
9359
9360 real_convert (&rmode, TYPE_MODE (type), &rr);
9361 /* Proceed iff the specified mode can hold the value. */
9362 if (real_identical (&rmode, &rr))
9363 return build_real (type, rmode);
9364 }
9365 }
9366 return NULL_TREE;
9367 }
9368
9369 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9370 number and no overflow/underflow occurred. INEXACT is true if M
9371 was not exactly calculated. TYPE is the tree type for the result.
9372 This function assumes that you cleared the MPFR flags and then
9373 calculated M to see if anything subsequently set a flag prior to
9374 entering this function. Return NULL_TREE if any checks fail, if
9375 FORCE_CONVERT is true, then bypass the checks. */
9376
9377 static tree
9378 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9379 {
9380 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9381 overflow/underflow occurred. If -frounding-math, proceed iff the
9382 result of calling FUNC was exact. */
9383 if (force_convert
9384 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9385 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9386 && (!flag_rounding_math || !inexact)))
9387 {
9388 REAL_VALUE_TYPE re, im;
9389
9390 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9391 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9392 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9393 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9394 but the mpfr_t is not, then we underflowed in the
9395 conversion. */
9396 if (force_convert
9397 || (real_isfinite (&re) && real_isfinite (&im)
9398 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9399 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9400 {
9401 REAL_VALUE_TYPE re_mode, im_mode;
9402
9403 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9404 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9405 /* Proceed iff the specified mode can hold the value. */
9406 if (force_convert
9407 || (real_identical (&re_mode, &re)
9408 && real_identical (&im_mode, &im)))
9409 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9410 build_real (TREE_TYPE (type), im_mode));
9411 }
9412 }
9413 return NULL_TREE;
9414 }
9415
9416 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9417 the pointer *(ARG_QUO) and return the result. The type is taken
9418 from the type of ARG0 and is used for setting the precision of the
9419 calculation and results. */
9420
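/* For illustration, a hedged sketch of the folding this enables:

     int q;
     __builtin_remquo (5.0, 3.0, &q)   ->  -1.0, with *q assigned 2

   since the quotient 5/3 rounded to the nearest integer is 2 and the
   corresponding remainder is 5 - 2*3 == -1.  The result below is a
   COMPOUND_EXPR that performs the store to Q and yields the
   remainder.  */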
9421 static tree
9422 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9423 {
9424 tree const type = TREE_TYPE (arg0);
9425 tree result = NULL_TREE;
9426
9427 STRIP_NOPS (arg0);
9428 STRIP_NOPS (arg1);
9429
9430 /* To proceed, MPFR must exactly represent the target floating point
9431 format, which only happens when the target base equals two. */
9432 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9433 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9434 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9435 {
9436 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9437 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9438
9439 if (real_isfinite (ra0) && real_isfinite (ra1))
9440 {
9441 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9442 const int prec = fmt->p;
9443 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9444 tree result_rem;
9445 long integer_quo;
9446 mpfr_t m0, m1;
9447
9448 mpfr_inits2 (prec, m0, m1, NULL);
9449 mpfr_from_real (m0, ra0, GMP_RNDN);
9450 mpfr_from_real (m1, ra1, GMP_RNDN);
9451 mpfr_clear_flags ();
9452 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9453 /* Remquo is independent of the rounding mode, so pass
9454 inexact=0 to do_mpfr_ckconv(). */
9455 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9456 mpfr_clears (m0, m1, NULL);
9457 if (result_rem)
9458 {
9459 /* MPFR calculates quo in the host's long so it may
9460 return more bits in quo than the target int can hold
9461 if sizeof(host long) > sizeof(target int). This can
9462 happen even for native compilers in LP64 mode. In
9463 these cases, modulo the quo value with the largest
9464 number that the target int can hold while leaving one
9465 bit for the sign. */
9466 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9467 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9468
9469 /* Dereference the quo pointer argument. */
9470 arg_quo = build_fold_indirect_ref (arg_quo);
9471 /* Proceed iff a valid pointer type was passed in. */
9472 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9473 {
9474 /* Set the value. */
9475 tree result_quo
9476 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9477 build_int_cst (TREE_TYPE (arg_quo),
9478 integer_quo));
9479 TREE_SIDE_EFFECTS (result_quo) = 1;
9480 /* Combine the quo assignment with the rem. */
9481 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9482 result_quo, result_rem));
9483 }
9484 }
9485 }
9486 }
9487 return result;
9488 }
9489
9490 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9491 resulting value as a tree with type TYPE. The mpfr precision is
9492 set to the precision of TYPE. We assume that this mpfr function
9493 returns zero if the result could be calculated exactly within the
9494 requested precision. In addition, the integer pointer represented
9495 by ARG_SG will be dereferenced and set to the appropriate signgam
9496 (-1,1) value. */
9497
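/* For illustration, a hedged sketch of the folding this enables:

     int sg;
     __builtin_lgamma_r (1.0, &sg)   ->  0.0, with *sg assigned 1

   since gamma(1) == 1, its logarithm is exactly zero and the sign of
   gamma is positive.  As in do_mpfr_remquo, the result is a
   COMPOUND_EXPR combining the signgam store with the value.  */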
9498 static tree
9499 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9500 {
9501 tree result = NULL_TREE;
9502
9503 STRIP_NOPS (arg);
9504
9505 /* To proceed, MPFR must exactly represent the target floating point
9506 format, which only happens when the target base equals two. Also
9507 verify ARG is a constant and that ARG_SG is an int pointer. */
9508 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9509 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9510 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9511 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9512 {
9513 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9514
9515 /* In addition to NaN and Inf, the argument cannot be zero or a
9516 negative integer. */
9517 if (real_isfinite (ra)
9518 && ra->cl != rvc_zero
9519 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9520 {
9521 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9522 const int prec = fmt->p;
9523 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9524 int inexact, sg;
9525 mpfr_t m;
9526 tree result_lg;
9527
9528 mpfr_init2 (m, prec);
9529 mpfr_from_real (m, ra, GMP_RNDN);
9530 mpfr_clear_flags ();
9531 inexact = mpfr_lgamma (m, &sg, m, rnd);
9532 result_lg = do_mpfr_ckconv (m, type, inexact);
9533 mpfr_clear (m);
9534 if (result_lg)
9535 {
9536 tree result_sg;
9537
9538 /* Dereference the arg_sg pointer argument. */
9539 arg_sg = build_fold_indirect_ref (arg_sg);
9540 /* Assign the signgam value into *arg_sg. */
9541 result_sg = fold_build2 (MODIFY_EXPR,
9542 TREE_TYPE (arg_sg), arg_sg,
9543 build_int_cst (TREE_TYPE (arg_sg), sg));
9544 TREE_SIDE_EFFECTS (result_sg) = 1;
9545 /* Combine the signgam assignment with the lgamma result. */
9546 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9547 result_sg, result_lg));
9548 }
9549 }
9550 }
9551
9552 return result;
9553 }
9554
9555 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9556 mpc function FUNC on it and return the resulting value as a tree
9557 with type TYPE. The mpfr precision is set to the precision of
9558 TYPE. We assume that function FUNC returns zero if the result
9559 could be calculated exactly within the requested precision. If
9560 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9561 in the arguments and/or results. */
9562
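/* For illustration, a hedged sketch: called with FUNC == mpc_pow (the
   MPC counterpart of cpow), two complex constants such as

     1.0 + 1.0i  and  2.0 + 0.0i

   fold to the complex constant 0.0 + 2.0i, because (1+i)^2 == 2i is
   representable exactly.  Non-finite operands are folded only when
   DO_NONFINITE is set.  */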
9563 tree
9564 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9565 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9566 {
9567 tree result = NULL_TREE;
9568
9569 STRIP_NOPS (arg0);
9570 STRIP_NOPS (arg1);
9571
9572 /* To proceed, MPFR must exactly represent the target floating point
9573 format, which only happens when the target base equals two. */
9574 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9575 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9576 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9577 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9578 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9579 {
9580 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9581 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9582 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9583 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9584
9585 if (do_nonfinite
9586 || (real_isfinite (re0) && real_isfinite (im0)
9587 && real_isfinite (re1) && real_isfinite (im1)))
9588 {
9589 const struct real_format *const fmt =
9590 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9591 const int prec = fmt->p;
9592 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9593 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9594 int inexact;
9595 mpc_t m0, m1;
9596
9597 mpc_init2 (m0, prec);
9598 mpc_init2 (m1, prec);
9599 mpfr_from_real (mpc_realref (m0), re0, rnd);
9600 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9601 mpfr_from_real (mpc_realref (m1), re1, rnd);
9602 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9603 mpfr_clear_flags ();
9604 inexact = func (m0, m0, m1, crnd);
9605 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9606 mpc_clear (m0);
9607 mpc_clear (m1);
9608 }
9609 }
9610
9611 return result;
9612 }
9613
9614 /* A wrapper function for builtin folding that prevents warnings for
9615 "statement without effect" and the like, caused by removing the
9616 call node before the warning is generated. */
9617
9618 tree
9619 fold_call_stmt (gcall *stmt, bool ignore)
9620 {
9621 tree ret = NULL_TREE;
9622 tree fndecl = gimple_call_fndecl (stmt);
9623 location_t loc = gimple_location (stmt);
9624 if (fndecl
9625 && TREE_CODE (fndecl) == FUNCTION_DECL
9626 && DECL_BUILT_IN (fndecl)
9627 && !gimple_call_va_arg_pack_p (stmt))
9628 {
9629 int nargs = gimple_call_num_args (stmt);
9630 tree *args = (nargs > 0
9631 ? gimple_call_arg_ptr (stmt, 0)
9632 : &error_mark_node);
9633
9634 if (avoid_folding_inline_builtin (fndecl))
9635 return NULL_TREE;
9636 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9637 {
9638 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9639 }
9640 else
9641 {
9642 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9643 if (ret)
9644 {
9645 /* Propagate location information from original call to
9646 expansion of builtin. Otherwise things like
9647 maybe_emit_chk_warning, that operate on the expansion
9648 of a builtin, will use the wrong location information. */
9649 if (gimple_has_location (stmt))
9650 {
9651 tree realret = ret;
9652 if (TREE_CODE (ret) == NOP_EXPR)
9653 realret = TREE_OPERAND (ret, 0);
9654 if (CAN_HAVE_LOCATION_P (realret)
9655 && !EXPR_HAS_LOCATION (realret))
9656 SET_EXPR_LOCATION (realret, loc);
9657 return realret;
9658 }
9659 return ret;
9660 }
9661 }
9662 }
9663 return NULL_TREE;
9664 }
9665
9666 /* Look up the function in builtin_decl that corresponds to DECL
9667 and set ASMSPEC as its user assembler name. DECL must be a
9668 function decl that declares a builtin. */
9669
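/* For illustration, a hedged sketch of when this matters: given

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   the block-move helpers and the memcpy libfunc are redirected below, so
   copies emitted by the compiler itself also end up calling my_memcpy
   instead of the default memcpy.  */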
9670 void
9671 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9672 {
9673 tree builtin;
9674 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9675 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9676 && asmspec != 0);
9677
9678 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9679 set_user_assembler_name (builtin, asmspec);
9680 switch (DECL_FUNCTION_CODE (decl))
9681 {
9682 case BUILT_IN_MEMCPY:
9683 init_block_move_fn (asmspec);
9684 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
9685 break;
9686 case BUILT_IN_MEMSET:
9687 init_block_clear_fn (asmspec);
9688 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
9689 break;
9690 case BUILT_IN_MEMMOVE:
9691 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
9692 break;
9693 case BUILT_IN_MEMCMP:
9694 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
9695 break;
9696 case BUILT_IN_ABORT:
9697 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
9698 break;
9699 case BUILT_IN_FFS:
9700 if (INT_TYPE_SIZE < BITS_PER_WORD)
9701 {
9702 set_user_assembler_libfunc ("ffs", asmspec);
9703 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
9704 MODE_INT, 0), "ffs");
9705 }
9706 break;
9707 default:
9708 break;
9709 }
9710 }
9711
9712 /* Return true if DECL is a builtin that expands to a constant or similarly
9713 simple code. */
9714 bool
9715 is_simple_builtin (tree decl)
9716 {
9717 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9718 switch (DECL_FUNCTION_CODE (decl))
9719 {
9720 /* Builtins that expand to constants. */
9721 case BUILT_IN_CONSTANT_P:
9722 case BUILT_IN_EXPECT:
9723 case BUILT_IN_OBJECT_SIZE:
9724 case BUILT_IN_UNREACHABLE:
9725 /* Simple register moves or loads from stack. */
9726 case BUILT_IN_ASSUME_ALIGNED:
9727 case BUILT_IN_RETURN_ADDRESS:
9728 case BUILT_IN_EXTRACT_RETURN_ADDR:
9729 case BUILT_IN_FROB_RETURN_ADDR:
9730 case BUILT_IN_RETURN:
9731 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9732 case BUILT_IN_FRAME_ADDRESS:
9733 case BUILT_IN_VA_END:
9734 case BUILT_IN_STACK_SAVE:
9735 case BUILT_IN_STACK_RESTORE:
9736 /* Exception state returns or moves registers around. */
9737 case BUILT_IN_EH_FILTER:
9738 case BUILT_IN_EH_POINTER:
9739 case BUILT_IN_EH_COPY_VALUES:
9740 return true;
9741
9742 default:
9743 return false;
9744 }
9745
9746 return false;
9747 }
9748
9749 /* Return true if DECL is a builtin that is not expensive, i.e., it is
9750 most probably expanded inline into reasonably simple code. This is a
9751 superset of is_simple_builtin. */
9752 bool
9753 is_inexpensive_builtin (tree decl)
9754 {
9755 if (!decl)
9756 return false;
9757 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9758 return true;
9759 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9760 switch (DECL_FUNCTION_CODE (decl))
9761 {
9762 case BUILT_IN_ABS:
9763 case BUILT_IN_ALLOCA:
9764 case BUILT_IN_ALLOCA_WITH_ALIGN:
9765 case BUILT_IN_BSWAP16:
9766 case BUILT_IN_BSWAP32:
9767 case BUILT_IN_BSWAP64:
9768 case BUILT_IN_CLZ:
9769 case BUILT_IN_CLZIMAX:
9770 case BUILT_IN_CLZL:
9771 case BUILT_IN_CLZLL:
9772 case BUILT_IN_CTZ:
9773 case BUILT_IN_CTZIMAX:
9774 case BUILT_IN_CTZL:
9775 case BUILT_IN_CTZLL:
9776 case BUILT_IN_FFS:
9777 case BUILT_IN_FFSIMAX:
9778 case BUILT_IN_FFSL:
9779 case BUILT_IN_FFSLL:
9780 case BUILT_IN_IMAXABS:
9781 case BUILT_IN_FINITE:
9782 case BUILT_IN_FINITEF:
9783 case BUILT_IN_FINITEL:
9784 case BUILT_IN_FINITED32:
9785 case BUILT_IN_FINITED64:
9786 case BUILT_IN_FINITED128:
9787 case BUILT_IN_FPCLASSIFY:
9788 case BUILT_IN_ISFINITE:
9789 case BUILT_IN_ISINF_SIGN:
9790 case BUILT_IN_ISINF:
9791 case BUILT_IN_ISINFF:
9792 case BUILT_IN_ISINFL:
9793 case BUILT_IN_ISINFD32:
9794 case BUILT_IN_ISINFD64:
9795 case BUILT_IN_ISINFD128:
9796 case BUILT_IN_ISNAN:
9797 case BUILT_IN_ISNANF:
9798 case BUILT_IN_ISNANL:
9799 case BUILT_IN_ISNAND32:
9800 case BUILT_IN_ISNAND64:
9801 case BUILT_IN_ISNAND128:
9802 case BUILT_IN_ISNORMAL:
9803 case BUILT_IN_ISGREATER:
9804 case BUILT_IN_ISGREATEREQUAL:
9805 case BUILT_IN_ISLESS:
9806 case BUILT_IN_ISLESSEQUAL:
9807 case BUILT_IN_ISLESSGREATER:
9808 case BUILT_IN_ISUNORDERED:
9809 case BUILT_IN_VA_ARG_PACK:
9810 case BUILT_IN_VA_ARG_PACK_LEN:
9811 case BUILT_IN_VA_COPY:
9812 case BUILT_IN_TRAP:
9813 case BUILT_IN_SAVEREGS:
9814 case BUILT_IN_POPCOUNTL:
9815 case BUILT_IN_POPCOUNTLL:
9816 case BUILT_IN_POPCOUNTIMAX:
9817 case BUILT_IN_POPCOUNT:
9818 case BUILT_IN_PARITYL:
9819 case BUILT_IN_PARITYLL:
9820 case BUILT_IN_PARITYIMAX:
9821 case BUILT_IN_PARITY:
9822 case BUILT_IN_LABS:
9823 case BUILT_IN_LLABS:
9824 case BUILT_IN_PREFETCH:
9825 case BUILT_IN_ACC_ON_DEVICE:
9826 return true;
9827
9828 default:
9829 return is_simple_builtin (decl);
9830 }
9831
9832 return false;
9833 }