re PR target/21412 (ICE loading TLS address)
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50
51 #ifndef PAD_VARARGS_DOWN
52 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
53 #endif
54
55 /* Define the names of the builtin function types and codes. */
56 const char *const built_in_class_names[4]
57 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
58
59 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
60 const char * built_in_names[(int) END_BUILTINS] =
61 {
62 #include "builtins.def"
63 };
64 #undef DEF_BUILTIN
65
66 /* Setup an array of _DECL trees, make sure each element is
67 initialized to NULL_TREE. */
68 tree built_in_decls[(int) END_BUILTINS];
69 /* Declarations used when constructing the builtin implicitly in the compiler.
70 It may be NULL_TREE when this is invalid (for instance runtime is not
71 required to implement the function call in all cases). */
72 tree implicit_built_in_decls[(int) END_BUILTINS];
73
74 static int get_pointer_alignment (tree, unsigned int);
75 static const char *c_getstr (tree);
76 static rtx c_readstr (const char *, enum machine_mode);
77 static int target_char_cast (tree, char *);
78 static rtx get_memory_rtx (tree);
79 static tree build_string_literal (int, const char *);
80 static int apply_args_size (void);
81 static int apply_result_size (void);
82 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
83 static rtx result_vector (int, rtx);
84 #endif
85 static rtx expand_builtin_setjmp (tree, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
99 static rtx expand_builtin_args_info (tree);
100 static rtx expand_builtin_next_arg (void);
101 static rtx expand_builtin_va_start (tree);
102 static rtx expand_builtin_va_end (tree);
103 static rtx expand_builtin_va_copy (tree);
104 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
105 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
106 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
107 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
108 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
109 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
114 static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
115 static rtx expand_builtin_bcopy (tree);
116 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
118 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
120 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
121 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
123 static rtx expand_builtin_bzero (tree);
124 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, rtx);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static rtx expand_builtin_fputs (tree, rtx, bool);
133 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
134 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
135 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
136 static tree stabilize_va_list (tree, int);
137 static rtx expand_builtin_expect (tree, rtx);
138 static tree fold_builtin_constant_p (tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (tree);
141 static tree fold_builtin_inf (tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static int validate_arglist (tree, ...);
144 static bool integer_valued_real_p (tree);
145 static tree fold_trunc_transparent_mathfn (tree, tree);
146 static bool readonly_data_expr (tree);
147 static rtx expand_builtin_fabs (tree, rtx, rtx);
148 static rtx expand_builtin_signbit (tree, rtx);
149 static tree fold_builtin_cabs (tree, tree);
150 static tree fold_builtin_sqrt (tree, tree);
151 static tree fold_builtin_cbrt (tree, tree);
152 static tree fold_builtin_pow (tree, tree, tree);
153 static tree fold_builtin_powi (tree, tree, tree);
154 static tree fold_builtin_sin (tree);
155 static tree fold_builtin_cos (tree, tree, tree);
156 static tree fold_builtin_tan (tree);
157 static tree fold_builtin_atan (tree, tree);
158 static tree fold_builtin_trunc (tree, tree);
159 static tree fold_builtin_floor (tree, tree);
160 static tree fold_builtin_ceil (tree, tree);
161 static tree fold_builtin_round (tree, tree);
162 static tree fold_builtin_int_roundingfn (tree, tree);
163 static tree fold_builtin_bitop (tree, tree);
164 static tree fold_builtin_memcpy (tree, tree);
165 static tree fold_builtin_mempcpy (tree, tree, int);
166 static tree fold_builtin_memmove (tree, tree);
167 static tree fold_builtin_strchr (tree, tree);
168 static tree fold_builtin_memcmp (tree);
169 static tree fold_builtin_strcmp (tree);
170 static tree fold_builtin_strncmp (tree);
171 static tree fold_builtin_signbit (tree, tree);
172 static tree fold_builtin_copysign (tree, tree, tree);
173 static tree fold_builtin_isascii (tree);
174 static tree fold_builtin_toascii (tree);
175 static tree fold_builtin_isdigit (tree);
176 static tree fold_builtin_fabs (tree, tree);
177 static tree fold_builtin_abs (tree, tree);
178 static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
179 enum tree_code);
180 static tree fold_builtin_1 (tree, tree, bool);
181
182 static tree fold_builtin_strpbrk (tree, tree);
183 static tree fold_builtin_strstr (tree, tree);
184 static tree fold_builtin_strrchr (tree, tree);
185 static tree fold_builtin_strcat (tree);
186 static tree fold_builtin_strncat (tree);
187 static tree fold_builtin_strspn (tree);
188 static tree fold_builtin_strcspn (tree);
189 static tree fold_builtin_sprintf (tree, int);
190
191 /* Return true if NODE should be considered for inline expansion regardless
192 of the optimization level. This means whenever a function is invoked with
193 its "internal" name, which normally contains the prefix "__builtin". */
194
195 static bool called_as_built_in (tree node)
196 {
197 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 return false;
203 }
204
205 /* Return the alignment in bits of EXP, a pointer valued expression.
206 But don't return more than MAX_ALIGN no matter what.
207 The alignment returned is, by default, the alignment of the thing that
208 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
209
210 Otherwise, look at the expression to see if we can do better, i.e., if the
211 expression is actually pointing at an object whose alignment is tighter. */
212
213 static int
214 get_pointer_alignment (tree exp, unsigned int max_align)
215 {
216 unsigned int align, inner;
217
218 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
219 return 0;
220
221 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
222 align = MIN (align, max_align);
223
224 while (1)
225 {
226 switch (TREE_CODE (exp))
227 {
228 case NOP_EXPR:
229 case CONVERT_EXPR:
230 case NON_LVALUE_EXPR:
231 exp = TREE_OPERAND (exp, 0);
232 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
233 return align;
234
235 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
236 align = MIN (inner, max_align);
237 break;
238
239 case PLUS_EXPR:
240 /* If sum of pointer + int, restrict our maximum alignment to that
241 imposed by the integer. If not, we can't do any better than
242 ALIGN. */
243 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
244 return align;
245
246 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
247 & (max_align / BITS_PER_UNIT - 1))
248 != 0)
249 max_align >>= 1;
250
251 exp = TREE_OPERAND (exp, 0);
252 break;
253
254 case ADDR_EXPR:
255 /* See what we are pointing at and look at its alignment. */
256 exp = TREE_OPERAND (exp, 0);
257 if (TREE_CODE (exp) == FUNCTION_DECL)
258 align = FUNCTION_BOUNDARY;
259 else if (DECL_P (exp))
260 align = DECL_ALIGN (exp);
261 #ifdef CONSTANT_ALIGNMENT
262 else if (CONSTANT_CLASS_P (exp))
263 align = CONSTANT_ALIGNMENT (exp, align);
264 #endif
265 return MIN (align, max_align);
266
267 default:
268 return align;
269 }
270 }
271 }
272
273 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
274 way, because it could contain a zero byte in the middle.
275 TREE_STRING_LENGTH is the size of the character array, not the string.
276
277 ONLY_VALUE should be nonzero if the result is not going to be emitted
278 into the instruction stream and zero if it is going to be expanded.
279 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
280 is returned, otherwise NULL, since
281 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
282 evaluate the side-effects.
283
284 The value returned is of type `ssizetype'.
285
286 Unfortunately, string_constant can't access the values of const char
287 arrays with initializers, so neither can we do so here. */
288
289 tree
290 c_strlen (tree src, int only_value)
291 {
292 tree offset_node;
293 HOST_WIDE_INT offset;
294 int max;
295 const char *ptr;
296
297 STRIP_NOPS (src);
298 if (TREE_CODE (src) == COND_EXPR
299 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
300 {
301 tree len1, len2;
302
303 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
304 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
305 if (tree_int_cst_equal (len1, len2))
306 return len1;
307 }
308
309 if (TREE_CODE (src) == COMPOUND_EXPR
310 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
311 return c_strlen (TREE_OPERAND (src, 1), only_value);
312
313 src = string_constant (src, &offset_node);
314 if (src == 0)
315 return 0;
316
317 max = TREE_STRING_LENGTH (src) - 1;
318 ptr = TREE_STRING_POINTER (src);
319
320 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
321 {
322 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
323 compute the offset to the following null if we don't know where to
324 start searching for it. */
325 int i;
326
327 for (i = 0; i < max; i++)
328 if (ptr[i] == 0)
329 return 0;
330
331 /* We don't know the starting offset, but we do know that the string
332 has no internal zero bytes. We can assume that the offset falls
333 within the bounds of the string; otherwise, the programmer deserves
334 what he gets. Subtract the offset from the length of the string,
335 and return that. This would perhaps not be valid if we were dealing
336 with named arrays in addition to literal string constants. */
337
338 return size_diffop (size_int (max), offset_node);
339 }
340
341 /* We have a known offset into the string. Start searching there for
342 a null character if we can represent it as a single HOST_WIDE_INT. */
343 if (offset_node == 0)
344 offset = 0;
345 else if (! host_integerp (offset_node, 0))
346 offset = -1;
347 else
348 offset = tree_low_cst (offset_node, 0);
349
350 /* If the offset is known to be out of bounds, warn, and call strlen at
351 runtime. */
352 if (offset < 0 || offset > max)
353 {
354 warning (0, "offset outside bounds of constant string");
355 return 0;
356 }
357
358 /* Use strlen to search for the first zero byte. Since any strings
359 constructed with build_string will have nulls appended, we win even
360 if we get handed something like (char[4])"abcd".
361
362 Since OFFSET is our starting index into the string, no further
363 calculation is needed. */
364 return ssize_int (strlen (ptr + offset));
365 }
366
367 /* Return a char pointer for a C string if it is a string constant
368 or sum of string constant and integer constant. */
369
370 static const char *
371 c_getstr (tree src)
372 {
373 tree offset_node;
374
375 src = string_constant (src, &offset_node);
376 if (src == 0)
377 return 0;
378
379 if (offset_node == 0)
380 return TREE_STRING_POINTER (src);
381 else if (!host_integerp (offset_node, 1)
382 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
383 return 0;
384
385 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
386 }
387
388 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
389 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
390
391 static rtx
392 c_readstr (const char *str, enum machine_mode mode)
393 {
394 HOST_WIDE_INT c[2];
395 HOST_WIDE_INT ch;
396 unsigned int i, j;
397
398 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
399
400 c[0] = 0;
401 c[1] = 0;
402 ch = 1;
403 for (i = 0; i < GET_MODE_SIZE (mode); i++)
404 {
405 j = i;
406 if (WORDS_BIG_ENDIAN)
407 j = GET_MODE_SIZE (mode) - i - 1;
408 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
409 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
410 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
411 j *= BITS_PER_UNIT;
412 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
413
414 if (ch)
415 ch = (unsigned char) str[i];
416 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
417 }
418 return immed_double_const (c[0], c[1], mode);
419 }
420
421 /* Cast a target constant CST to target CHAR and if that value fits into
422 host char type, return zero and put that value into variable pointed by
423 P. */
424
425 static int
426 target_char_cast (tree cst, char *p)
427 {
428 unsigned HOST_WIDE_INT val, hostval;
429
430 if (!host_integerp (cst, 1)
431 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
432 return 1;
433
434 val = tree_low_cst (cst, 1);
435 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
436 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
437
438 hostval = val;
439 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
440 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
441
442 if (val != hostval)
443 return 1;
444
445 *p = hostval;
446 return 0;
447 }
448
449 /* Similar to save_expr, but assumes that arbitrary code is not executed
450 in between the multiple evaluations. In particular, we assume that a
451 non-addressable local variable will not be modified. */
452
453 static tree
454 builtin_save_expr (tree exp)
455 {
456 if (TREE_ADDRESSABLE (exp) == 0
457 && (TREE_CODE (exp) == PARM_DECL
458 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
459 return exp;
460
461 return save_expr (exp);
462 }
463
464 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
465 times to get the address of either a higher stack frame, or a return
466 address located within it (depending on FNDECL_CODE). */
467
468 static rtx
469 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
470 {
471 int i;
472
473 #ifdef INITIAL_FRAME_ADDRESS_RTX
474 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
475 #else
476 rtx tem = hard_frame_pointer_rtx;
477 #endif
478
479 /* Some machines need special handling before we can access
480 arbitrary frames. For example, on the sparc, we must first flush
481 all register windows to the stack. */
482 #ifdef SETUP_FRAME_ADDRESSES
483 if (count > 0)
484 SETUP_FRAME_ADDRESSES ();
485 #endif
486
487 /* On the sparc, the return address is not in the frame, it is in a
488 register. There is no way to access it off of the current frame
489 pointer, but it can be accessed off the previous frame pointer by
490 reading the value from the register window save area. */
491 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
492 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
493 count--;
494 #endif
495
496 /* Scan back COUNT frames to the specified frame. */
497 for (i = 0; i < count; i++)
498 {
499 /* Assume the dynamic chain pointer is in the word that the
500 frame address points to, unless otherwise specified. */
501 #ifdef DYNAMIC_CHAIN_ADDRESS
502 tem = DYNAMIC_CHAIN_ADDRESS (tem);
503 #endif
504 tem = memory_address (Pmode, tem);
505 tem = gen_rtx_MEM (Pmode, tem);
506 set_mem_alias_set (tem, get_frame_alias_set ());
507 tem = copy_to_reg (tem);
508 }
509
510 /* For __builtin_frame_address, return what we've got. */
511 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
512 return tem;
513
514 /* For __builtin_return_address, Get the return address from that
515 frame. */
516 #ifdef RETURN_ADDR_RTX
517 tem = RETURN_ADDR_RTX (count, tem);
518 #else
519 tem = memory_address (Pmode,
520 plus_constant (tem, GET_MODE_SIZE (Pmode)));
521 tem = gen_rtx_MEM (Pmode, tem);
522 set_mem_alias_set (tem, get_frame_alias_set ());
523 #endif
524 return tem;
525 }
526
527 /* Alias set used for setjmp buffer. */
528 static HOST_WIDE_INT setjmp_alias_set = -1;
529
530 /* Construct the leading half of a __builtin_setjmp call. Control will
531 return to RECEIVER_LABEL. This is used directly by sjlj exception
532 handling code. */
533
534 void
535 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
536 {
537 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
538 rtx stack_save;
539 rtx mem;
540
541 if (setjmp_alias_set == -1)
542 setjmp_alias_set = new_alias_set ();
543
544 buf_addr = convert_memory_address (Pmode, buf_addr);
545
546 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
547
548 /* We store the frame pointer and the address of receiver_label in
549 the buffer and use the rest of it for the stack save area, which
550 is machine-dependent. */
551
552 mem = gen_rtx_MEM (Pmode, buf_addr);
553 set_mem_alias_set (mem, setjmp_alias_set);
554 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
555
556 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
557 set_mem_alias_set (mem, setjmp_alias_set);
558
559 emit_move_insn (validize_mem (mem),
560 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
561
562 stack_save = gen_rtx_MEM (sa_mode,
563 plus_constant (buf_addr,
564 2 * GET_MODE_SIZE (Pmode)));
565 set_mem_alias_set (stack_save, setjmp_alias_set);
566 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
567
568 /* If there is further processing to do, do it. */
569 #ifdef HAVE_builtin_setjmp_setup
570 if (HAVE_builtin_setjmp_setup)
571 emit_insn (gen_builtin_setjmp_setup (buf_addr));
572 #endif
573
574 /* Tell optimize_save_area_alloca that extra work is going to
575 need to go on during alloca. */
576 current_function_calls_setjmp = 1;
577
578 /* Set this so all the registers get saved in our frame; we need to be
579 able to copy the saved values for any registers from frames we unwind. */
580 current_function_has_nonlocal_label = 1;
581 }
582
583 /* Construct the trailing part of a __builtin_setjmp call.
584 This is used directly by sjlj exception handling code. */
585
586 void
587 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
588 {
589 /* Clobber the FP when we get here, so we have to make sure it's
590 marked as used by this function. */
591 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
592
593 /* Mark the static chain as clobbered here so life information
594 doesn't get messed up for it. */
595 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
596
597 /* Now put in the code to restore the frame pointer, and argument
598 pointer, if needed. */
599 #ifdef HAVE_nonlocal_goto
600 if (! HAVE_nonlocal_goto)
601 #endif
602 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
603
604 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
605 if (fixed_regs[ARG_POINTER_REGNUM])
606 {
607 #ifdef ELIMINABLE_REGS
608 size_t i;
609 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
610
611 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
612 if (elim_regs[i].from == ARG_POINTER_REGNUM
613 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
614 break;
615
616 if (i == ARRAY_SIZE (elim_regs))
617 #endif
618 {
619 /* Now restore our arg pointer from the address at which it
620 was saved in our stack frame. */
621 emit_move_insn (virtual_incoming_args_rtx,
622 copy_to_reg (get_arg_pointer_save_area (cfun)));
623 }
624 }
625 #endif
626
627 #ifdef HAVE_builtin_setjmp_receiver
628 if (HAVE_builtin_setjmp_receiver)
629 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
630 else
631 #endif
632 #ifdef HAVE_nonlocal_goto_receiver
633 if (HAVE_nonlocal_goto_receiver)
634 emit_insn (gen_nonlocal_goto_receiver ());
635 else
636 #endif
637 { /* Nothing */ }
638
639 /* @@@ This is a kludge. Not all machine descriptions define a blockage
640 insn, but we must not allow the code we just generated to be reordered
641 by scheduling. Specifically, the update of the frame pointer must
642 happen immediately, not later. So emit an ASM_INPUT to act as blockage
643 insn. */
644 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
645 }
646
647 /* __builtin_setjmp is passed a pointer to an array of five words (not
648 all will be used on all machines). It operates similarly to the C
649 library function of the same name, but is more efficient. Much of
650 the code below (and for longjmp) is copied from the handling of
651 non-local gotos.
652
653 NOTE: This is intended for use by GNAT and the exception handling
654 scheme in the compiler and will only work in the method used by
655 them. */
656
657 static rtx
658 expand_builtin_setjmp (tree arglist, rtx target)
659 {
660 rtx buf_addr, next_lab, cont_lab;
661
662 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
663 return NULL_RTX;
664
665 if (target == 0 || !REG_P (target)
666 || REGNO (target) < FIRST_PSEUDO_REGISTER)
667 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
668
669 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
670
671 next_lab = gen_label_rtx ();
672 cont_lab = gen_label_rtx ();
673
674 expand_builtin_setjmp_setup (buf_addr, next_lab);
675
676 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
677 ensure that pending stack adjustments are flushed. */
678 emit_move_insn (target, const0_rtx);
679 emit_jump (cont_lab);
680
681 emit_label (next_lab);
682
683 expand_builtin_setjmp_receiver (next_lab);
684
685 /* Set TARGET to one. */
686 emit_move_insn (target, const1_rtx);
687 emit_label (cont_lab);
688
689 /* Tell flow about the strange goings on. Putting `next_lab' on
690 `nonlocal_goto_handler_labels' to indicates that function
691 calls may traverse the arc back to this label. */
692
693 current_function_has_nonlocal_label = 1;
694 nonlocal_goto_handler_labels
695 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
696
697 return target;
698 }
699
700 /* __builtin_longjmp is passed a pointer to an array of five words (not
701 all will be used on all machines). It operates similarly to the C
702 library function of the same name, but is more efficient. Much of
703 the code below is copied from the handling of non-local gotos.
704
705 NOTE: This is intended for use by GNAT and the exception handling
706 scheme in the compiler and will only work in the method used by
707 them. */
708
709 static void
710 expand_builtin_longjmp (rtx buf_addr, rtx value)
711 {
712 rtx fp, lab, stack, insn, last;
713 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
714
715 if (setjmp_alias_set == -1)
716 setjmp_alias_set = new_alias_set ();
717
718 buf_addr = convert_memory_address (Pmode, buf_addr);
719
720 buf_addr = force_reg (Pmode, buf_addr);
721
722 /* We used to store value in static_chain_rtx, but that fails if pointers
723 are smaller than integers. We instead require that the user must pass
724 a second argument of 1, because that is what builtin_setjmp will
725 return. This also makes EH slightly more efficient, since we are no
726 longer copying around a value that we don't care about. */
727 gcc_assert (value == const1_rtx);
728
729 last = get_last_insn ();
730 #ifdef HAVE_builtin_longjmp
731 if (HAVE_builtin_longjmp)
732 emit_insn (gen_builtin_longjmp (buf_addr));
733 else
734 #endif
735 {
736 fp = gen_rtx_MEM (Pmode, buf_addr);
737 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
738 GET_MODE_SIZE (Pmode)));
739
740 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
741 2 * GET_MODE_SIZE (Pmode)));
742 set_mem_alias_set (fp, setjmp_alias_set);
743 set_mem_alias_set (lab, setjmp_alias_set);
744 set_mem_alias_set (stack, setjmp_alias_set);
745
746 /* Pick up FP, label, and SP from the block and jump. This code is
747 from expand_goto in stmt.c; see there for detailed comments. */
748 #if HAVE_nonlocal_goto
749 if (HAVE_nonlocal_goto)
750 /* We have to pass a value to the nonlocal_goto pattern that will
751 get copied into the static_chain pointer, but it does not matter
752 what that value is, because builtin_setjmp does not use it. */
753 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
754 else
755 #endif
756 {
757 lab = copy_to_reg (lab);
758
759 emit_insn (gen_rtx_CLOBBER (VOIDmode,
760 gen_rtx_MEM (BLKmode,
761 gen_rtx_SCRATCH (VOIDmode))));
762 emit_insn (gen_rtx_CLOBBER (VOIDmode,
763 gen_rtx_MEM (BLKmode,
764 hard_frame_pointer_rtx)));
765
766 emit_move_insn (hard_frame_pointer_rtx, fp);
767 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
768
769 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
770 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
771 emit_indirect_jump (lab);
772 }
773 }
774
775 /* Search backwards and mark the jump insn as a non-local goto.
776 Note that this precludes the use of __builtin_longjmp to a
777 __builtin_setjmp target in the same function. However, we've
778 already cautioned the user that these functions are for
779 internal exception handling use only. */
780 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
781 {
782 gcc_assert (insn != last);
783
784 if (JUMP_P (insn))
785 {
786 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
787 REG_NOTES (insn));
788 break;
789 }
790 else if (CALL_P (insn))
791 break;
792 }
793 }
794
795 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
796 and the address of the save area. */
797
798 static rtx
799 expand_builtin_nonlocal_goto (tree arglist)
800 {
801 tree t_label, t_save_area;
802 rtx r_label, r_save_area, r_fp, r_sp, insn;
803
804 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
805 return NULL_RTX;
806
807 t_label = TREE_VALUE (arglist);
808 arglist = TREE_CHAIN (arglist);
809 t_save_area = TREE_VALUE (arglist);
810
811 r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0);
812 r_label = convert_memory_address (Pmode, r_label);
813 r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0);
814 r_save_area = convert_memory_address (Pmode, r_save_area);
815 r_fp = gen_rtx_MEM (Pmode, r_save_area);
816 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
817 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
818
819 current_function_has_nonlocal_goto = 1;
820
821 #if HAVE_nonlocal_goto
822 /* ??? We no longer need to pass the static chain value, afaik. */
823 if (HAVE_nonlocal_goto)
824 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
825 else
826 #endif
827 {
828 r_label = copy_to_reg (r_label);
829
830 emit_insn (gen_rtx_CLOBBER (VOIDmode,
831 gen_rtx_MEM (BLKmode,
832 gen_rtx_SCRATCH (VOIDmode))));
833
834 emit_insn (gen_rtx_CLOBBER (VOIDmode,
835 gen_rtx_MEM (BLKmode,
836 hard_frame_pointer_rtx)));
837
838 /* Restore frame pointer for containing function.
839 This sets the actual hard register used for the frame pointer
840 to the location of the function's incoming static chain info.
841 The non-local goto handler will then adjust it to contain the
842 proper value and reload the argument pointer, if needed. */
843 emit_move_insn (hard_frame_pointer_rtx, r_fp);
844 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
845
846 /* USE of hard_frame_pointer_rtx added for consistency;
847 not clear if really needed. */
848 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
849 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
850 emit_indirect_jump (r_label);
851 }
852
853 /* Search backwards to the jump insn and mark it as a
854 non-local goto. */
855 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
856 {
857 if (JUMP_P (insn))
858 {
859 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
860 const0_rtx, REG_NOTES (insn));
861 break;
862 }
863 else if (CALL_P (insn))
864 break;
865 }
866
867 return const0_rtx;
868 }
869
870 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
871 (not all will be used on all machines) that was passed to __builtin_setjmp.
872 It updates the stack pointer in that block to correspond to the current
873 stack pointer. */
874
875 static void
876 expand_builtin_update_setjmp_buf (rtx buf_addr)
877 {
878 enum machine_mode sa_mode = Pmode;
879 rtx stack_save;
880
881
882 #ifdef HAVE_save_stack_nonlocal
883 if (HAVE_save_stack_nonlocal)
884 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
885 #endif
886 #ifdef STACK_SAVEAREA_MODE
887 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
888 #endif
889
890 stack_save
891 = gen_rtx_MEM (sa_mode,
892 memory_address
893 (sa_mode,
894 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
895
896 #ifdef HAVE_setjmp
897 if (HAVE_setjmp)
898 emit_insn (gen_setjmp ());
899 #endif
900
901 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
902 }
903
904 /* Expand a call to __builtin_prefetch. For a target that does not support
905 data prefetch, evaluate the memory address argument in case it has side
906 effects. */
907
908 static void
909 expand_builtin_prefetch (tree arglist)
910 {
911 tree arg0, arg1, arg2;
912 rtx op0, op1, op2;
913
914 if (!validate_arglist (arglist, POINTER_TYPE, 0))
915 return;
916
917 arg0 = TREE_VALUE (arglist);
918 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
919 zero (read) and argument 2 (locality) defaults to 3 (high degree of
920 locality). */
921 if (TREE_CHAIN (arglist))
922 {
923 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
924 if (TREE_CHAIN (TREE_CHAIN (arglist)))
925 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
926 else
927 arg2 = build_int_cst (NULL_TREE, 3);
928 }
929 else
930 {
931 arg1 = integer_zero_node;
932 arg2 = build_int_cst (NULL_TREE, 3);
933 }
934
935 /* Argument 0 is an address. */
936 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
937
938 /* Argument 1 (read/write flag) must be a compile-time constant int. */
939 if (TREE_CODE (arg1) != INTEGER_CST)
940 {
941 error ("second argument to %<__builtin_prefetch%> must be a constant");
942 arg1 = integer_zero_node;
943 }
944 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
945 /* Argument 1 must be either zero or one. */
946 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
947 {
948 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
949 " using zero");
950 op1 = const0_rtx;
951 }
952
953 /* Argument 2 (locality) must be a compile-time constant int. */
954 if (TREE_CODE (arg2) != INTEGER_CST)
955 {
956 error ("third argument to %<__builtin_prefetch%> must be a constant");
957 arg2 = integer_zero_node;
958 }
959 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
960 /* Argument 2 must be 0, 1, 2, or 3. */
961 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
962 {
963 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
964 op2 = const0_rtx;
965 }
966
967 #ifdef HAVE_prefetch
968 if (HAVE_prefetch)
969 {
970 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
971 (op0,
972 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
973 || (GET_MODE (op0) != Pmode))
974 {
975 op0 = convert_memory_address (Pmode, op0);
976 op0 = force_reg (Pmode, op0);
977 }
978 emit_insn (gen_prefetch (op0, op1, op2));
979 }
980 #endif
981
982 /* Don't do anything with direct references to volatile memory, but
983 generate code to handle other side effects. */
984 if (!MEM_P (op0) && side_effects_p (op0))
985 emit_insn (op0);
986 }
987
988 /* Get a MEM rtx for expression EXP which is the address of an operand
989 to be used to be used in a string instruction (cmpstrsi, movmemsi, ..). */
990
991 static rtx
992 get_memory_rtx (tree exp)
993 {
994 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
995 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
996
997 /* Get an expression we can use to find the attributes to assign to MEM.
998 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
999 we can. First remove any nops. */
1000 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1001 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1002 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1003 exp = TREE_OPERAND (exp, 0);
1004
1005 if (TREE_CODE (exp) == ADDR_EXPR)
1006 exp = TREE_OPERAND (exp, 0);
1007 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1008 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1009 else
1010 exp = NULL;
1011
1012 /* Honor attributes derived from exp, except for the alias set
1013 (as builtin stringops may alias with anything) and the size
1014 (as stringops may access multiple array elements). */
1015 if (exp)
1016 {
1017 set_mem_attributes (mem, exp, 0);
1018 set_mem_alias_set (mem, 0);
1019 set_mem_size (mem, NULL_RTX);
1020 }
1021
1022 return mem;
1023 }
1024 \f
1025 /* Built-in functions to perform an untyped call and return. */
1026
1027 /* For each register that may be used for calling a function, this
1028 gives a mode used to copy the register's value. VOIDmode indicates
1029 the register is not used for calling a function. If the machine
1030 has register windows, this gives only the outbound registers.
1031 INCOMING_REGNO gives the corresponding inbound register. */
1032 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1033
1034 /* For each register that may be used for returning values, this gives
1035 a mode used to copy the register's value. VOIDmode indicates the
1036 register is not used for returning values. If the machine has
1037 register windows, this gives only the outbound registers.
1038 INCOMING_REGNO gives the corresponding inbound register. */
1039 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1040
1041 /* For each register that may be used for calling a function, this
1042 gives the offset of that register into the block returned by
1043 __builtin_apply_args. 0 indicates that the register is not
1044 used for calling a function. */
1045 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1046
1047 /* Return the size required for the block returned by __builtin_apply_args,
1048 and initialize apply_args_mode. */
1049
1050 static int
1051 apply_args_size (void)
1052 {
1053 static int size = -1;
1054 int align;
1055 unsigned int regno;
1056 enum machine_mode mode;
1057
1058 /* The values computed by this function never change. */
1059 if (size < 0)
1060 {
1061 /* The first value is the incoming arg-pointer. */
1062 size = GET_MODE_SIZE (Pmode);
1063
1064 /* The second value is the structure value address unless this is
1065 passed as an "invisible" first argument. */
1066 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1067 size += GET_MODE_SIZE (Pmode);
1068
1069 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1070 if (FUNCTION_ARG_REGNO_P (regno))
1071 {
1072 mode = reg_raw_mode[regno];
1073
1074 gcc_assert (mode != VOIDmode);
1075
1076 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1077 if (size % align != 0)
1078 size = CEIL (size, align) * align;
1079 apply_args_reg_offset[regno] = size;
1080 size += GET_MODE_SIZE (mode);
1081 apply_args_mode[regno] = mode;
1082 }
1083 else
1084 {
1085 apply_args_mode[regno] = VOIDmode;
1086 apply_args_reg_offset[regno] = 0;
1087 }
1088 }
1089 return size;
1090 }
1091
1092 /* Return the size required for the block returned by __builtin_apply,
1093 and initialize apply_result_mode. */
1094
1095 static int
1096 apply_result_size (void)
1097 {
1098 static int size = -1;
1099 int align, regno;
1100 enum machine_mode mode;
1101
1102 /* The values computed by this function never change. */
1103 if (size < 0)
1104 {
1105 size = 0;
1106
1107 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1108 if (FUNCTION_VALUE_REGNO_P (regno))
1109 {
1110 mode = reg_raw_mode[regno];
1111
1112 gcc_assert (mode != VOIDmode);
1113
1114 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1115 if (size % align != 0)
1116 size = CEIL (size, align) * align;
1117 size += GET_MODE_SIZE (mode);
1118 apply_result_mode[regno] = mode;
1119 }
1120 else
1121 apply_result_mode[regno] = VOIDmode;
1122
1123 /* Allow targets that use untyped_call and untyped_return to override
1124 the size so that machine-specific information can be stored here. */
1125 #ifdef APPLY_RESULT_SIZE
1126 size = APPLY_RESULT_SIZE;
1127 #endif
1128 }
1129 return size;
1130 }
1131
1132 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1133 /* Create a vector describing the result block RESULT. If SAVEP is true,
1134 the result block is used to save the values; otherwise it is used to
1135 restore the values. */
1136
1137 static rtx
1138 result_vector (int savep, rtx result)
1139 {
1140 int regno, size, align, nelts;
1141 enum machine_mode mode;
1142 rtx reg, mem;
1143 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1144
1145 size = nelts = 0;
1146 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1147 if ((mode = apply_result_mode[regno]) != VOIDmode)
1148 {
1149 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1150 if (size % align != 0)
1151 size = CEIL (size, align) * align;
1152 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1153 mem = adjust_address (result, mode, size);
1154 savevec[nelts++] = (savep
1155 ? gen_rtx_SET (VOIDmode, mem, reg)
1156 : gen_rtx_SET (VOIDmode, reg, mem));
1157 size += GET_MODE_SIZE (mode);
1158 }
1159 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1160 }
1161 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1162
1163 /* Save the state required to perform an untyped call with the same
1164 arguments as were passed to the current function. */
1165
1166 static rtx
1167 expand_builtin_apply_args_1 (void)
1168 {
1169 rtx registers, tem;
1170 int size, align, regno;
1171 enum machine_mode mode;
1172 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1173
1174 /* Create a block where the arg-pointer, structure value address,
1175 and argument registers can be saved. */
1176 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1177
1178 /* Walk past the arg-pointer and structure value address. */
1179 size = GET_MODE_SIZE (Pmode);
1180 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1181 size += GET_MODE_SIZE (Pmode);
1182
1183 /* Save each register used in calling a function to the block. */
1184 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1185 if ((mode = apply_args_mode[regno]) != VOIDmode)
1186 {
1187 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1188 if (size % align != 0)
1189 size = CEIL (size, align) * align;
1190
1191 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1192
1193 emit_move_insn (adjust_address (registers, mode, size), tem);
1194 size += GET_MODE_SIZE (mode);
1195 }
1196
1197 /* Save the arg pointer to the block. */
1198 tem = copy_to_reg (virtual_incoming_args_rtx);
1199 #ifdef STACK_GROWS_DOWNWARD
1200 /* We need the pointer as the caller actually passed them to us, not
1201 as we might have pretended they were passed. Make sure it's a valid
1202 operand, as emit_move_insn isn't expected to handle a PLUS. */
1203 tem
1204 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1205 NULL_RTX);
1206 #endif
1207 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1208
1209 size = GET_MODE_SIZE (Pmode);
1210
1211 /* Save the structure value address unless this is passed as an
1212 "invisible" first argument. */
1213 if (struct_incoming_value)
1214 {
1215 emit_move_insn (adjust_address (registers, Pmode, size),
1216 copy_to_reg (struct_incoming_value));
1217 size += GET_MODE_SIZE (Pmode);
1218 }
1219
1220 /* Return the address of the block. */
1221 return copy_addr_to_reg (XEXP (registers, 0));
1222 }
1223
1224 /* __builtin_apply_args returns block of memory allocated on
1225 the stack into which is stored the arg pointer, structure
1226 value address, static chain, and all the registers that might
1227 possibly be used in performing a function call. The code is
1228 moved to the start of the function so the incoming values are
1229 saved. */
1230
1231 static rtx
1232 expand_builtin_apply_args (void)
1233 {
1234 /* Don't do __builtin_apply_args more than once in a function.
1235 Save the result of the first call and reuse it. */
1236 if (apply_args_value != 0)
1237 return apply_args_value;
1238 {
1239 /* When this function is called, it means that registers must be
1240 saved on entry to this function. So we migrate the
1241 call to the first insn of this function. */
1242 rtx temp;
1243 rtx seq;
1244
1245 start_sequence ();
1246 temp = expand_builtin_apply_args_1 ();
1247 seq = get_insns ();
1248 end_sequence ();
1249
1250 apply_args_value = temp;
1251
1252 /* Put the insns after the NOTE that starts the function.
1253 If this is inside a start_sequence, make the outer-level insn
1254 chain current, so the code is placed at the start of the
1255 function. */
1256 push_topmost_sequence ();
1257 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1258 pop_topmost_sequence ();
1259 return temp;
1260 }
1261 }
1262
1263 /* Perform an untyped call and save the state required to perform an
1264 untyped return of whatever value was returned by the given function. */
1265
1266 static rtx
1267 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1268 {
1269 int size, align, regno;
1270 enum machine_mode mode;
1271 rtx incoming_args, result, reg, dest, src, call_insn;
1272 rtx old_stack_level = 0;
1273 rtx call_fusage = 0;
1274 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1275
1276 arguments = convert_memory_address (Pmode, arguments);
1277
1278 /* Create a block where the return registers can be saved. */
1279 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1280
1281 /* Fetch the arg pointer from the ARGUMENTS block. */
1282 incoming_args = gen_reg_rtx (Pmode);
1283 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1284 #ifndef STACK_GROWS_DOWNWARD
1285 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1286 incoming_args, 0, OPTAB_LIB_WIDEN);
1287 #endif
1288
1289 /* Push a new argument block and copy the arguments. Do not allow
1290 the (potential) memcpy call below to interfere with our stack
1291 manipulations. */
1292 do_pending_stack_adjust ();
1293 NO_DEFER_POP;
1294
1295 /* Save the stack with nonlocal if available. */
1296 #ifdef HAVE_save_stack_nonlocal
1297 if (HAVE_save_stack_nonlocal)
1298 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1299 else
1300 #endif
1301 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1302
1303 /* Allocate a block of memory onto the stack and copy the memory
1304 arguments to the outgoing arguments address. */
1305 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1306 dest = virtual_outgoing_args_rtx;
1307 #ifndef STACK_GROWS_DOWNWARD
1308 if (GET_CODE (argsize) == CONST_INT)
1309 dest = plus_constant (dest, -INTVAL (argsize));
1310 else
1311 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1312 #endif
1313 dest = gen_rtx_MEM (BLKmode, dest);
1314 set_mem_align (dest, PARM_BOUNDARY);
1315 src = gen_rtx_MEM (BLKmode, incoming_args);
1316 set_mem_align (src, PARM_BOUNDARY);
1317 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1318
1319 /* Refer to the argument block. */
1320 apply_args_size ();
1321 arguments = gen_rtx_MEM (BLKmode, arguments);
1322 set_mem_align (arguments, PARM_BOUNDARY);
1323
1324 /* Walk past the arg-pointer and structure value address. */
1325 size = GET_MODE_SIZE (Pmode);
1326 if (struct_value)
1327 size += GET_MODE_SIZE (Pmode);
1328
1329 /* Restore each of the registers previously saved. Make USE insns
1330 for each of these registers for use in making the call. */
1331 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1332 if ((mode = apply_args_mode[regno]) != VOIDmode)
1333 {
1334 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1335 if (size % align != 0)
1336 size = CEIL (size, align) * align;
1337 reg = gen_rtx_REG (mode, regno);
1338 emit_move_insn (reg, adjust_address (arguments, mode, size));
1339 use_reg (&call_fusage, reg);
1340 size += GET_MODE_SIZE (mode);
1341 }
1342
1343 /* Restore the structure value address unless this is passed as an
1344 "invisible" first argument. */
1345 size = GET_MODE_SIZE (Pmode);
1346 if (struct_value)
1347 {
1348 rtx value = gen_reg_rtx (Pmode);
1349 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1350 emit_move_insn (struct_value, value);
1351 if (REG_P (struct_value))
1352 use_reg (&call_fusage, struct_value);
1353 size += GET_MODE_SIZE (Pmode);
1354 }
1355
1356 /* All arguments and registers used for the call are set up by now! */
1357 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1358
1359 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1360 and we don't want to load it into a register as an optimization,
1361 because prepare_call_address already did it if it should be done. */
1362 if (GET_CODE (function) != SYMBOL_REF)
1363 function = memory_address (FUNCTION_MODE, function);
1364
1365 /* Generate the actual call instruction and save the return value. */
1366 #ifdef HAVE_untyped_call
1367 if (HAVE_untyped_call)
1368 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1369 result, result_vector (1, result)));
1370 else
1371 #endif
1372 #ifdef HAVE_call_value
1373 if (HAVE_call_value)
1374 {
1375 rtx valreg = 0;
1376
1377 /* Locate the unique return register. It is not possible to
1378 express a call that sets more than one return register using
1379 call_value; use untyped_call for that. In fact, untyped_call
1380 only needs to save the return registers in the given block. */
1381 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1382 if ((mode = apply_result_mode[regno]) != VOIDmode)
1383 {
1384 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1385
1386 valreg = gen_rtx_REG (mode, regno);
1387 }
1388
1389 emit_call_insn (GEN_CALL_VALUE (valreg,
1390 gen_rtx_MEM (FUNCTION_MODE, function),
1391 const0_rtx, NULL_RTX, const0_rtx));
1392
1393 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1394 }
1395 else
1396 #endif
1397 gcc_unreachable ();
1398
1399 /* Find the CALL insn we just emitted, and attach the register usage
1400 information. */
1401 call_insn = last_call_insn ();
1402 add_function_usage_to (call_insn, call_fusage);
1403
1404 /* Restore the stack. */
1405 #ifdef HAVE_save_stack_nonlocal
1406 if (HAVE_save_stack_nonlocal)
1407 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1408 else
1409 #endif
1410 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1411
1412 OK_DEFER_POP;
1413
1414 /* Return the address of the result block. */
1415 result = copy_addr_to_reg (XEXP (result, 0));
1416 return convert_memory_address (ptr_mode, result);
1417 }
1418
1419 /* Perform an untyped return. */
1420
1421 static void
1422 expand_builtin_return (rtx result)
1423 {
1424 int size, align, regno;
1425 enum machine_mode mode;
1426 rtx reg;
1427 rtx call_fusage = 0;
1428
1429 result = convert_memory_address (Pmode, result);
1430
1431 apply_result_size ();
1432 result = gen_rtx_MEM (BLKmode, result);
1433
1434 #ifdef HAVE_untyped_return
1435 if (HAVE_untyped_return)
1436 {
1437 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1438 emit_barrier ();
1439 return;
1440 }
1441 #endif
1442
1443 /* Restore the return value and note that each value is used. */
1444 size = 0;
1445 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1446 if ((mode = apply_result_mode[regno]) != VOIDmode)
1447 {
1448 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1449 if (size % align != 0)
1450 size = CEIL (size, align) * align;
1451 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1452 emit_move_insn (reg, adjust_address (result, mode, size));
1453
1454 push_to_sequence (call_fusage);
1455 emit_insn (gen_rtx_USE (VOIDmode, reg));
1456 call_fusage = get_insns ();
1457 end_sequence ();
1458 size += GET_MODE_SIZE (mode);
1459 }
1460
1461 /* Put the USE insns before the return. */
1462 emit_insn (call_fusage);
1463
1464 /* Return whatever values was restored by jumping directly to the end
1465 of the function. */
1466 expand_naked_return ();
1467 }
1468
1469 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1470
1471 static enum type_class
1472 type_to_class (tree type)
1473 {
1474 switch (TREE_CODE (type))
1475 {
1476 case VOID_TYPE: return void_type_class;
1477 case INTEGER_TYPE: return integer_type_class;
1478 case CHAR_TYPE: return char_type_class;
1479 case ENUMERAL_TYPE: return enumeral_type_class;
1480 case BOOLEAN_TYPE: return boolean_type_class;
1481 case POINTER_TYPE: return pointer_type_class;
1482 case REFERENCE_TYPE: return reference_type_class;
1483 case OFFSET_TYPE: return offset_type_class;
1484 case REAL_TYPE: return real_type_class;
1485 case COMPLEX_TYPE: return complex_type_class;
1486 case FUNCTION_TYPE: return function_type_class;
1487 case METHOD_TYPE: return method_type_class;
1488 case RECORD_TYPE: return record_type_class;
1489 case UNION_TYPE:
1490 case QUAL_UNION_TYPE: return union_type_class;
1491 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1492 ? string_type_class : array_type_class);
1493 case LANG_TYPE: return lang_type_class;
1494 default: return no_type_class;
1495 }
1496 }
1497
1498 /* Expand a call to __builtin_classify_type with arguments found in
1499 ARGLIST. */
1500
1501 static rtx
1502 expand_builtin_classify_type (tree arglist)
1503 {
1504 if (arglist != 0)
1505 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1506 return GEN_INT (no_type_class);
1507 }
1508
1509 /* This helper macro, meant to be used in mathfn_built_in below,
1510 determines which among a set of three builtin math functions is
1511 appropriate for a given type mode. The `F' and `L' cases are
1512 automatically generated from the `double' case. */
1513 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1514 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1515 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1516 fcodel = BUILT_IN_MATHFN##L ; break;
1517
1518 /* Return mathematic function equivalent to FN but operating directly
1519 on TYPE, if available. If we can't do the conversion, return zero. */
1520 tree
1521 mathfn_built_in (tree type, enum built_in_function fn)
1522 {
1523 enum built_in_function fcode, fcodef, fcodel;
1524
1525 switch (fn)
1526 {
1527 CASE_MATHFN (BUILT_IN_ACOS)
1528 CASE_MATHFN (BUILT_IN_ACOSH)
1529 CASE_MATHFN (BUILT_IN_ASIN)
1530 CASE_MATHFN (BUILT_IN_ASINH)
1531 CASE_MATHFN (BUILT_IN_ATAN)
1532 CASE_MATHFN (BUILT_IN_ATAN2)
1533 CASE_MATHFN (BUILT_IN_ATANH)
1534 CASE_MATHFN (BUILT_IN_CBRT)
1535 CASE_MATHFN (BUILT_IN_CEIL)
1536 CASE_MATHFN (BUILT_IN_COPYSIGN)
1537 CASE_MATHFN (BUILT_IN_COS)
1538 CASE_MATHFN (BUILT_IN_COSH)
1539 CASE_MATHFN (BUILT_IN_DREM)
1540 CASE_MATHFN (BUILT_IN_ERF)
1541 CASE_MATHFN (BUILT_IN_ERFC)
1542 CASE_MATHFN (BUILT_IN_EXP)
1543 CASE_MATHFN (BUILT_IN_EXP10)
1544 CASE_MATHFN (BUILT_IN_EXP2)
1545 CASE_MATHFN (BUILT_IN_EXPM1)
1546 CASE_MATHFN (BUILT_IN_FABS)
1547 CASE_MATHFN (BUILT_IN_FDIM)
1548 CASE_MATHFN (BUILT_IN_FLOOR)
1549 CASE_MATHFN (BUILT_IN_FMA)
1550 CASE_MATHFN (BUILT_IN_FMAX)
1551 CASE_MATHFN (BUILT_IN_FMIN)
1552 CASE_MATHFN (BUILT_IN_FMOD)
1553 CASE_MATHFN (BUILT_IN_FREXP)
1554 CASE_MATHFN (BUILT_IN_GAMMA)
1555 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1556 CASE_MATHFN (BUILT_IN_HYPOT)
1557 CASE_MATHFN (BUILT_IN_ILOGB)
1558 CASE_MATHFN (BUILT_IN_INF)
1559 CASE_MATHFN (BUILT_IN_J0)
1560 CASE_MATHFN (BUILT_IN_J1)
1561 CASE_MATHFN (BUILT_IN_JN)
1562 CASE_MATHFN (BUILT_IN_LCEIL)
1563 CASE_MATHFN (BUILT_IN_LDEXP)
1564 CASE_MATHFN (BUILT_IN_LFLOOR)
1565 CASE_MATHFN (BUILT_IN_LGAMMA)
1566 CASE_MATHFN (BUILT_IN_LLCEIL)
1567 CASE_MATHFN (BUILT_IN_LLFLOOR)
1568 CASE_MATHFN (BUILT_IN_LLRINT)
1569 CASE_MATHFN (BUILT_IN_LLROUND)
1570 CASE_MATHFN (BUILT_IN_LOG)
1571 CASE_MATHFN (BUILT_IN_LOG10)
1572 CASE_MATHFN (BUILT_IN_LOG1P)
1573 CASE_MATHFN (BUILT_IN_LOG2)
1574 CASE_MATHFN (BUILT_IN_LOGB)
1575 CASE_MATHFN (BUILT_IN_LRINT)
1576 CASE_MATHFN (BUILT_IN_LROUND)
1577 CASE_MATHFN (BUILT_IN_MODF)
1578 CASE_MATHFN (BUILT_IN_NAN)
1579 CASE_MATHFN (BUILT_IN_NANS)
1580 CASE_MATHFN (BUILT_IN_NEARBYINT)
1581 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1582 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1583 CASE_MATHFN (BUILT_IN_POW)
1584 CASE_MATHFN (BUILT_IN_POWI)
1585 CASE_MATHFN (BUILT_IN_POW10)
1586 CASE_MATHFN (BUILT_IN_REMAINDER)
1587 CASE_MATHFN (BUILT_IN_REMQUO)
1588 CASE_MATHFN (BUILT_IN_RINT)
1589 CASE_MATHFN (BUILT_IN_ROUND)
1590 CASE_MATHFN (BUILT_IN_SCALB)
1591 CASE_MATHFN (BUILT_IN_SCALBLN)
1592 CASE_MATHFN (BUILT_IN_SCALBN)
1593 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1594 CASE_MATHFN (BUILT_IN_SIN)
1595 CASE_MATHFN (BUILT_IN_SINCOS)
1596 CASE_MATHFN (BUILT_IN_SINH)
1597 CASE_MATHFN (BUILT_IN_SQRT)
1598 CASE_MATHFN (BUILT_IN_TAN)
1599 CASE_MATHFN (BUILT_IN_TANH)
1600 CASE_MATHFN (BUILT_IN_TGAMMA)
1601 CASE_MATHFN (BUILT_IN_TRUNC)
1602 CASE_MATHFN (BUILT_IN_Y0)
1603 CASE_MATHFN (BUILT_IN_Y1)
1604 CASE_MATHFN (BUILT_IN_YN)
1605
1606 default:
1607 return 0;
1608 }
1609
1610 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1611 return implicit_built_in_decls[fcode];
1612 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1613 return implicit_built_in_decls[fcodef];
1614 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1615 return implicit_built_in_decls[fcodel];
1616 else
1617 return 0;
1618 }
1619
1620 /* If errno must be maintained, expand the RTL to check if the result,
1621 TARGET, of a built-in function call, EXP, is NaN, and if so set
1622 errno to EDOM. */
1623
1624 static void
1625 expand_errno_check (tree exp, rtx target)
1626 {
1627 rtx lab = gen_label_rtx ();
1628
1629 /* Test the result; if it is NaN, set errno=EDOM because
1630 the argument was not in the domain. */
1631 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1632 0, lab);
1633
1634 #ifdef TARGET_EDOM
1635 /* If this built-in doesn't throw an exception, set errno directly. */
1636 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1637 {
1638 #ifdef GEN_ERRNO_RTX
1639 rtx errno_rtx = GEN_ERRNO_RTX;
1640 #else
1641 rtx errno_rtx
1642 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1643 #endif
1644 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1645 emit_label (lab);
1646 return;
1647 }
1648 #endif
1649
1650 /* We can't set errno=EDOM directly; let the library call do it.
1651 Pop the arguments right away in case the call gets deleted. */
1652 NO_DEFER_POP;
1653 expand_call (exp, target, 0);
1654 OK_DEFER_POP;
1655 emit_label (lab);
1656 }
1657
1658
1659 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1660 Return 0 if a normal call should be emitted rather than expanding the
1661 function in-line. EXP is the expression that is a call to the builtin
1662 function; if convenient, the result should be placed in TARGET.
1663 SUBTARGET may be used as the target for computing one of EXP's operands. */
1664
1665 static rtx
1666 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1667 {
1668 optab builtin_optab;
1669 rtx op0, insns, before_call;
1670 tree fndecl = get_callee_fndecl (exp);
1671 tree arglist = TREE_OPERAND (exp, 1);
1672 enum machine_mode mode;
1673 bool errno_set = false;
1674 tree arg, narg;
1675
1676 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1677 return 0;
1678
1679 arg = TREE_VALUE (arglist);
1680
1681 switch (DECL_FUNCTION_CODE (fndecl))
1682 {
1683 case BUILT_IN_SQRT:
1684 case BUILT_IN_SQRTF:
1685 case BUILT_IN_SQRTL:
1686 errno_set = ! tree_expr_nonnegative_p (arg);
1687 builtin_optab = sqrt_optab;
1688 break;
1689 case BUILT_IN_EXP:
1690 case BUILT_IN_EXPF:
1691 case BUILT_IN_EXPL:
1692 errno_set = true; builtin_optab = exp_optab; break;
1693 case BUILT_IN_EXP10:
1694 case BUILT_IN_EXP10F:
1695 case BUILT_IN_EXP10L:
1696 case BUILT_IN_POW10:
1697 case BUILT_IN_POW10F:
1698 case BUILT_IN_POW10L:
1699 errno_set = true; builtin_optab = exp10_optab; break;
1700 case BUILT_IN_EXP2:
1701 case BUILT_IN_EXP2F:
1702 case BUILT_IN_EXP2L:
1703 errno_set = true; builtin_optab = exp2_optab; break;
1704 case BUILT_IN_EXPM1:
1705 case BUILT_IN_EXPM1F:
1706 case BUILT_IN_EXPM1L:
1707 errno_set = true; builtin_optab = expm1_optab; break;
1708 case BUILT_IN_LOGB:
1709 case BUILT_IN_LOGBF:
1710 case BUILT_IN_LOGBL:
1711 errno_set = true; builtin_optab = logb_optab; break;
1712 case BUILT_IN_ILOGB:
1713 case BUILT_IN_ILOGBF:
1714 case BUILT_IN_ILOGBL:
1715 errno_set = true; builtin_optab = ilogb_optab; break;
1716 case BUILT_IN_LOG:
1717 case BUILT_IN_LOGF:
1718 case BUILT_IN_LOGL:
1719 errno_set = true; builtin_optab = log_optab; break;
1720 case BUILT_IN_LOG10:
1721 case BUILT_IN_LOG10F:
1722 case BUILT_IN_LOG10L:
1723 errno_set = true; builtin_optab = log10_optab; break;
1724 case BUILT_IN_LOG2:
1725 case BUILT_IN_LOG2F:
1726 case BUILT_IN_LOG2L:
1727 errno_set = true; builtin_optab = log2_optab; break;
1728 case BUILT_IN_LOG1P:
1729 case BUILT_IN_LOG1PF:
1730 case BUILT_IN_LOG1PL:
1731 errno_set = true; builtin_optab = log1p_optab; break;
1732 case BUILT_IN_ASIN:
1733 case BUILT_IN_ASINF:
1734 case BUILT_IN_ASINL:
1735 builtin_optab = asin_optab; break;
1736 case BUILT_IN_ACOS:
1737 case BUILT_IN_ACOSF:
1738 case BUILT_IN_ACOSL:
1739 builtin_optab = acos_optab; break;
1740 case BUILT_IN_TAN:
1741 case BUILT_IN_TANF:
1742 case BUILT_IN_TANL:
1743 builtin_optab = tan_optab; break;
1744 case BUILT_IN_ATAN:
1745 case BUILT_IN_ATANF:
1746 case BUILT_IN_ATANL:
1747 builtin_optab = atan_optab; break;
1748 case BUILT_IN_FLOOR:
1749 case BUILT_IN_FLOORF:
1750 case BUILT_IN_FLOORL:
1751 builtin_optab = floor_optab; break;
1752 case BUILT_IN_CEIL:
1753 case BUILT_IN_CEILF:
1754 case BUILT_IN_CEILL:
1755 builtin_optab = ceil_optab; break;
1756 case BUILT_IN_TRUNC:
1757 case BUILT_IN_TRUNCF:
1758 case BUILT_IN_TRUNCL:
1759 builtin_optab = btrunc_optab; break;
1760 case BUILT_IN_ROUND:
1761 case BUILT_IN_ROUNDF:
1762 case BUILT_IN_ROUNDL:
1763 builtin_optab = round_optab; break;
1764 case BUILT_IN_NEARBYINT:
1765 case BUILT_IN_NEARBYINTF:
1766 case BUILT_IN_NEARBYINTL:
1767 builtin_optab = nearbyint_optab; break;
1768 case BUILT_IN_RINT:
1769 case BUILT_IN_RINTF:
1770 case BUILT_IN_RINTL:
1771 builtin_optab = rint_optab; break;
1772 case BUILT_IN_LRINT:
1773 case BUILT_IN_LRINTF:
1774 case BUILT_IN_LRINTL:
1775 case BUILT_IN_LLRINT:
1776 case BUILT_IN_LLRINTF:
1777 case BUILT_IN_LLRINTL:
1778 builtin_optab = lrint_optab; break;
1779 default:
1780 gcc_unreachable ();
1781 }
1782
1783 /* Make a suitable register to place result in. */
1784 mode = TYPE_MODE (TREE_TYPE (exp));
1785
1786 if (! flag_errno_math || ! HONOR_NANS (mode))
1787 errno_set = false;
1788
1789 /* Before working hard, check whether the instruction is available. */
1790 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1791 {
1792 target = gen_reg_rtx (mode);
1793
1794 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1795 need to expand the argument again. This way, we will not perform
1796 side-effects more the once. */
1797 narg = builtin_save_expr (arg);
1798 if (narg != arg)
1799 {
1800 arg = narg;
1801 arglist = build_tree_list (NULL_TREE, arg);
1802 exp = build_function_call_expr (fndecl, arglist);
1803 }
1804
1805 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1806
1807 start_sequence ();
1808
1809 /* Compute into TARGET.
1810 Set TARGET to wherever the result comes back. */
1811 target = expand_unop (mode, builtin_optab, op0, target, 0);
1812
1813 if (target != 0)
1814 {
1815 if (errno_set)
1816 expand_errno_check (exp, target);
1817
1818 /* Output the entire sequence. */
1819 insns = get_insns ();
1820 end_sequence ();
1821 emit_insn (insns);
1822 return target;
1823 }
1824
1825 /* If we were unable to expand via the builtin, stop the sequence
1826 (without outputting the insns) and call to the library function
1827 with the stabilized argument list. */
1828 end_sequence ();
1829 }
1830
1831 before_call = get_last_insn ();
1832
1833 target = expand_call (exp, target, target == const0_rtx);
1834
1835 /* If this is a sqrt operation and we don't care about errno, try to
1836 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1837 This allows the semantics of the libcall to be visible to the RTL
1838 optimizers. */
1839 if (builtin_optab == sqrt_optab && !errno_set)
1840 {
1841 /* Search backwards through the insns emitted by expand_call looking
1842 for the instruction with the REG_RETVAL note. */
1843 rtx last = get_last_insn ();
1844 while (last != before_call)
1845 {
1846 if (find_reg_note (last, REG_RETVAL, NULL))
1847 {
1848 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1849 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1850 two elements, i.e. symbol_ref(sqrt) and the operand. */
1851 if (note
1852 && GET_CODE (note) == EXPR_LIST
1853 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1854 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1855 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1856 {
1857 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1858 /* Check operand is a register with expected mode. */
1859 if (operand
1860 && REG_P (operand)
1861 && GET_MODE (operand) == mode)
1862 {
1863 /* Replace the REG_EQUAL note with a SQRT rtx. */
1864 rtx equiv = gen_rtx_SQRT (mode, operand);
1865 set_unique_reg_note (last, REG_EQUAL, equiv);
1866 }
1867 }
1868 break;
1869 }
1870 last = PREV_INSN (last);
1871 }
1872 }
1873
1874 return target;
1875 }
1876
1877 /* Expand a call to the builtin binary math functions (pow and atan2).
1878 Return 0 if a normal call should be emitted rather than expanding the
1879 function in-line. EXP is the expression that is a call to the builtin
1880 function; if convenient, the result should be placed in TARGET.
1881 SUBTARGET may be used as the target for computing one of EXP's
1882 operands. */
1883
1884 static rtx
1885 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1886 {
1887 optab builtin_optab;
1888 rtx op0, op1, insns;
1889 int op1_type = REAL_TYPE;
1890 tree fndecl = get_callee_fndecl (exp);
1891 tree arglist = TREE_OPERAND (exp, 1);
1892 tree arg0, arg1, temp, narg;
1893 enum machine_mode mode;
1894 bool errno_set = true;
1895 bool stable = true;
1896
1897 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP)
1898 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF)
1899 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL))
1900 op1_type = INTEGER_TYPE;
1901
1902 if (!validate_arglist (arglist, REAL_TYPE, op1_type, VOID_TYPE))
1903 return 0;
1904
1905 arg0 = TREE_VALUE (arglist);
1906 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1907
1908 switch (DECL_FUNCTION_CODE (fndecl))
1909 {
1910 case BUILT_IN_POW:
1911 case BUILT_IN_POWF:
1912 case BUILT_IN_POWL:
1913 builtin_optab = pow_optab; break;
1914 case BUILT_IN_ATAN2:
1915 case BUILT_IN_ATAN2F:
1916 case BUILT_IN_ATAN2L:
1917 builtin_optab = atan2_optab; break;
1918 case BUILT_IN_LDEXP:
1919 case BUILT_IN_LDEXPF:
1920 case BUILT_IN_LDEXPL:
1921 builtin_optab = ldexp_optab; break;
1922 case BUILT_IN_FMOD:
1923 case BUILT_IN_FMODF:
1924 case BUILT_IN_FMODL:
1925 builtin_optab = fmod_optab; break;
1926 case BUILT_IN_DREM:
1927 case BUILT_IN_DREMF:
1928 case BUILT_IN_DREML:
1929 builtin_optab = drem_optab; break;
1930 default:
1931 gcc_unreachable ();
1932 }
1933
1934 /* Make a suitable register to place result in. */
1935 mode = TYPE_MODE (TREE_TYPE (exp));
1936
1937 /* Before working hard, check whether the instruction is available. */
1938 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1939 return 0;
1940
1941 target = gen_reg_rtx (mode);
1942
1943 if (! flag_errno_math || ! HONOR_NANS (mode))
1944 errno_set = false;
1945
1946 /* Always stabilize the argument list. */
1947 narg = builtin_save_expr (arg1);
1948 if (narg != arg1)
1949 {
1950 arg1 = narg;
1951 temp = build_tree_list (NULL_TREE, narg);
1952 stable = false;
1953 }
1954 else
1955 temp = TREE_CHAIN (arglist);
1956
1957 narg = builtin_save_expr (arg0);
1958 if (narg != arg0)
1959 {
1960 arg0 = narg;
1961 arglist = tree_cons (NULL_TREE, narg, temp);
1962 stable = false;
1963 }
1964 else if (! stable)
1965 arglist = tree_cons (NULL_TREE, arg0, temp);
1966
1967 if (! stable)
1968 exp = build_function_call_expr (fndecl, arglist);
1969
1970 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1971 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1972
1973 start_sequence ();
1974
1975 /* Compute into TARGET.
1976 Set TARGET to wherever the result comes back. */
1977 target = expand_binop (mode, builtin_optab, op0, op1,
1978 target, 0, OPTAB_DIRECT);
1979
1980 /* If we were unable to expand via the builtin, stop the sequence
1981 (without outputting the insns) and call to the library function
1982 with the stabilized argument list. */
1983 if (target == 0)
1984 {
1985 end_sequence ();
1986 return expand_call (exp, target, target == const0_rtx);
1987 }
1988
1989 if (errno_set)
1990 expand_errno_check (exp, target);
1991
1992 /* Output the entire sequence. */
1993 insns = get_insns ();
1994 end_sequence ();
1995 emit_insn (insns);
1996
1997 return target;
1998 }
1999
2000 /* Expand a call to the builtin sin and cos math functions.
2001 Return 0 if a normal call should be emitted rather than expanding the
2002 function in-line. EXP is the expression that is a call to the builtin
2003 function; if convenient, the result should be placed in TARGET.
2004 SUBTARGET may be used as the target for computing one of EXP's
2005 operands. */
2006
2007 static rtx
2008 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2009 {
2010 optab builtin_optab;
2011 rtx op0, insns;
2012 tree fndecl = get_callee_fndecl (exp);
2013 tree arglist = TREE_OPERAND (exp, 1);
2014 enum machine_mode mode;
2015 bool errno_set = false;
2016 tree arg, narg;
2017
2018 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2019 return 0;
2020
2021 arg = TREE_VALUE (arglist);
2022
2023 switch (DECL_FUNCTION_CODE (fndecl))
2024 {
2025 case BUILT_IN_SIN:
2026 case BUILT_IN_SINF:
2027 case BUILT_IN_SINL:
2028 case BUILT_IN_COS:
2029 case BUILT_IN_COSF:
2030 case BUILT_IN_COSL:
2031 builtin_optab = sincos_optab; break;
2032 default:
2033 gcc_unreachable ();
2034 }
2035
2036 /* Make a suitable register to place result in. */
2037 mode = TYPE_MODE (TREE_TYPE (exp));
2038
2039 if (! flag_errno_math || ! HONOR_NANS (mode))
2040 errno_set = false;
2041
2042 /* Check if sincos insn is available, otherwise fallback
2043 to sin or cos insn. */
2044 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
2045 switch (DECL_FUNCTION_CODE (fndecl))
2046 {
2047 case BUILT_IN_SIN:
2048 case BUILT_IN_SINF:
2049 case BUILT_IN_SINL:
2050 builtin_optab = sin_optab; break;
2051 case BUILT_IN_COS:
2052 case BUILT_IN_COSF:
2053 case BUILT_IN_COSL:
2054 builtin_optab = cos_optab; break;
2055 default:
2056 gcc_unreachable ();
2057 }
2058 }
2059
2060 /* Before working hard, check whether the instruction is available. */
2061 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2062 {
2063 target = gen_reg_rtx (mode);
2064
2065 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2066 need to expand the argument again. This way, we will not perform
2067 side-effects more the once. */
2068 narg = save_expr (arg);
2069 if (narg != arg)
2070 {
2071 arg = narg;
2072 arglist = build_tree_list (NULL_TREE, arg);
2073 exp = build_function_call_expr (fndecl, arglist);
2074 }
2075
2076 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2077
2078 start_sequence ();
2079
2080 /* Compute into TARGET.
2081 Set TARGET to wherever the result comes back. */
2082 if (builtin_optab == sincos_optab)
2083 {
2084 int result;
2085
2086 switch (DECL_FUNCTION_CODE (fndecl))
2087 {
2088 case BUILT_IN_SIN:
2089 case BUILT_IN_SINF:
2090 case BUILT_IN_SINL:
2091 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2092 break;
2093 case BUILT_IN_COS:
2094 case BUILT_IN_COSF:
2095 case BUILT_IN_COSL:
2096 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2097 break;
2098 default:
2099 gcc_unreachable ();
2100 }
2101 gcc_assert (result);
2102 }
2103 else
2104 {
2105 target = expand_unop (mode, builtin_optab, op0, target, 0);
2106 }
2107
2108 if (target != 0)
2109 {
2110 if (errno_set)
2111 expand_errno_check (exp, target);
2112
2113 /* Output the entire sequence. */
2114 insns = get_insns ();
2115 end_sequence ();
2116 emit_insn (insns);
2117 return target;
2118 }
2119
2120 /* If we were unable to expand via the builtin, stop the sequence
2121 (without outputting the insns) and call to the library function
2122 with the stabilized argument list. */
2123 end_sequence ();
2124 }
2125
2126 target = expand_call (exp, target, target == const0_rtx);
2127
2128 return target;
2129 }
2130
2131 /* Expand a call to one of the builtin rounding functions (lfloor).
2132 If expanding via optab fails, lower expression to (int)(floor(x)).
2133 EXP is the expression that is a call to the builtin function;
2134 if convenient, the result should be placed in TARGET. SUBTARGET may
2135 be used as the target for computing one of EXP's operands. */
2136
2137 static rtx
2138 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2139 {
2140 optab builtin_optab;
2141 rtx op0, insns, tmp;
2142 tree fndecl = get_callee_fndecl (exp);
2143 tree arglist = TREE_OPERAND (exp, 1);
2144 enum built_in_function fallback_fn;
2145 tree fallback_fndecl;
2146 enum machine_mode mode;
2147 tree arg, narg;
2148
2149 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2150 gcc_unreachable ();
2151
2152 arg = TREE_VALUE (arglist);
2153
2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 case BUILT_IN_LCEIL:
2157 case BUILT_IN_LCEILF:
2158 case BUILT_IN_LCEILL:
2159 case BUILT_IN_LLCEIL:
2160 case BUILT_IN_LLCEILF:
2161 case BUILT_IN_LLCEILL:
2162 builtin_optab = lceil_optab;
2163 fallback_fn = BUILT_IN_CEIL;
2164 break;
2165
2166 case BUILT_IN_LFLOOR:
2167 case BUILT_IN_LFLOORF:
2168 case BUILT_IN_LFLOORL:
2169 case BUILT_IN_LLFLOOR:
2170 case BUILT_IN_LLFLOORF:
2171 case BUILT_IN_LLFLOORL:
2172 builtin_optab = lfloor_optab;
2173 fallback_fn = BUILT_IN_FLOOR;
2174 break;
2175
2176 default:
2177 gcc_unreachable ();
2178 }
2179
2180 /* Make a suitable register to place result in. */
2181 mode = TYPE_MODE (TREE_TYPE (exp));
2182
2183 /* Before working hard, check whether the instruction is available. */
2184 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2185 {
2186 target = gen_reg_rtx (mode);
2187
2188 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2189 need to expand the argument again. This way, we will not perform
2190 side-effects more the once. */
2191 narg = builtin_save_expr (arg);
2192 if (narg != arg)
2193 {
2194 arg = narg;
2195 arglist = build_tree_list (NULL_TREE, arg);
2196 exp = build_function_call_expr (fndecl, arglist);
2197 }
2198
2199 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2200
2201 start_sequence ();
2202
2203 /* Compute into TARGET.
2204 Set TARGET to wherever the result comes back. */
2205 target = expand_unop (mode, builtin_optab, op0, target, 0);
2206
2207 if (target != 0)
2208 {
2209 /* Output the entire sequence. */
2210 insns = get_insns ();
2211 end_sequence ();
2212 emit_insn (insns);
2213 return target;
2214 }
2215
2216 /* If we were unable to expand via the builtin, stop the sequence
2217 (without outputting the insns). */
2218 end_sequence ();
2219 }
2220
2221 /* Fall back to floating point rounding optab. */
2222 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2223 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2224 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2225 gcc_assert (fallback_fndecl != NULL_TREE);
2226 exp = build_function_call_expr (fallback_fndecl, arglist);
2227
2228 tmp = expand_builtin_mathfn (exp, NULL_RTX, NULL_RTX);
2229
2230 /* Truncate the result of floating point optab to integer
2231 via expand_fix (). */
2232 target = gen_reg_rtx (mode);
2233 expand_fix (target, tmp, 0);
2234
2235 return target;
2236 }
2237
2238 /* To evaluate powi(x,n), the floating point value x raised to the
2239 constant integer exponent n, we use a hybrid algorithm that
2240 combines the "window method" with look-up tables. For an
2241 introduction to exponentiation algorithms and "addition chains",
2242 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2243 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2244 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2245 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2246
2247 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2248 multiplications to inline before calling the system library's pow
2249 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2250 so this default never requires calling pow, powf or powl. */
2251
2252 #ifndef POWI_MAX_MULTS
2253 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2254 #endif
2255
2256 /* The size of the "optimal power tree" lookup table. All
2257 exponents less than this value are simply looked up in the
2258 powi_table below. This threshold is also used to size the
2259 cache of pseudo registers that hold intermediate results. */
2260 #define POWI_TABLE_SIZE 256
2261
2262 /* The size, in bits of the window, used in the "window method"
2263 exponentiation algorithm. This is equivalent to a radix of
2264 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2265 #define POWI_WINDOW_SIZE 3
2266
2267 /* The following table is an efficient representation of an
2268 "optimal power tree". For each value, i, the corresponding
2269 value, j, in the table states than an optimal evaluation
2270 sequence for calculating pow(x,i) can be found by evaluating
2271 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2272 100 integers is given in Knuth's "Seminumerical algorithms". */
2273
2274 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2275 {
2276 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2277 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2278 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2279 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2280 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2281 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2282 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2283 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2284 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2285 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2286 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2287 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2288 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2289 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2290 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2291 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2292 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2293 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2294 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2295 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2296 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2297 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2298 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2299 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2300 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2301 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2302 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2303 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2304 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2305 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2306 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2307 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2308 };
2309
2310
2311 /* Return the number of multiplications required to calculate
2312 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2313 subroutine of powi_cost. CACHE is an array indicating
2314 which exponents have already been calculated. */
2315
2316 static int
2317 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2318 {
2319 /* If we've already calculated this exponent, then this evaluation
2320 doesn't require any additional multiplications. */
2321 if (cache[n])
2322 return 0;
2323
2324 cache[n] = true;
2325 return powi_lookup_cost (n - powi_table[n], cache)
2326 + powi_lookup_cost (powi_table[n], cache) + 1;
2327 }
2328
2329 /* Return the number of multiplications required to calculate
2330 powi(x,n) for an arbitrary x, given the exponent N. This
2331 function needs to be kept in sync with expand_powi below. */
2332
2333 static int
2334 powi_cost (HOST_WIDE_INT n)
2335 {
2336 bool cache[POWI_TABLE_SIZE];
2337 unsigned HOST_WIDE_INT digit;
2338 unsigned HOST_WIDE_INT val;
2339 int result;
2340
2341 if (n == 0)
2342 return 0;
2343
2344 /* Ignore the reciprocal when calculating the cost. */
2345 val = (n < 0) ? -n : n;
2346
2347 /* Initialize the exponent cache. */
2348 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2349 cache[1] = true;
2350
2351 result = 0;
2352
2353 while (val >= POWI_TABLE_SIZE)
2354 {
2355 if (val & 1)
2356 {
2357 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2358 result += powi_lookup_cost (digit, cache)
2359 + POWI_WINDOW_SIZE + 1;
2360 val >>= POWI_WINDOW_SIZE;
2361 }
2362 else
2363 {
2364 val >>= 1;
2365 result++;
2366 }
2367 }
2368
2369 return result + powi_lookup_cost (val, cache);
2370 }
2371
2372 /* Recursive subroutine of expand_powi. This function takes the array,
2373 CACHE, of already calculated exponents and an exponent N and returns
2374 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2375
2376 static rtx
2377 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2378 {
2379 unsigned HOST_WIDE_INT digit;
2380 rtx target, result;
2381 rtx op0, op1;
2382
2383 if (n < POWI_TABLE_SIZE)
2384 {
2385 if (cache[n])
2386 return cache[n];
2387
2388 target = gen_reg_rtx (mode);
2389 cache[n] = target;
2390
2391 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2392 op1 = expand_powi_1 (mode, powi_table[n], cache);
2393 }
2394 else if (n & 1)
2395 {
2396 target = gen_reg_rtx (mode);
2397 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2398 op0 = expand_powi_1 (mode, n - digit, cache);
2399 op1 = expand_powi_1 (mode, digit, cache);
2400 }
2401 else
2402 {
2403 target = gen_reg_rtx (mode);
2404 op0 = expand_powi_1 (mode, n >> 1, cache);
2405 op1 = op0;
2406 }
2407
2408 result = expand_mult (mode, op0, op1, target, 0);
2409 if (result != target)
2410 emit_move_insn (target, result);
2411 return target;
2412 }
2413
2414 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2415 floating point operand in mode MODE, and N is the exponent. This
2416 function needs to be kept in sync with powi_cost above. */
2417
2418 static rtx
2419 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2420 {
2421 unsigned HOST_WIDE_INT val;
2422 rtx cache[POWI_TABLE_SIZE];
2423 rtx result;
2424
2425 if (n == 0)
2426 return CONST1_RTX (mode);
2427
2428 val = (n < 0) ? -n : n;
2429
2430 memset (cache, 0, sizeof (cache));
2431 cache[1] = x;
2432
2433 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2434
2435 /* If the original exponent was negative, reciprocate the result. */
2436 if (n < 0)
2437 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2438 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2439
2440 return result;
2441 }
2442
2443 /* Expand a call to the pow built-in mathematical function. Return 0 if
2444 a normal call should be emitted rather than expanding the function
2445 in-line. EXP is the expression that is a call to the builtin
2446 function; if convenient, the result should be placed in TARGET. */
2447
2448 static rtx
2449 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2450 {
2451 tree arglist = TREE_OPERAND (exp, 1);
2452 tree arg0, arg1;
2453
2454 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2455 return 0;
2456
2457 arg0 = TREE_VALUE (arglist);
2458 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2459
2460 if (TREE_CODE (arg1) == REAL_CST
2461 && ! TREE_CONSTANT_OVERFLOW (arg1))
2462 {
2463 REAL_VALUE_TYPE cint;
2464 REAL_VALUE_TYPE c;
2465 HOST_WIDE_INT n;
2466
2467 c = TREE_REAL_CST (arg1);
2468 n = real_to_integer (&c);
2469 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2470 if (real_identical (&c, &cint))
2471 {
2472 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2473 Otherwise, check the number of multiplications required.
2474 Note that pow never sets errno for an integer exponent. */
2475 if ((n >= -1 && n <= 2)
2476 || (flag_unsafe_math_optimizations
2477 && ! optimize_size
2478 && powi_cost (n) <= POWI_MAX_MULTS))
2479 {
2480 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2481 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2482 op = force_reg (mode, op);
2483 return expand_powi (op, mode, n);
2484 }
2485 }
2486 }
2487
2488 if (! flag_unsafe_math_optimizations)
2489 return NULL_RTX;
2490 return expand_builtin_mathfn_2 (exp, target, subtarget);
2491 }
2492
2493 /* Expand a call to the powi built-in mathematical function. Return 0 if
2494 a normal call should be emitted rather than expanding the function
2495 in-line. EXP is the expression that is a call to the builtin
2496 function; if convenient, the result should be placed in TARGET. */
2497
2498 static rtx
2499 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2500 {
2501 tree arglist = TREE_OPERAND (exp, 1);
2502 tree arg0, arg1;
2503 rtx op0, op1;
2504 enum machine_mode mode;
2505 enum machine_mode mode2;
2506
2507 if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2508 return 0;
2509
2510 arg0 = TREE_VALUE (arglist);
2511 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2512 mode = TYPE_MODE (TREE_TYPE (exp));
2513
2514 /* Handle constant power. */
2515
2516 if (TREE_CODE (arg1) == INTEGER_CST
2517 && ! TREE_CONSTANT_OVERFLOW (arg1))
2518 {
2519 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2520
2521 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2522 Otherwise, check the number of multiplications required. */
2523 if ((TREE_INT_CST_HIGH (arg1) == 0
2524 || TREE_INT_CST_HIGH (arg1) == -1)
2525 && ((n >= -1 && n <= 2)
2526 || (! optimize_size
2527 && powi_cost (n) <= POWI_MAX_MULTS)))
2528 {
2529 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2530 op0 = force_reg (mode, op0);
2531 return expand_powi (op0, mode, n);
2532 }
2533 }
2534
2535 /* Emit a libcall to libgcc. */
2536
2537 /* Mode of the 2nd argument must match that of an int. */
2538 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2539
2540 if (target == NULL_RTX)
2541 target = gen_reg_rtx (mode);
2542
2543 op0 = expand_expr (arg0, subtarget, mode, 0);
2544 if (GET_MODE (op0) != mode)
2545 op0 = convert_to_mode (mode, op0, 0);
2546 op1 = expand_expr (arg1, 0, mode2, 0);
2547 if (GET_MODE (op1) != mode2)
2548 op1 = convert_to_mode (mode2, op1, 0);
2549
2550 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2551 target, LCT_CONST_MAKE_BLOCK, mode, 2,
2552 op0, mode, op1, mode2);
2553
2554 return target;
2555 }
2556
2557 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2558 if we failed the caller should emit a normal call, otherwise
2559 try to get the result in TARGET, if convenient. */
2560
2561 static rtx
2562 expand_builtin_strlen (tree arglist, rtx target,
2563 enum machine_mode target_mode)
2564 {
2565 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2566 return 0;
2567 else
2568 {
2569 rtx pat;
2570 tree len, src = TREE_VALUE (arglist);
2571 rtx result, src_reg, char_rtx, before_strlen;
2572 enum machine_mode insn_mode = target_mode, char_mode;
2573 enum insn_code icode = CODE_FOR_nothing;
2574 int align;
2575
2576 /* If the length can be computed at compile-time, return it. */
2577 len = c_strlen (src, 0);
2578 if (len)
2579 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2580
2581 /* If the length can be computed at compile-time and is constant
2582 integer, but there are side-effects in src, evaluate
2583 src for side-effects, then return len.
2584 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2585 can be optimized into: i++; x = 3; */
2586 len = c_strlen (src, 1);
2587 if (len && TREE_CODE (len) == INTEGER_CST)
2588 {
2589 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2590 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2591 }
2592
2593 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2594
2595 /* If SRC is not a pointer type, don't do this operation inline. */
2596 if (align == 0)
2597 return 0;
2598
2599 /* Bail out if we can't compute strlen in the right mode. */
2600 while (insn_mode != VOIDmode)
2601 {
2602 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2603 if (icode != CODE_FOR_nothing)
2604 break;
2605
2606 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2607 }
2608 if (insn_mode == VOIDmode)
2609 return 0;
2610
2611 /* Make a place to write the result of the instruction. */
2612 result = target;
2613 if (! (result != 0
2614 && REG_P (result)
2615 && GET_MODE (result) == insn_mode
2616 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2617 result = gen_reg_rtx (insn_mode);
2618
2619 /* Make a place to hold the source address. We will not expand
2620 the actual source until we are sure that the expansion will
2621 not fail -- there are trees that cannot be expanded twice. */
2622 src_reg = gen_reg_rtx (Pmode);
2623
2624 /* Mark the beginning of the strlen sequence so we can emit the
2625 source operand later. */
2626 before_strlen = get_last_insn ();
2627
2628 char_rtx = const0_rtx;
2629 char_mode = insn_data[(int) icode].operand[2].mode;
2630 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2631 char_mode))
2632 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2633
2634 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2635 char_rtx, GEN_INT (align));
2636 if (! pat)
2637 return 0;
2638 emit_insn (pat);
2639
2640 /* Now that we are assured of success, expand the source. */
2641 start_sequence ();
2642 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2643 if (pat != src_reg)
2644 emit_move_insn (src_reg, pat);
2645 pat = get_insns ();
2646 end_sequence ();
2647
2648 if (before_strlen)
2649 emit_insn_after (pat, before_strlen);
2650 else
2651 emit_insn_before (pat, get_insns ());
2652
2653 /* Return the value in the proper mode for this function. */
2654 if (GET_MODE (result) == target_mode)
2655 target = result;
2656 else if (target != 0)
2657 convert_move (target, result, 0);
2658 else
2659 target = convert_to_mode (target_mode, result, 0);
2660
2661 return target;
2662 }
2663 }
2664
2665 /* Expand a call to the strstr builtin. Return 0 if we failed the
2666 caller should emit a normal call, otherwise try to get the result
2667 in TARGET, if convenient (and in mode MODE if that's convenient). */
2668
2669 static rtx
2670 expand_builtin_strstr (tree arglist, tree type, rtx target, enum machine_mode mode)
2671 {
2672 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2673 {
2674 tree result = fold_builtin_strstr (arglist, type);
2675 if (result)
2676 return expand_expr (result, target, mode, EXPAND_NORMAL);
2677 }
2678 return 0;
2679 }
2680
2681 /* Expand a call to the strchr builtin. Return 0 if we failed the
2682 caller should emit a normal call, otherwise try to get the result
2683 in TARGET, if convenient (and in mode MODE if that's convenient). */
2684
2685 static rtx
2686 expand_builtin_strchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2687 {
2688 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2689 {
2690 tree result = fold_builtin_strchr (arglist, type);
2691 if (result)
2692 return expand_expr (result, target, mode, EXPAND_NORMAL);
2693
2694 /* FIXME: Should use strchrM optab so that ports can optimize this. */
2695 }
2696 return 0;
2697 }
2698
2699 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2700 caller should emit a normal call, otherwise try to get the result
2701 in TARGET, if convenient (and in mode MODE if that's convenient). */
2702
2703 static rtx
2704 expand_builtin_strrchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2705 {
2706 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2707 {
2708 tree result = fold_builtin_strrchr (arglist, type);
2709 if (result)
2710 return expand_expr (result, target, mode, EXPAND_NORMAL);
2711 }
2712 return 0;
2713 }
2714
2715 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2716 caller should emit a normal call, otherwise try to get the result
2717 in TARGET, if convenient (and in mode MODE if that's convenient). */
2718
2719 static rtx
2720 expand_builtin_strpbrk (tree arglist, tree type, rtx target, enum machine_mode mode)
2721 {
2722 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2723 {
2724 tree result = fold_builtin_strpbrk (arglist, type);
2725 if (result)
2726 return expand_expr (result, target, mode, EXPAND_NORMAL);
2727 }
2728 return 0;
2729 }
2730
2731 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2732 bytes from constant string DATA + OFFSET and return it as target
2733 constant. */
2734
2735 static rtx
2736 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2737 enum machine_mode mode)
2738 {
2739 const char *str = (const char *) data;
2740
2741 gcc_assert (offset >= 0
2742 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2743 <= strlen (str) + 1));
2744
2745 return c_readstr (str + offset, mode);
2746 }
2747
2748 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2749 Return 0 if we failed, the caller should emit a normal call,
2750 otherwise try to get the result in TARGET, if convenient (and in
2751 mode MODE if that's convenient). */
2752 static rtx
2753 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
2754 {
2755 tree fndecl = get_callee_fndecl (exp);
2756 tree arglist = TREE_OPERAND (exp, 1);
2757 if (!validate_arglist (arglist,
2758 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2759 return 0;
2760 else
2761 {
2762 tree dest = TREE_VALUE (arglist);
2763 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2764 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2765 const char *src_str;
2766 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2767 unsigned int dest_align
2768 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2769 rtx dest_mem, src_mem, dest_addr, len_rtx;
2770 tree result = fold_builtin_memcpy (fndecl, arglist);
2771
2772 if (result)
2773 return expand_expr (result, target, mode, EXPAND_NORMAL);
2774
2775 /* If DEST is not a pointer type, call the normal function. */
2776 if (dest_align == 0)
2777 return 0;
2778
2779 /* If either SRC is not a pointer type, don't do this
2780 operation in-line. */
2781 if (src_align == 0)
2782 return 0;
2783
2784 dest_mem = get_memory_rtx (dest);
2785 set_mem_align (dest_mem, dest_align);
2786 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2787 src_str = c_getstr (src);
2788
2789 /* If SRC is a string constant and block move would be done
2790 by pieces, we can avoid loading the string from memory
2791 and only stored the computed constants. */
2792 if (src_str
2793 && GET_CODE (len_rtx) == CONST_INT
2794 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2795 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2796 (void *) src_str, dest_align))
2797 {
2798 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2799 builtin_memcpy_read_str,
2800 (void *) src_str, dest_align, 0);
2801 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2802 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2803 return dest_mem;
2804 }
2805
2806 src_mem = get_memory_rtx (src);
2807 set_mem_align (src_mem, src_align);
2808
2809 /* Copy word part most expediently. */
2810 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2811 CALL_EXPR_TAILCALL (exp)
2812 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
2813
2814 if (dest_addr == 0)
2815 {
2816 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2817 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2818 }
2819 return dest_addr;
2820 }
2821 }
2822
2823 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2824 Return 0 if we failed the caller should emit a normal call,
2825 otherwise try to get the result in TARGET, if convenient (and in
2826 mode MODE if that's convenient). If ENDP is 0 return the
2827 destination pointer, if ENDP is 1 return the end pointer ala
2828 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2829 stpcpy. */
2830
2831 static rtx
2832 expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode,
2833 int endp)
2834 {
2835 if (!validate_arglist (arglist,
2836 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2837 return 0;
2838 /* If return value is ignored, transform mempcpy into memcpy. */
2839 else if (target == const0_rtx)
2840 {
2841 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2842
2843 if (!fn)
2844 return 0;
2845
2846 return expand_expr (build_function_call_expr (fn, arglist),
2847 target, mode, EXPAND_NORMAL);
2848 }
2849 else
2850 {
2851 tree dest = TREE_VALUE (arglist);
2852 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2853 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2854 const char *src_str;
2855 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2856 unsigned int dest_align
2857 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2858 rtx dest_mem, src_mem, len_rtx;
2859 tree result = fold_builtin_mempcpy (arglist, type, endp);
2860
2861 if (result)
2862 return expand_expr (result, target, mode, EXPAND_NORMAL);
2863
2864 /* If either SRC or DEST is not a pointer type, don't do this
2865 operation in-line. */
2866 if (dest_align == 0 || src_align == 0)
2867 return 0;
2868
2869 /* If LEN is not constant, call the normal function. */
2870 if (! host_integerp (len, 1))
2871 return 0;
2872
2873 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2874 src_str = c_getstr (src);
2875
2876 /* If SRC is a string constant and block move would be done
2877 by pieces, we can avoid loading the string from memory
2878 and only stored the computed constants. */
2879 if (src_str
2880 && GET_CODE (len_rtx) == CONST_INT
2881 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2882 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2883 (void *) src_str, dest_align))
2884 {
2885 dest_mem = get_memory_rtx (dest);
2886 set_mem_align (dest_mem, dest_align);
2887 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2888 builtin_memcpy_read_str,
2889 (void *) src_str, dest_align, endp);
2890 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2891 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2892 return dest_mem;
2893 }
2894
2895 if (GET_CODE (len_rtx) == CONST_INT
2896 && can_move_by_pieces (INTVAL (len_rtx),
2897 MIN (dest_align, src_align)))
2898 {
2899 dest_mem = get_memory_rtx (dest);
2900 set_mem_align (dest_mem, dest_align);
2901 src_mem = get_memory_rtx (src);
2902 set_mem_align (src_mem, src_align);
2903 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2904 MIN (dest_align, src_align), endp);
2905 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2906 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2907 return dest_mem;
2908 }
2909
2910 return 0;
2911 }
2912 }
2913
2914 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2915 if we failed the caller should emit a normal call. */
2916
2917 static rtx
2918 expand_builtin_memmove (tree arglist, tree type, rtx target,
2919 enum machine_mode mode, tree orig_exp)
2920 {
2921 if (!validate_arglist (arglist,
2922 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2923 return 0;
2924 else
2925 {
2926 tree dest = TREE_VALUE (arglist);
2927 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2928 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2929
2930 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2931 unsigned int dest_align
2932 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2933 tree result = fold_builtin_memmove (arglist, type);
2934
2935 if (result)
2936 return expand_expr (result, target, mode, EXPAND_NORMAL);
2937
2938 /* If DEST is not a pointer type, call the normal function. */
2939 if (dest_align == 0)
2940 return 0;
2941
2942 /* If either SRC is not a pointer type, don't do this
2943 operation in-line. */
2944 if (src_align == 0)
2945 return 0;
2946
2947 /* If src is categorized for a readonly section we can use
2948 normal memcpy. */
2949 if (readonly_data_expr (src))
2950 {
2951 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2952 if (!fn)
2953 return 0;
2954 fn = build_function_call_expr (fn, arglist);
2955 if (TREE_CODE (fn) == CALL_EXPR)
2956 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
2957 return expand_expr (fn, target, mode, EXPAND_NORMAL);
2958 }
2959
2960 /* If length is 1 and we can expand memcpy call inline,
2961 it is ok to use memcpy as well. */
2962 if (integer_onep (len))
2963 {
2964 rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
2965 /*endp=*/0);
2966 if (ret)
2967 return ret;
2968 }
2969
2970 /* Otherwise, call the normal function. */
2971 return 0;
2972 }
2973 }
2974
2975 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2976 if we failed the caller should emit a normal call. */
2977
2978 static rtx
2979 expand_builtin_bcopy (tree exp)
2980 {
2981 tree arglist = TREE_OPERAND (exp, 1);
2982 tree type = TREE_TYPE (exp);
2983 tree src, dest, size, newarglist;
2984
2985 if (!validate_arglist (arglist,
2986 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2987 return NULL_RTX;
2988
2989 src = TREE_VALUE (arglist);
2990 dest = TREE_VALUE (TREE_CHAIN (arglist));
2991 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2992
2993 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2994 memmove(ptr y, ptr x, size_t z). This is done this way
2995 so that if it isn't expanded inline, we fallback to
2996 calling bcopy instead of memmove. */
2997
2998 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
2999 newarglist = tree_cons (NULL_TREE, src, newarglist);
3000 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3001
3002 return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
3003 }
3004
3005 #ifndef HAVE_movstr
3006 # define HAVE_movstr 0
3007 # define CODE_FOR_movstr CODE_FOR_nothing
3008 #endif
3009
3010 /* Expand into a movstr instruction, if one is available. Return 0 if
3011 we failed, the caller should emit a normal call, otherwise try to
3012 get the result in TARGET, if convenient. If ENDP is 0 return the
3013 destination pointer, if ENDP is 1 return the end pointer ala
3014 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3015 stpcpy. */
3016
3017 static rtx
3018 expand_movstr (tree dest, tree src, rtx target, int endp)
3019 {
3020 rtx end;
3021 rtx dest_mem;
3022 rtx src_mem;
3023 rtx insn;
3024 const struct insn_data * data;
3025
3026 if (!HAVE_movstr)
3027 return 0;
3028
3029 dest_mem = get_memory_rtx (dest);
3030 src_mem = get_memory_rtx (src);
3031 if (!endp)
3032 {
3033 target = force_reg (Pmode, XEXP (dest_mem, 0));
3034 dest_mem = replace_equiv_address (dest_mem, target);
3035 end = gen_reg_rtx (Pmode);
3036 }
3037 else
3038 {
3039 if (target == 0 || target == const0_rtx)
3040 {
3041 end = gen_reg_rtx (Pmode);
3042 if (target == 0)
3043 target = end;
3044 }
3045 else
3046 end = target;
3047 }
3048
3049 data = insn_data + CODE_FOR_movstr;
3050
3051 if (data->operand[0].mode != VOIDmode)
3052 end = gen_lowpart (data->operand[0].mode, end);
3053
3054 insn = data->genfun (end, dest_mem, src_mem);
3055
3056 gcc_assert (insn);
3057
3058 emit_insn (insn);
3059
3060 /* movstr is supposed to set end to the address of the NUL
3061 terminator. If the caller requested a mempcpy-like return value,
3062 adjust it. */
3063 if (endp == 1 && target != const0_rtx)
3064 {
3065 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3066 emit_move_insn (target, force_operand (tem, NULL_RTX));
3067 }
3068
3069 return target;
3070 }
3071
3072 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
3073 if we failed the caller should emit a normal call, otherwise try to get
3074 the result in TARGET, if convenient (and in mode MODE if that's
3075 convenient). */
3076
3077 static rtx
3078 expand_builtin_strcpy (tree exp, rtx target, enum machine_mode mode)
3079 {
3080 tree fndecl = get_callee_fndecl (exp);
3081 tree arglist = TREE_OPERAND (exp, 1);
3082 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3083 {
3084 tree result = fold_builtin_strcpy (fndecl, arglist, 0);
3085 if (result)
3086 return expand_expr (result, target, mode, EXPAND_NORMAL);
3087
3088 return expand_movstr (TREE_VALUE (arglist),
3089 TREE_VALUE (TREE_CHAIN (arglist)),
3090 target, /*endp=*/0);
3091 }
3092 return 0;
3093 }
3094
3095 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3096 Return 0 if we failed the caller should emit a normal call,
3097 otherwise try to get the result in TARGET, if convenient (and in
3098 mode MODE if that's convenient). */
3099
3100 static rtx
3101 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3102 {
3103 tree arglist = TREE_OPERAND (exp, 1);
3104 /* If return value is ignored, transform stpcpy into strcpy. */
3105 if (target == const0_rtx)
3106 {
3107 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3108 if (!fn)
3109 return 0;
3110
3111 return expand_expr (build_function_call_expr (fn, arglist),
3112 target, mode, EXPAND_NORMAL);
3113 }
3114
3115 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3116 return 0;
3117 else
3118 {
3119 tree dst, src, len, lenp1;
3120 tree narglist;
3121 rtx ret;
3122
3123 /* Ensure we get an actual string whose length can be evaluated at
3124 compile-time, not an expression containing a string. This is
3125 because the latter will potentially produce pessimized code
3126 when used to produce the return value. */
3127 src = TREE_VALUE (TREE_CHAIN (arglist));
3128 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3129 return expand_movstr (TREE_VALUE (arglist),
3130 TREE_VALUE (TREE_CHAIN (arglist)),
3131 target, /*endp=*/2);
3132
3133 dst = TREE_VALUE (arglist);
3134 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3135 narglist = build_tree_list (NULL_TREE, lenp1);
3136 narglist = tree_cons (NULL_TREE, src, narglist);
3137 narglist = tree_cons (NULL_TREE, dst, narglist);
3138 ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp),
3139 target, mode, /*endp=*/2);
3140
3141 if (ret)
3142 return ret;
3143
3144 if (TREE_CODE (len) == INTEGER_CST)
3145 {
3146 rtx len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3147
3148 if (GET_CODE (len_rtx) == CONST_INT)
3149 {
3150 ret = expand_builtin_strcpy (exp, target, mode);
3151
3152 if (ret)
3153 {
3154 if (! target)
3155 {
3156 if (mode != VOIDmode)
3157 target = gen_reg_rtx (mode);
3158 else
3159 target = gen_reg_rtx (GET_MODE (ret));
3160 }
3161 if (GET_MODE (target) != GET_MODE (ret))
3162 ret = gen_lowpart (GET_MODE (target), ret);
3163
3164 ret = plus_constant (ret, INTVAL (len_rtx));
3165 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3166 gcc_assert (ret);
3167
3168 return target;
3169 }
3170 }
3171 }
3172
3173 return expand_movstr (TREE_VALUE (arglist),
3174 TREE_VALUE (TREE_CHAIN (arglist)),
3175 target, /*endp=*/2);
3176 }
3177 }
3178
3179 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3180 bytes from constant string DATA + OFFSET and return it as target
3181 constant. */
3182
3183 static rtx
3184 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3185 enum machine_mode mode)
3186 {
3187 const char *str = (const char *) data;
3188
3189 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3190 return const0_rtx;
3191
3192 return c_readstr (str + offset, mode);
3193 }
3194
3195 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3196 if we failed the caller should emit a normal call. */
3197
3198 static rtx
3199 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3200 {
3201 tree fndecl = get_callee_fndecl (exp);
3202 tree arglist = TREE_OPERAND (exp, 1);
3203 if (validate_arglist (arglist,
3204 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3205 {
3206 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3207 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3208 tree result = fold_builtin_strncpy (fndecl, arglist, slen);
3209
3210 if (result)
3211 return expand_expr (result, target, mode, EXPAND_NORMAL);
3212
3213 /* We must be passed a constant len and src parameter. */
3214 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3215 return 0;
3216
3217 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3218
3219 /* We're required to pad with trailing zeros if the requested
3220 len is greater than strlen(s2)+1. In that case try to
3221 use store_by_pieces, if it fails, punt. */
3222 if (tree_int_cst_lt (slen, len))
3223 {
3224 tree dest = TREE_VALUE (arglist);
3225 unsigned int dest_align
3226 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3227 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
3228 rtx dest_mem;
3229
3230 if (!p || dest_align == 0 || !host_integerp (len, 1)
3231 || !can_store_by_pieces (tree_low_cst (len, 1),
3232 builtin_strncpy_read_str,
3233 (void *) p, dest_align))
3234 return 0;
3235
3236 dest_mem = get_memory_rtx (dest);
3237 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3238 builtin_strncpy_read_str,
3239 (void *) p, dest_align, 0);
3240 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3241 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3242 return dest_mem;
3243 }
3244 }
3245 return 0;
3246 }
3247
3248 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3249 bytes from constant string DATA + OFFSET and return it as target
3250 constant. */
3251
3252 static rtx
3253 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3254 enum machine_mode mode)
3255 {
3256 const char *c = (const char *) data;
3257 char *p = alloca (GET_MODE_SIZE (mode));
3258
3259 memset (p, *c, GET_MODE_SIZE (mode));
3260
3261 return c_readstr (p, mode);
3262 }
3263
3264 /* Callback routine for store_by_pieces. Return the RTL of a register
3265 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3266 char value given in the RTL register data. For example, if mode is
3267 4 bytes wide, return the RTL for 0x01010101*data. */
3268
3269 static rtx
3270 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3271 enum machine_mode mode)
3272 {
3273 rtx target, coeff;
3274 size_t size;
3275 char *p;
3276
3277 size = GET_MODE_SIZE (mode);
3278 if (size == 1)
3279 return (rtx) data;
3280
3281 p = alloca (size);
3282 memset (p, 1, size);
3283 coeff = c_readstr (p, mode);
3284
3285 target = convert_to_mode (mode, (rtx) data, 1);
3286 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3287 return force_reg (mode, target);
3288 }
3289
3290 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3291 if we failed the caller should emit a normal call, otherwise try to get
3292 the result in TARGET, if convenient (and in mode MODE if that's
3293 convenient). */
3294
3295 static rtx
3296 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
3297 tree orig_exp)
3298 {
3299 if (!validate_arglist (arglist,
3300 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3301 return 0;
3302 else
3303 {
3304 tree dest = TREE_VALUE (arglist);
3305 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3306 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3307 char c;
3308
3309 unsigned int dest_align
3310 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3311 rtx dest_mem, dest_addr, len_rtx;
3312
3313 /* If DEST is not a pointer type, don't do this
3314 operation in-line. */
3315 if (dest_align == 0)
3316 return 0;
3317
3318 /* If the LEN parameter is zero, return DEST. */
3319 if (integer_zerop (len))
3320 {
3321 /* Evaluate and ignore VAL in case it has side-effects. */
3322 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3323 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3324 }
3325
3326 if (TREE_CODE (val) != INTEGER_CST)
3327 {
3328 rtx val_rtx;
3329
3330 if (!host_integerp (len, 1))
3331 return 0;
3332
3333 if (optimize_size && tree_low_cst (len, 1) > 1)
3334 return 0;
3335
3336 /* Assume that we can memset by pieces if we can store the
3337 * the coefficients by pieces (in the required modes).
3338 * We can't pass builtin_memset_gen_str as that emits RTL. */
3339 c = 1;
3340 if (!can_store_by_pieces (tree_low_cst (len, 1),
3341 builtin_memset_read_str,
3342 &c, dest_align))
3343 return 0;
3344
3345 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3346 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3347 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3348 val_rtx);
3349 dest_mem = get_memory_rtx (dest);
3350 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3351 builtin_memset_gen_str,
3352 val_rtx, dest_align, 0);
3353 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3354 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3355 return dest_mem;
3356 }
3357
3358 if (target_char_cast (val, &c))
3359 return 0;
3360
3361 if (c)
3362 {
3363 if (!host_integerp (len, 1))
3364 return 0;
3365 if (!can_store_by_pieces (tree_low_cst (len, 1),
3366 builtin_memset_read_str, &c,
3367 dest_align))
3368 return 0;
3369
3370 dest_mem = get_memory_rtx (dest);
3371 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3372 builtin_memset_read_str,
3373 &c, dest_align, 0);
3374 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3375 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3376 return dest_mem;
3377 }
3378
3379 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3380
3381 dest_mem = get_memory_rtx (dest);
3382 set_mem_align (dest_mem, dest_align);
3383 dest_addr = clear_storage (dest_mem, len_rtx,
3384 CALL_EXPR_TAILCALL (orig_exp)
3385 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
3386
3387 if (dest_addr == 0)
3388 {
3389 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3390 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3391 }
3392
3393 return dest_addr;
3394 }
3395 }
3396
3397 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3398 if we failed the caller should emit a normal call. */
3399
3400 static rtx
3401 expand_builtin_bzero (tree exp)
3402 {
3403 tree arglist = TREE_OPERAND (exp, 1);
3404 tree dest, size, newarglist;
3405
3406 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3407 return NULL_RTX;
3408
3409 dest = TREE_VALUE (arglist);
3410 size = TREE_VALUE (TREE_CHAIN (arglist));
3411
3412 /* New argument list transforming bzero(ptr x, int y) to
3413 memset(ptr x, int 0, size_t y). This is done this way
3414 so that if it isn't expanded inline, we fallback to
3415 calling bzero instead of memset. */
3416
3417 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3418 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3419 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3420
3421 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode, exp);
3422 }
3423
3424 /* Expand expression EXP, which is a call to the memcmp built-in function.
3425 ARGLIST is the argument list for this call. Return 0 if we failed and the
3426 caller should emit a normal call, otherwise try to get the result in
3427 TARGET, if convenient (and in mode MODE, if that's convenient). */
3428
3429 static rtx
3430 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3431 enum machine_mode mode)
3432 {
3433 if (!validate_arglist (arglist,
3434 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3435 return 0;
3436 else
3437 {
3438 tree result = fold_builtin_memcmp (arglist);
3439 if (result)
3440 return expand_expr (result, target, mode, EXPAND_NORMAL);
3441 }
3442
3443 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3444 {
3445 tree arg1 = TREE_VALUE (arglist);
3446 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3447 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3448 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3449 rtx result;
3450 rtx insn;
3451
3452 int arg1_align
3453 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3454 int arg2_align
3455 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3456 enum machine_mode insn_mode;
3457
3458 #ifdef HAVE_cmpmemsi
3459 if (HAVE_cmpmemsi)
3460 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3461 else
3462 #endif
3463 #ifdef HAVE_cmpstrsi
3464 if (HAVE_cmpstrsi)
3465 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3466 else
3467 #endif
3468 return 0;
3469
3470 /* If we don't have POINTER_TYPE, call the function. */
3471 if (arg1_align == 0 || arg2_align == 0)
3472 return 0;
3473
3474 /* Make a place to write the result of the instruction. */
3475 result = target;
3476 if (! (result != 0
3477 && REG_P (result) && GET_MODE (result) == insn_mode
3478 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3479 result = gen_reg_rtx (insn_mode);
3480
3481 arg1_rtx = get_memory_rtx (arg1);
3482 arg2_rtx = get_memory_rtx (arg2);
3483 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3484
3485 /* Set MEM_SIZE as appropriate. */
3486 if (GET_CODE (arg3_rtx) == CONST_INT)
3487 {
3488 set_mem_size (arg1_rtx, arg3_rtx);
3489 set_mem_size (arg2_rtx, arg3_rtx);
3490 }
3491
3492 #ifdef HAVE_cmpmemsi
3493 if (HAVE_cmpmemsi)
3494 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3495 GEN_INT (MIN (arg1_align, arg2_align)));
3496 else
3497 #endif
3498 #ifdef HAVE_cmpstrsi
3499 if (HAVE_cmpstrsi)
3500 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3501 GEN_INT (MIN (arg1_align, arg2_align)));
3502 else
3503 #endif
3504 gcc_unreachable ();
3505
3506 if (insn)
3507 emit_insn (insn);
3508 else
3509 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3510 TYPE_MODE (integer_type_node), 3,
3511 XEXP (arg1_rtx, 0), Pmode,
3512 XEXP (arg2_rtx, 0), Pmode,
3513 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3514 TYPE_UNSIGNED (sizetype)),
3515 TYPE_MODE (sizetype));
3516
3517 /* Return the value in the proper mode for this function. */
3518 mode = TYPE_MODE (TREE_TYPE (exp));
3519 if (GET_MODE (result) == mode)
3520 return result;
3521 else if (target != 0)
3522 {
3523 convert_move (target, result, 0);
3524 return target;
3525 }
3526 else
3527 return convert_to_mode (mode, result, 0);
3528 }
3529 #endif
3530
3531 return 0;
3532 }
3533
3534 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3535 if we failed the caller should emit a normal call, otherwise try to get
3536 the result in TARGET, if convenient. */
3537
3538 static rtx
3539 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3540 {
3541 tree arglist = TREE_OPERAND (exp, 1);
3542
3543 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3544 return 0;
3545 else
3546 {
3547 tree result = fold_builtin_strcmp (arglist);
3548 if (result)
3549 return expand_expr (result, target, mode, EXPAND_NORMAL);
3550 }
3551
3552 #ifdef HAVE_cmpstrsi
3553 if (HAVE_cmpstrsi)
3554 {
3555 tree arg1 = TREE_VALUE (arglist);
3556 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3557 tree len, len1, len2;
3558 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3559 rtx result, insn;
3560 tree fndecl, fn;
3561
3562 int arg1_align
3563 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3564 int arg2_align
3565 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3566 enum machine_mode insn_mode
3567 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3568
3569 len1 = c_strlen (arg1, 1);
3570 len2 = c_strlen (arg2, 1);
3571
3572 if (len1)
3573 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3574 if (len2)
3575 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3576
3577 /* If we don't have a constant length for the first, use the length
3578 of the second, if we know it. We don't require a constant for
3579 this case; some cost analysis could be done if both are available
3580 but neither is constant. For now, assume they're equally cheap,
3581 unless one has side effects. If both strings have constant lengths,
3582 use the smaller. */
3583
3584 if (!len1)
3585 len = len2;
3586 else if (!len2)
3587 len = len1;
3588 else if (TREE_SIDE_EFFECTS (len1))
3589 len = len2;
3590 else if (TREE_SIDE_EFFECTS (len2))
3591 len = len1;
3592 else if (TREE_CODE (len1) != INTEGER_CST)
3593 len = len2;
3594 else if (TREE_CODE (len2) != INTEGER_CST)
3595 len = len1;
3596 else if (tree_int_cst_lt (len1, len2))
3597 len = len1;
3598 else
3599 len = len2;
3600
3601 /* If both arguments have side effects, we cannot optimize. */
3602 if (!len || TREE_SIDE_EFFECTS (len))
3603 return 0;
3604
3605 /* If we don't have POINTER_TYPE, call the function. */
3606 if (arg1_align == 0 || arg2_align == 0)
3607 return 0;
3608
3609 /* Make a place to write the result of the instruction. */
3610 result = target;
3611 if (! (result != 0
3612 && REG_P (result) && GET_MODE (result) == insn_mode
3613 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3614 result = gen_reg_rtx (insn_mode);
3615
3616 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3617 arg1 = builtin_save_expr (arg1);
3618 arg2 = builtin_save_expr (arg2);
3619
3620 arg1_rtx = get_memory_rtx (arg1);
3621 arg2_rtx = get_memory_rtx (arg2);
3622 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3623 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3624 GEN_INT (MIN (arg1_align, arg2_align)));
3625 if (insn)
3626 {
3627 emit_insn (insn);
3628
3629 /* Return the value in the proper mode for this function. */
3630 mode = TYPE_MODE (TREE_TYPE (exp));
3631 if (GET_MODE (result) == mode)
3632 return result;
3633 if (target == 0)
3634 return convert_to_mode (mode, result, 0);
3635 convert_move (target, result, 0);
3636 return target;
3637 }
3638
3639 /* Expand the library call ourselves using a stabilized argument
3640 list to avoid re-evaluating the function's arguments twice. */
3641 arglist = build_tree_list (NULL_TREE, arg2);
3642 arglist = tree_cons (NULL_TREE, arg1, arglist);
3643 fndecl = get_callee_fndecl (exp);
3644 fn = build_function_call_expr (fndecl, arglist);
3645 if (TREE_CODE (fn) == CALL_EXPR)
3646 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3647 return expand_call (fn, target, target == const0_rtx);
3648 }
3649 #endif
3650 return 0;
3651 }
3652
3653 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3654 if we failed the caller should emit a normal call, otherwise try to get
3655 the result in TARGET, if convenient. */
3656
3657 static rtx
3658 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3659 {
3660 tree arglist = TREE_OPERAND (exp, 1);
3661
3662 if (!validate_arglist (arglist,
3663 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3664 return 0;
3665 else
3666 {
3667 tree result = fold_builtin_strncmp (arglist);
3668 if (result)
3669 return expand_expr (result, target, mode, EXPAND_NORMAL);
3670 }
3671
3672 /* If c_strlen can determine an expression for one of the string
3673 lengths, and it doesn't have side effects, then emit cmpstrsi
3674 using length MIN(strlen(string)+1, arg3). */
3675 #ifdef HAVE_cmpstrsi
3676 if (HAVE_cmpstrsi)
3677 {
3678 tree arg1 = TREE_VALUE (arglist);
3679 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3680 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3681 tree len, len1, len2;
3682 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3683 rtx result, insn;
3684 tree fndecl, fn;
3685
3686 int arg1_align
3687 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3688 int arg2_align
3689 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3690 enum machine_mode insn_mode
3691 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3692
3693 len1 = c_strlen (arg1, 1);
3694 len2 = c_strlen (arg2, 1);
3695
3696 if (len1)
3697 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3698 if (len2)
3699 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3700
3701 /* If we don't have a constant length for the first, use the length
3702 of the second, if we know it. We don't require a constant for
3703 this case; some cost analysis could be done if both are available
3704 but neither is constant. For now, assume they're equally cheap,
3705 unless one has side effects. If both strings have constant lengths,
3706 use the smaller. */
3707
3708 if (!len1)
3709 len = len2;
3710 else if (!len2)
3711 len = len1;
3712 else if (TREE_SIDE_EFFECTS (len1))
3713 len = len2;
3714 else if (TREE_SIDE_EFFECTS (len2))
3715 len = len1;
3716 else if (TREE_CODE (len1) != INTEGER_CST)
3717 len = len2;
3718 else if (TREE_CODE (len2) != INTEGER_CST)
3719 len = len1;
3720 else if (tree_int_cst_lt (len1, len2))
3721 len = len1;
3722 else
3723 len = len2;
3724
3725 /* If both arguments have side effects, we cannot optimize. */
3726 if (!len || TREE_SIDE_EFFECTS (len))
3727 return 0;
3728
3729 /* The actual new length parameter is MIN(len,arg3). */
3730 len = fold (build2 (MIN_EXPR, TREE_TYPE (len), len,
3731 fold_convert (TREE_TYPE (len), arg3)));
3732
3733 /* If we don't have POINTER_TYPE, call the function. */
3734 if (arg1_align == 0 || arg2_align == 0)
3735 return 0;
3736
3737 /* Make a place to write the result of the instruction. */
3738 result = target;
3739 if (! (result != 0
3740 && REG_P (result) && GET_MODE (result) == insn_mode
3741 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3742 result = gen_reg_rtx (insn_mode);
3743
3744 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3745 arg1 = builtin_save_expr (arg1);
3746 arg2 = builtin_save_expr (arg2);
3747 len = builtin_save_expr (len);
3748
3749 arg1_rtx = get_memory_rtx (arg1);
3750 arg2_rtx = get_memory_rtx (arg2);
3751 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3752 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3753 GEN_INT (MIN (arg1_align, arg2_align)));
3754 if (insn)
3755 {
3756 emit_insn (insn);
3757
3758 /* Return the value in the proper mode for this function. */
3759 mode = TYPE_MODE (TREE_TYPE (exp));
3760 if (GET_MODE (result) == mode)
3761 return result;
3762 if (target == 0)
3763 return convert_to_mode (mode, result, 0);
3764 convert_move (target, result, 0);
3765 return target;
3766 }
3767
3768 /* Expand the library call ourselves using a stabilized argument
3769 list to avoid re-evaluating the function's arguments twice. */
3770 arglist = build_tree_list (NULL_TREE, len);
3771 arglist = tree_cons (NULL_TREE, arg2, arglist);
3772 arglist = tree_cons (NULL_TREE, arg1, arglist);
3773 fndecl = get_callee_fndecl (exp);
3774 fn = build_function_call_expr (fndecl, arglist);
3775 if (TREE_CODE (fn) == CALL_EXPR)
3776 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3777 return expand_call (fn, target, target == const0_rtx);
3778 }
3779 #endif
3780 return 0;
3781 }
3782
3783 /* Expand expression EXP, which is a call to the strcat builtin.
3784 Return 0 if we failed the caller should emit a normal call,
3785 otherwise try to get the result in TARGET, if convenient. */
3786
3787 static rtx
3788 expand_builtin_strcat (tree arglist, tree type, rtx target, enum machine_mode mode)
3789 {
3790 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3791 return 0;
3792 else
3793 {
3794 tree dst = TREE_VALUE (arglist),
3795 src = TREE_VALUE (TREE_CHAIN (arglist));
3796 const char *p = c_getstr (src);
3797
3798 if (p)
3799 {
3800 /* If the string length is zero, return the dst parameter. */
3801 if (*p == '\0')
3802 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3803 else if (!optimize_size)
3804 {
3805 /* Otherwise if !optimize_size, see if we can store by
3806 pieces into (dst + strlen(dst)). */
3807 tree newdst, arglist,
3808 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3809
3810 /* This is the length argument. */
3811 arglist = build_tree_list (NULL_TREE,
3812 fold (size_binop (PLUS_EXPR,
3813 c_strlen (src, 0),
3814 ssize_int (1))));
3815 /* Prepend src argument. */
3816 arglist = tree_cons (NULL_TREE, src, arglist);
3817
3818 /* We're going to use dst more than once. */
3819 dst = builtin_save_expr (dst);
3820
3821 /* Create strlen (dst). */
3822 newdst =
3823 fold (build_function_call_expr (strlen_fn,
3824 build_tree_list (NULL_TREE,
3825 dst)));
3826 /* Create (dst + (cast) strlen (dst)). */
3827 newdst = fold_convert (TREE_TYPE (dst), newdst);
3828 newdst = fold (build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst));
3829
3830 /* Prepend the new dst argument. */
3831 arglist = tree_cons (NULL_TREE, newdst, arglist);
3832
3833 /* We don't want to get turned into a memcpy if the
3834 target is const0_rtx, i.e. when the return value
3835 isn't used. That would produce pessimized code so
3836 pass in a target of zero, it should never actually be
3837 used. If this was successful return the original
3838 dst, not the result of mempcpy. */
3839 if (expand_builtin_mempcpy (arglist, type, /*target=*/0, mode, /*endp=*/0))
3840 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3841 else
3842 return 0;
3843 }
3844 }
3845
3846 return 0;
3847 }
3848 }
3849
3850 /* Expand expression EXP, which is a call to the strncat builtin.
3851 Return 0 if we failed the caller should emit a normal call,
3852 otherwise try to get the result in TARGET, if convenient. */
3853
3854 static rtx
3855 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3856 {
3857 if (validate_arglist (arglist,
3858 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3859 {
3860 tree result = fold_builtin_strncat (arglist);
3861 if (result)
3862 return expand_expr (result, target, mode, EXPAND_NORMAL);
3863 }
3864 return 0;
3865 }
3866
3867 /* Expand expression EXP, which is a call to the strspn builtin.
3868 Return 0 if we failed the caller should emit a normal call,
3869 otherwise try to get the result in TARGET, if convenient. */
3870
3871 static rtx
3872 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3873 {
3874 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3875 {
3876 tree result = fold_builtin_strspn (arglist);
3877 if (result)
3878 return expand_expr (result, target, mode, EXPAND_NORMAL);
3879 }
3880 return 0;
3881 }
3882
3883 /* Expand expression EXP, which is a call to the strcspn builtin.
3884 Return 0 if we failed the caller should emit a normal call,
3885 otherwise try to get the result in TARGET, if convenient. */
3886
3887 static rtx
3888 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3889 {
3890 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3891 {
3892 tree result = fold_builtin_strcspn (arglist);
3893 if (result)
3894 return expand_expr (result, target, mode, EXPAND_NORMAL);
3895 }
3896 return 0;
3897 }
3898
3899 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3900 if that's convenient. */
3901
3902 rtx
3903 expand_builtin_saveregs (void)
3904 {
3905 rtx val, seq;
3906
3907 /* Don't do __builtin_saveregs more than once in a function.
3908 Save the result of the first call and reuse it. */
3909 if (saveregs_value != 0)
3910 return saveregs_value;
3911
3912 /* When this function is called, it means that registers must be
3913 saved on entry to this function. So we migrate the call to the
3914 first insn of this function. */
3915
3916 start_sequence ();
3917
3918 /* Do whatever the machine needs done in this case. */
3919 val = targetm.calls.expand_builtin_saveregs ();
3920
3921 seq = get_insns ();
3922 end_sequence ();
3923
3924 saveregs_value = val;
3925
3926 /* Put the insns after the NOTE that starts the function. If this
3927 is inside a start_sequence, make the outer-level insn chain current, so
3928 the code is placed at the start of the function. */
3929 push_topmost_sequence ();
3930 emit_insn_after (seq, entry_of_function ());
3931 pop_topmost_sequence ();
3932
3933 return val;
3934 }
3935
3936 /* __builtin_args_info (N) returns word N of the arg space info
3937 for the current function. The number and meanings of words
3938 is controlled by the definition of CUMULATIVE_ARGS. */
3939
3940 static rtx
3941 expand_builtin_args_info (tree arglist)
3942 {
3943 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3944 int *word_ptr = (int *) &current_function_args_info;
3945
3946 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
3947
3948 if (arglist != 0)
3949 {
3950 if (!host_integerp (TREE_VALUE (arglist), 0))
3951 error ("argument of %<__builtin_args_info%> must be constant");
3952 else
3953 {
3954 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3955
3956 if (wordnum < 0 || wordnum >= nwords)
3957 error ("argument of %<__builtin_args_info%> out of range");
3958 else
3959 return GEN_INT (word_ptr[wordnum]);
3960 }
3961 }
3962 else
3963 error ("missing argument in %<__builtin_args_info%>");
3964
3965 return const0_rtx;
3966 }
3967
3968 /* Expand a call to __builtin_next_arg. */
3969
3970 static rtx
3971 expand_builtin_next_arg (void)
3972 {
3973 /* Checking arguments is already done in fold_builtin_next_arg
3974 that must be called before this function. */
3975 return expand_binop (Pmode, add_optab,
3976 current_function_internal_arg_pointer,
3977 current_function_arg_offset_rtx,
3978 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3979 }
3980
3981 /* Make it easier for the backends by protecting the valist argument
3982 from multiple evaluations. */
3983
3984 static tree
3985 stabilize_va_list (tree valist, int needs_lvalue)
3986 {
3987 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3988 {
3989 if (TREE_SIDE_EFFECTS (valist))
3990 valist = save_expr (valist);
3991
3992 /* For this case, the backends will be expecting a pointer to
3993 TREE_TYPE (va_list_type_node), but it's possible we've
3994 actually been given an array (an actual va_list_type_node).
3995 So fix it. */
3996 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3997 {
3998 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3999 valist = build_fold_addr_expr_with_type (valist, p1);
4000 }
4001 }
4002 else
4003 {
4004 tree pt;
4005
4006 if (! needs_lvalue)
4007 {
4008 if (! TREE_SIDE_EFFECTS (valist))
4009 return valist;
4010
4011 pt = build_pointer_type (va_list_type_node);
4012 valist = fold (build1 (ADDR_EXPR, pt, valist));
4013 TREE_SIDE_EFFECTS (valist) = 1;
4014 }
4015
4016 if (TREE_SIDE_EFFECTS (valist))
4017 valist = save_expr (valist);
4018 valist = build_fold_indirect_ref (valist);
4019 }
4020
4021 return valist;
4022 }
4023
4024 /* The "standard" definition of va_list is void*. */
4025
4026 tree
4027 std_build_builtin_va_list (void)
4028 {
4029 return ptr_type_node;
4030 }
4031
4032 /* The "standard" implementation of va_start: just assign `nextarg' to
4033 the variable. */
4034
4035 void
4036 std_expand_builtin_va_start (tree valist, rtx nextarg)
4037 {
4038 tree t;
4039
4040 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4041 make_tree (ptr_type_node, nextarg));
4042 TREE_SIDE_EFFECTS (t) = 1;
4043
4044 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4045 }
4046
4047 /* Expand ARGLIST, from a call to __builtin_va_start. */
4048
4049 static rtx
4050 expand_builtin_va_start (tree arglist)
4051 {
4052 rtx nextarg;
4053 tree chain, valist;
4054
4055 chain = TREE_CHAIN (arglist);
4056
4057 if (!chain)
4058 {
4059 error ("too few arguments to function %<va_start%>");
4060 return const0_rtx;
4061 }
4062
4063 if (fold_builtin_next_arg (chain))
4064 return const0_rtx;
4065
4066 nextarg = expand_builtin_next_arg ();
4067 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
4068
4069 #ifdef EXPAND_BUILTIN_VA_START
4070 EXPAND_BUILTIN_VA_START (valist, nextarg);
4071 #else
4072 std_expand_builtin_va_start (valist, nextarg);
4073 #endif
4074
4075 return const0_rtx;
4076 }
4077
4078 /* The "standard" implementation of va_arg: read the value from the
4079 current (padded) address and increment by the (padded) size. */
4080
4081 tree
4082 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4083 {
4084 tree addr, t, type_size, rounded_size, valist_tmp;
4085 unsigned HOST_WIDE_INT align, boundary;
4086 bool indirect;
4087
4088 #ifdef ARGS_GROW_DOWNWARD
4089 /* All of the alignment and movement below is for args-grow-up machines.
4090 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4091 implement their own specialized gimplify_va_arg_expr routines. */
4092 gcc_unreachable ();
4093 #endif
4094
4095 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4096 if (indirect)
4097 type = build_pointer_type (type);
4098
4099 align = PARM_BOUNDARY / BITS_PER_UNIT;
4100 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4101
4102 /* Hoist the valist value into a temporary for the moment. */
4103 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4104
4105 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4106 requires greater alignment, we must perform dynamic alignment. */
4107 if (boundary > align)
4108 {
4109 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4110 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4111 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4112 gimplify_and_add (t, pre_p);
4113
4114 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4115 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4116 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4117 gimplify_and_add (t, pre_p);
4118 }
4119 else
4120 boundary = align;
4121
4122 /* If the actual alignment is less than the alignment of the type,
4123 adjust the type accordingly so that we don't assume strict alignment
4124 when deferencing the pointer. */
4125 boundary *= BITS_PER_UNIT;
4126 if (boundary < TYPE_ALIGN (type))
4127 {
4128 type = build_variant_type_copy (type);
4129 TYPE_ALIGN (type) = boundary;
4130 }
4131
4132 /* Compute the rounded size of the type. */
4133 type_size = size_in_bytes (type);
4134 rounded_size = round_up (type_size, align);
4135
4136 /* Reduce rounded_size so it's sharable with the postqueue. */
4137 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4138
4139 /* Get AP. */
4140 addr = valist_tmp;
4141 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4142 {
4143 /* Small args are padded downward. */
4144 t = fold (build2 (GT_EXPR, sizetype, rounded_size, size_int (align)));
4145 t = fold (build3 (COND_EXPR, sizetype, t, size_zero_node,
4146 size_binop (MINUS_EXPR, rounded_size, type_size)));
4147 t = fold_convert (TREE_TYPE (addr), t);
4148 addr = fold (build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t));
4149 }
4150
4151 /* Compute new value for AP. */
4152 t = fold_convert (TREE_TYPE (valist), rounded_size);
4153 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4154 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4155 gimplify_and_add (t, pre_p);
4156
4157 addr = fold_convert (build_pointer_type (type), addr);
4158
4159 if (indirect)
4160 addr = build_va_arg_indirect_ref (addr);
4161
4162 return build_va_arg_indirect_ref (addr);
4163 }
4164
4165 /* Build an indirect-ref expression over the given TREE, which represents a
4166 piece of a va_arg() expansion. */
4167 tree
4168 build_va_arg_indirect_ref (tree addr)
4169 {
4170 addr = build_fold_indirect_ref (addr);
4171
4172 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4173 mf_mark (addr);
4174
4175 return addr;
4176 }
4177
4178 /* Return a dummy expression of type TYPE in order to keep going after an
4179 error. */
4180
4181 static tree
4182 dummy_object (tree type)
4183 {
4184 tree t = convert (build_pointer_type (type), null_pointer_node);
4185 return build1 (INDIRECT_REF, type, t);
4186 }
4187
4188 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4189 builtin function, but a very special sort of operator. */
4190
4191 enum gimplify_status
4192 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4193 {
4194 tree promoted_type, want_va_type, have_va_type;
4195 tree valist = TREE_OPERAND (*expr_p, 0);
4196 tree type = TREE_TYPE (*expr_p);
4197 tree t;
4198
4199 /* Verify that valist is of the proper type. */
4200 want_va_type = va_list_type_node;
4201 have_va_type = TREE_TYPE (valist);
4202
4203 if (have_va_type == error_mark_node)
4204 return GS_ERROR;
4205
4206 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4207 {
4208 /* If va_list is an array type, the argument may have decayed
4209 to a pointer type, e.g. by being passed to another function.
4210 In that case, unwrap both types so that we can compare the
4211 underlying records. */
4212 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4213 || POINTER_TYPE_P (have_va_type))
4214 {
4215 want_va_type = TREE_TYPE (want_va_type);
4216 have_va_type = TREE_TYPE (have_va_type);
4217 }
4218 }
4219
4220 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4221 {
4222 error ("first argument to %<va_arg%> not of type %<va_list%>");
4223 return GS_ERROR;
4224 }
4225
4226 /* Generate a diagnostic for requesting data of a type that cannot
4227 be passed through `...' due to type promotion at the call site. */
4228 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4229 != type)
4230 {
4231 static bool gave_help;
4232
4233 /* Unfortunately, this is merely undefined, rather than a constraint
4234 violation, so we cannot make this an error. If this call is never
4235 executed, the program is still strictly conforming. */
4236 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4237 type, promoted_type);
4238 if (! gave_help)
4239 {
4240 gave_help = true;
4241 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4242 promoted_type, type);
4243 }
4244
4245 /* We can, however, treat "undefined" any way we please.
4246 Call abort to encourage the user to fix the program. */
4247 inform ("if this code is reached, the program will abort");
4248 t = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
4249 NULL);
4250 append_to_statement_list (t, pre_p);
4251
4252 /* This is dead code, but go ahead and finish so that the
4253 mode of the result comes out right. */
4254 *expr_p = dummy_object (type);
4255 return GS_ALL_DONE;
4256 }
4257 else
4258 {
4259 /* Make it easier for the backends by protecting the valist argument
4260 from multiple evaluations. */
4261 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4262 {
4263 /* For this case, the backends will be expecting a pointer to
4264 TREE_TYPE (va_list_type_node), but it's possible we've
4265 actually been given an array (an actual va_list_type_node).
4266 So fix it. */
4267 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4268 {
4269 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4270 valist = build_fold_addr_expr_with_type (valist, p1);
4271 }
4272 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4273 }
4274 else
4275 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4276
4277 if (!targetm.gimplify_va_arg_expr)
4278 /* FIXME:Once most targets are converted we should merely
4279 assert this is non-null. */
4280 return GS_ALL_DONE;
4281
4282 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4283 return GS_OK;
4284 }
4285 }
4286
4287 /* Expand ARGLIST, from a call to __builtin_va_end. */
4288
4289 static rtx
4290 expand_builtin_va_end (tree arglist)
4291 {
4292 tree valist = TREE_VALUE (arglist);
4293
4294 /* Evaluate for side effects, if needed. I hate macros that don't
4295 do that. */
4296 if (TREE_SIDE_EFFECTS (valist))
4297 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4298
4299 return const0_rtx;
4300 }
4301
4302 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4303 builtin rather than just as an assignment in stdarg.h because of the
4304 nastiness of array-type va_list types. */
4305
4306 static rtx
4307 expand_builtin_va_copy (tree arglist)
4308 {
4309 tree dst, src, t;
4310
4311 dst = TREE_VALUE (arglist);
4312 src = TREE_VALUE (TREE_CHAIN (arglist));
4313
4314 dst = stabilize_va_list (dst, 1);
4315 src = stabilize_va_list (src, 0);
4316
4317 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4318 {
4319 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4320 TREE_SIDE_EFFECTS (t) = 1;
4321 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4322 }
4323 else
4324 {
4325 rtx dstb, srcb, size;
4326
4327 /* Evaluate to pointers. */
4328 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4329 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4330 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4331 VOIDmode, EXPAND_NORMAL);
4332
4333 dstb = convert_memory_address (Pmode, dstb);
4334 srcb = convert_memory_address (Pmode, srcb);
4335
4336 /* "Dereference" to BLKmode memories. */
4337 dstb = gen_rtx_MEM (BLKmode, dstb);
4338 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4339 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4340 srcb = gen_rtx_MEM (BLKmode, srcb);
4341 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4342 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4343
4344 /* Copy. */
4345 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4346 }
4347
4348 return const0_rtx;
4349 }
4350
4351 /* Expand a call to one of the builtin functions __builtin_frame_address or
4352 __builtin_return_address. */
4353
4354 static rtx
4355 expand_builtin_frame_address (tree fndecl, tree arglist)
4356 {
4357 /* The argument must be a nonnegative integer constant.
4358 It counts the number of frames to scan up the stack.
4359 The value is the return address saved in that frame. */
4360 if (arglist == 0)
4361 /* Warning about missing arg was already issued. */
4362 return const0_rtx;
4363 else if (! host_integerp (TREE_VALUE (arglist), 1))
4364 {
4365 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4366 error ("invalid argument to %<__builtin_frame_address%>");
4367 else
4368 error ("invalid argument to %<__builtin_return_address%>");
4369 return const0_rtx;
4370 }
4371 else
4372 {
4373 rtx tem
4374 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4375 tree_low_cst (TREE_VALUE (arglist), 1));
4376
4377 /* Some ports cannot access arbitrary stack frames. */
4378 if (tem == NULL)
4379 {
4380 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4381 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4382 else
4383 warning (0, "unsupported argument to %<__builtin_return_address%>");
4384 return const0_rtx;
4385 }
4386
4387 /* For __builtin_frame_address, return what we've got. */
4388 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4389 return tem;
4390
4391 if (!REG_P (tem)
4392 && ! CONSTANT_P (tem))
4393 tem = copy_to_mode_reg (Pmode, tem);
4394 return tem;
4395 }
4396 }
4397
4398 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4399 we failed and the caller should emit a normal call, otherwise try to get
4400 the result in TARGET, if convenient. */
4401
4402 static rtx
4403 expand_builtin_alloca (tree arglist, rtx target)
4404 {
4405 rtx op0;
4406 rtx result;
4407
4408 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4409 should always expand to function calls. These can be intercepted
4410 in libmudflap. */
4411 if (flag_mudflap)
4412 return 0;
4413
4414 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4415 return 0;
4416
4417 /* Compute the argument. */
4418 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4419
4420 /* Allocate the desired space. */
4421 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4422 result = convert_memory_address (ptr_mode, result);
4423
4424 return result;
4425 }
4426
4427 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4428 Return 0 if a normal call should be emitted rather than expanding the
4429 function in-line. If convenient, the result should be placed in TARGET.
4430 SUBTARGET may be used as the target for computing one of EXP's operands. */
4431
4432 static rtx
4433 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4434 rtx subtarget, optab op_optab)
4435 {
4436 rtx op0;
4437 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4438 return 0;
4439
4440 /* Compute the argument. */
4441 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4442 /* Compute op, into TARGET if possible.
4443 Set TARGET to wherever the result comes back. */
4444 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4445 op_optab, op0, target, 1);
4446 gcc_assert (target);
4447
4448 return convert_to_mode (target_mode, target, 0);
4449 }
4450
4451 /* If the string passed to fputs is a constant and is one character
4452 long, we attempt to transform this call into __builtin_fputc(). */
4453
4454 static rtx
4455 expand_builtin_fputs (tree arglist, rtx target, bool unlocked)
4456 {
4457 /* Verify the arguments in the original call. */
4458 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4459 {
4460 tree result = fold_builtin_fputs (arglist, (target == const0_rtx),
4461 unlocked, NULL_TREE);
4462 if (result)
4463 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
4464 }
4465 return 0;
4466 }
4467
4468 /* Expand a call to __builtin_expect. We return our argument and emit a
4469 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4470 a non-jump context. */
4471
4472 static rtx
4473 expand_builtin_expect (tree arglist, rtx target)
4474 {
4475 tree exp, c;
4476 rtx note, rtx_c;
4477
4478 if (arglist == NULL_TREE
4479 || TREE_CHAIN (arglist) == NULL_TREE)
4480 return const0_rtx;
4481 exp = TREE_VALUE (arglist);
4482 c = TREE_VALUE (TREE_CHAIN (arglist));
4483
4484 if (TREE_CODE (c) != INTEGER_CST)
4485 {
4486 error ("second argument to %<__builtin_expect%> must be a constant");
4487 c = integer_zero_node;
4488 }
4489
4490 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4491
4492 /* Don't bother with expected value notes for integral constants. */
4493 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4494 {
4495 /* We do need to force this into a register so that we can be
4496 moderately sure to be able to correctly interpret the branch
4497 condition later. */
4498 target = force_reg (GET_MODE (target), target);
4499
4500 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4501
4502 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4503 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4504 }
4505
4506 return target;
4507 }
4508
4509 /* Like expand_builtin_expect, except do this in a jump context. This is
4510 called from do_jump if the conditional is a __builtin_expect. Return either
4511 a list of insns to emit the jump or NULL if we cannot optimize
4512 __builtin_expect. We need to optimize this at jump time so that machines
4513 like the PowerPC don't turn the test into a SCC operation, and then jump
4514 based on the test being 0/1. */
4515
4516 rtx
4517 expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4518 {
4519 tree arglist = TREE_OPERAND (exp, 1);
4520 tree arg0 = TREE_VALUE (arglist);
4521 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4522 rtx ret = NULL_RTX;
4523
4524 /* Only handle __builtin_expect (test, 0) and
4525 __builtin_expect (test, 1). */
4526 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4527 && (integer_zerop (arg1) || integer_onep (arg1)))
4528 {
4529 rtx insn, drop_through_label, temp;
4530
4531 /* Expand the jump insns. */
4532 start_sequence ();
4533 do_jump (arg0, if_false_label, if_true_label);
4534 ret = get_insns ();
4535
4536 drop_through_label = get_last_insn ();
4537 if (drop_through_label && NOTE_P (drop_through_label))
4538 drop_through_label = prev_nonnote_insn (drop_through_label);
4539 if (drop_through_label && !LABEL_P (drop_through_label))
4540 drop_through_label = NULL_RTX;
4541 end_sequence ();
4542
4543 if (! if_true_label)
4544 if_true_label = drop_through_label;
4545 if (! if_false_label)
4546 if_false_label = drop_through_label;
4547
4548 /* Go through and add the expect's to each of the conditional jumps. */
4549 insn = ret;
4550 while (insn != NULL_RTX)
4551 {
4552 rtx next = NEXT_INSN (insn);
4553
4554 if (JUMP_P (insn) && any_condjump_p (insn))
4555 {
4556 rtx ifelse = SET_SRC (pc_set (insn));
4557 rtx then_dest = XEXP (ifelse, 1);
4558 rtx else_dest = XEXP (ifelse, 2);
4559 int taken = -1;
4560
4561 /* First check if we recognize any of the labels. */
4562 if (GET_CODE (then_dest) == LABEL_REF
4563 && XEXP (then_dest, 0) == if_true_label)
4564 taken = 1;
4565 else if (GET_CODE (then_dest) == LABEL_REF
4566 && XEXP (then_dest, 0) == if_false_label)
4567 taken = 0;
4568 else if (GET_CODE (else_dest) == LABEL_REF
4569 && XEXP (else_dest, 0) == if_false_label)
4570 taken = 1;
4571 else if (GET_CODE (else_dest) == LABEL_REF
4572 && XEXP (else_dest, 0) == if_true_label)
4573 taken = 0;
4574 /* Otherwise check where we drop through. */
4575 else if (else_dest == pc_rtx)
4576 {
4577 if (next && NOTE_P (next))
4578 next = next_nonnote_insn (next);
4579
4580 if (next && JUMP_P (next)
4581 && any_uncondjump_p (next))
4582 temp = XEXP (SET_SRC (pc_set (next)), 0);
4583 else
4584 temp = next;
4585
4586 /* TEMP is either a CODE_LABEL, NULL_RTX or something
4587 else that can't possibly match either target label. */
4588 if (temp == if_false_label)
4589 taken = 1;
4590 else if (temp == if_true_label)
4591 taken = 0;
4592 }
4593 else if (then_dest == pc_rtx)
4594 {
4595 if (next && NOTE_P (next))
4596 next = next_nonnote_insn (next);
4597
4598 if (next && JUMP_P (next)
4599 && any_uncondjump_p (next))
4600 temp = XEXP (SET_SRC (pc_set (next)), 0);
4601 else
4602 temp = next;
4603
4604 if (temp == if_false_label)
4605 taken = 0;
4606 else if (temp == if_true_label)
4607 taken = 1;
4608 }
4609
4610 if (taken != -1)
4611 {
4612 /* If the test is expected to fail, reverse the
4613 probabilities. */
4614 if (integer_zerop (arg1))
4615 taken = 1 - taken;
4616 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4617 }
4618 }
4619
4620 insn = next;
4621 }
4622 }
4623
4624 return ret;
4625 }
4626
4627 static void
4628 expand_builtin_trap (void)
4629 {
4630 #ifdef HAVE_trap
4631 if (HAVE_trap)
4632 emit_insn (gen_trap ());
4633 else
4634 #endif
4635 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4636 emit_barrier ();
4637 }
4638
4639 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4640 Return 0 if a normal call should be emitted rather than expanding
4641 the function inline. If convenient, the result should be placed
4642 in TARGET. SUBTARGET may be used as the target for computing
4643 the operand. */
4644
4645 static rtx
4646 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4647 {
4648 enum machine_mode mode;
4649 tree arg;
4650 rtx op0;
4651
4652 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4653 return 0;
4654
4655 arg = TREE_VALUE (arglist);
4656 mode = TYPE_MODE (TREE_TYPE (arg));
4657 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4658 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4659 }
4660
4661 /* Expand a call to copysign, copysignf, or copysignl with arguments ARGLIST.
4662 Return NULL is a normal call should be emitted rather than expanding the
4663 function inline. If convenient, the result should be placed in TARGET.
4664 SUBTARGET may be used as the target for computing the operand. */
4665
4666 static rtx
4667 expand_builtin_copysign (tree arglist, rtx target, rtx subtarget)
4668 {
4669 rtx op0, op1;
4670 tree arg;
4671
4672 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4673 return 0;
4674
4675 arg = TREE_VALUE (arglist);
4676 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4677
4678 arg = TREE_VALUE (TREE_CHAIN (arglist));
4679 op1 = expand_expr (arg, NULL, VOIDmode, 0);
4680
4681 return expand_copysign (op0, op1, target);
4682 }
4683
4684 /* Create a new constant string literal and return a char* pointer to it.
4685 The STRING_CST value is the LEN characters at STR. */
4686 static tree
4687 build_string_literal (int len, const char *str)
4688 {
4689 tree t, elem, index, type;
4690
4691 t = build_string (len, str);
4692 elem = build_type_variant (char_type_node, 1, 0);
4693 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
4694 type = build_array_type (elem, index);
4695 TREE_TYPE (t) = type;
4696 TREE_CONSTANT (t) = 1;
4697 TREE_INVARIANT (t) = 1;
4698 TREE_READONLY (t) = 1;
4699 TREE_STATIC (t) = 1;
4700
4701 type = build_pointer_type (type);
4702 t = build1 (ADDR_EXPR, type, t);
4703
4704 type = build_pointer_type (elem);
4705 t = build1 (NOP_EXPR, type, t);
4706 return t;
4707 }
4708
4709 /* Expand EXP, a call to printf or printf_unlocked.
4710 Return 0 if a normal call should be emitted rather than transforming
4711 the function inline. If convenient, the result should be placed in
4712 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
4713 call. */
4714 static rtx
4715 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
4716 bool unlocked)
4717 {
4718 tree arglist = TREE_OPERAND (exp, 1);
4719 tree fn_putchar = unlocked
4720 ? implicit_built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
4721 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
4722 tree fn_puts = unlocked ? implicit_built_in_decls[BUILT_IN_PUTS_UNLOCKED]
4723 : implicit_built_in_decls[BUILT_IN_PUTS];
4724 const char *fmt_str;
4725 tree fn, fmt, arg;
4726
4727 /* If the return value is used, don't do the transformation. */
4728 if (target != const0_rtx)
4729 return 0;
4730
4731 /* Verify the required arguments in the original call. */
4732 if (! arglist)
4733 return 0;
4734 fmt = TREE_VALUE (arglist);
4735 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4736 return 0;
4737 arglist = TREE_CHAIN (arglist);
4738
4739 /* Check whether the format is a literal string constant. */
4740 fmt_str = c_getstr (fmt);
4741 if (fmt_str == NULL)
4742 return 0;
4743
4744 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4745 if (strcmp (fmt_str, "%s\n") == 0)
4746 {
4747 if (! arglist
4748 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
4749 || TREE_CHAIN (arglist))
4750 return 0;
4751 fn = fn_puts;
4752 }
4753 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4754 else if (strcmp (fmt_str, "%c") == 0)
4755 {
4756 if (! arglist
4757 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
4758 || TREE_CHAIN (arglist))
4759 return 0;
4760 fn = fn_putchar;
4761 }
4762 else
4763 {
4764 /* We can't handle anything else with % args or %% ... yet. */
4765 if (strchr (fmt_str, '%'))
4766 return 0;
4767
4768 if (arglist)
4769 return 0;
4770
4771 /* If the format specifier was "", printf does nothing. */
4772 if (fmt_str[0] == '\0')
4773 return const0_rtx;
4774 /* If the format specifier has length of 1, call putchar. */
4775 if (fmt_str[1] == '\0')
4776 {
4777 /* Given printf("c"), (where c is any one character,)
4778 convert "c"[0] to an int and pass that to the replacement
4779 function. */
4780 arg = build_int_cst (NULL_TREE, fmt_str[0]);
4781 arglist = build_tree_list (NULL_TREE, arg);
4782 fn = fn_putchar;
4783 }
4784 else
4785 {
4786 /* If the format specifier was "string\n", call puts("string"). */
4787 size_t len = strlen (fmt_str);
4788 if (fmt_str[len - 1] == '\n')
4789 {
4790 /* Create a NUL-terminated string that's one char shorter
4791 than the original, stripping off the trailing '\n'. */
4792 char *newstr = alloca (len);
4793 memcpy (newstr, fmt_str, len - 1);
4794 newstr[len - 1] = 0;
4795
4796 arg = build_string_literal (len, newstr);
4797 arglist = build_tree_list (NULL_TREE, arg);
4798 fn = fn_puts;
4799 }
4800 else
4801 /* We'd like to arrange to call fputs(string,stdout) here,
4802 but we need stdout and don't have a way to get it yet. */
4803 return 0;
4804 }
4805 }
4806
4807 if (!fn)
4808 return 0;
4809 fn = build_function_call_expr (fn, arglist);
4810 if (TREE_CODE (fn) == CALL_EXPR)
4811 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4812 return expand_expr (fn, target, mode, EXPAND_NORMAL);
4813 }
4814
4815 /* Expand EXP, a call to fprintf or fprintf_unlocked.
4816 Return 0 if a normal call should be emitted rather than transforming
4817 the function inline. If convenient, the result should be placed in
4818 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
4819 call. */
4820 static rtx
4821 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
4822 bool unlocked)
4823 {
4824 tree arglist = TREE_OPERAND (exp, 1);
4825 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4826 : implicit_built_in_decls[BUILT_IN_FPUTC];
4827 tree fn_fputs = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
4828 : implicit_built_in_decls[BUILT_IN_FPUTS];
4829 const char *fmt_str;
4830 tree fn, fmt, fp, arg;
4831
4832 /* If the return value is used, don't do the transformation. */
4833 if (target != const0_rtx)
4834 return 0;
4835
4836 /* Verify the required arguments in the original call. */
4837 if (! arglist)
4838 return 0;
4839 fp = TREE_VALUE (arglist);
4840 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
4841 return 0;
4842 arglist = TREE_CHAIN (arglist);
4843 if (! arglist)
4844 return 0;
4845 fmt = TREE_VALUE (arglist);
4846 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4847 return 0;
4848 arglist = TREE_CHAIN (arglist);
4849
4850 /* Check whether the format is a literal string constant. */
4851 fmt_str = c_getstr (fmt);
4852 if (fmt_str == NULL)
4853 return 0;
4854
4855 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
4856 if (strcmp (fmt_str, "%s") == 0)
4857 {
4858 if (! arglist
4859 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
4860 || TREE_CHAIN (arglist))
4861 return 0;
4862 arg = TREE_VALUE (arglist);
4863 arglist = build_tree_list (NULL_TREE, fp);
4864 arglist = tree_cons (NULL_TREE, arg, arglist);
4865 fn = fn_fputs;
4866 }
4867 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
4868 else if (strcmp (fmt_str, "%c") == 0)
4869 {
4870 if (! arglist
4871 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
4872 || TREE_CHAIN (arglist))
4873 return 0;
4874 arg = TREE_VALUE (arglist);
4875 arglist = build_tree_list (NULL_TREE, fp);
4876 arglist = tree_cons (NULL_TREE, arg, arglist);
4877 fn = fn_fputc;
4878 }
4879 else
4880 {
4881 /* We can't handle anything else with % args or %% ... yet. */
4882 if (strchr (fmt_str, '%'))
4883 return 0;
4884
4885 if (arglist)
4886 return 0;
4887
4888 /* If the format specifier was "", fprintf does nothing. */
4889 if (fmt_str[0] == '\0')
4890 {
4891 /* Evaluate and ignore FILE* argument for side-effects. */
4892 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4893 return const0_rtx;
4894 }
4895
4896 /* When "string" doesn't contain %, replace all cases of
4897 fprintf(stream,string) with fputs(string,stream). The fputs
4898 builtin will take care of special cases like length == 1. */
4899 arglist = build_tree_list (NULL_TREE, fp);
4900 arglist = tree_cons (NULL_TREE, fmt, arglist);
4901 fn = fn_fputs;
4902 }
4903
4904 if (!fn)
4905 return 0;
4906 fn = build_function_call_expr (fn, arglist);
4907 if (TREE_CODE (fn) == CALL_EXPR)
4908 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4909 return expand_expr (fn, target, mode, EXPAND_NORMAL);
4910 }
4911
4912 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4913 a normal call should be emitted rather than expanding the function
4914 inline. If convenient, the result should be placed in TARGET with
4915 mode MODE. */
4916
4917 static rtx
4918 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4919 {
4920 tree orig_arglist, dest, fmt;
4921 const char *fmt_str;
4922
4923 orig_arglist = arglist;
4924
4925 /* Verify the required arguments in the original call. */
4926 if (! arglist)
4927 return 0;
4928 dest = TREE_VALUE (arglist);
4929 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
4930 return 0;
4931 arglist = TREE_CHAIN (arglist);
4932 if (! arglist)
4933 return 0;
4934 fmt = TREE_VALUE (arglist);
4935 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4936 return 0;
4937 arglist = TREE_CHAIN (arglist);
4938
4939 /* Check whether the format is a literal string constant. */
4940 fmt_str = c_getstr (fmt);
4941 if (fmt_str == NULL)
4942 return 0;
4943
4944 /* If the format doesn't contain % args or %%, use strcpy. */
4945 if (strchr (fmt_str, '%') == 0)
4946 {
4947 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4948 tree exp;
4949
4950 if (arglist || ! fn)
4951 return 0;
4952 expand_expr (build_function_call_expr (fn, orig_arglist),
4953 const0_rtx, VOIDmode, EXPAND_NORMAL);
4954 if (target == const0_rtx)
4955 return const0_rtx;
4956 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
4957 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4958 }
4959 /* If the format is "%s", use strcpy if the result isn't used. */
4960 else if (strcmp (fmt_str, "%s") == 0)
4961 {
4962 tree fn, arg, len;
4963 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4964
4965 if (! fn)
4966 return 0;
4967
4968 if (! arglist || TREE_CHAIN (arglist))
4969 return 0;
4970 arg = TREE_VALUE (arglist);
4971 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
4972 return 0;
4973
4974 if (target != const0_rtx)
4975 {
4976 len = c_strlen (arg, 1);
4977 if (! len || TREE_CODE (len) != INTEGER_CST)
4978 return 0;
4979 }
4980 else
4981 len = NULL_TREE;
4982
4983 arglist = build_tree_list (NULL_TREE, arg);
4984 arglist = tree_cons (NULL_TREE, dest, arglist);
4985 expand_expr (build_function_call_expr (fn, arglist),
4986 const0_rtx, VOIDmode, EXPAND_NORMAL);
4987
4988 if (target == const0_rtx)
4989 return const0_rtx;
4990 return expand_expr (len, target, mode, EXPAND_NORMAL);
4991 }
4992
4993 return 0;
4994 }
4995
4996 /* Expand a call to either the entry or exit function profiler. */
4997
4998 static rtx
4999 expand_builtin_profile_func (bool exitp)
5000 {
5001 rtx this, which;
5002
5003 this = DECL_RTL (current_function_decl);
5004 gcc_assert (MEM_P (this));
5005 this = XEXP (this, 0);
5006
5007 if (exitp)
5008 which = profile_function_exit_libfunc;
5009 else
5010 which = profile_function_entry_libfunc;
5011
5012 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5013 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5014 0),
5015 Pmode);
5016
5017 return const0_rtx;
5018 }
5019
5020 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5021
5022 static rtx
5023 round_trampoline_addr (rtx tramp)
5024 {
5025 rtx temp, addend, mask;
5026
5027 /* If we don't need too much alignment, we'll have been guaranteed
5028 proper alignment by get_trampoline_type. */
5029 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5030 return tramp;
5031
5032 /* Round address up to desired boundary. */
5033 temp = gen_reg_rtx (Pmode);
5034 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5035 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5036
5037 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5038 temp, 0, OPTAB_LIB_WIDEN);
5039 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5040 temp, 0, OPTAB_LIB_WIDEN);
5041
5042 return tramp;
5043 }
5044
5045 static rtx
5046 expand_builtin_init_trampoline (tree arglist)
5047 {
5048 tree t_tramp, t_func, t_chain;
5049 rtx r_tramp, r_func, r_chain;
5050 #ifdef TRAMPOLINE_TEMPLATE
5051 rtx blktramp;
5052 #endif
5053
5054 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE,
5055 POINTER_TYPE, VOID_TYPE))
5056 return NULL_RTX;
5057
5058 t_tramp = TREE_VALUE (arglist);
5059 arglist = TREE_CHAIN (arglist);
5060 t_func = TREE_VALUE (arglist);
5061 arglist = TREE_CHAIN (arglist);
5062 t_chain = TREE_VALUE (arglist);
5063
5064 r_tramp = expand_expr (t_tramp, NULL_RTX, VOIDmode, 0);
5065 r_func = expand_expr (t_func, NULL_RTX, VOIDmode, 0);
5066 r_chain = expand_expr (t_chain, NULL_RTX, VOIDmode, 0);
5067
5068 /* Generate insns to initialize the trampoline. */
5069 r_tramp = round_trampoline_addr (r_tramp);
5070 #ifdef TRAMPOLINE_TEMPLATE
5071 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5072 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5073 emit_block_move (blktramp, assemble_trampoline_template (),
5074 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5075 #endif
5076 trampolines_created = 1;
5077 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5078
5079 return const0_rtx;
5080 }
5081
5082 static rtx
5083 expand_builtin_adjust_trampoline (tree arglist)
5084 {
5085 rtx tramp;
5086
5087 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5088 return NULL_RTX;
5089
5090 tramp = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5091 tramp = round_trampoline_addr (tramp);
5092 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5093 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5094 #endif
5095
5096 return tramp;
5097 }
5098
5099 /* Expand a call to the built-in signbit, signbitf or signbitl function.
5100 Return NULL_RTX if a normal call should be emitted rather than expanding
5101 the function in-line. EXP is the expression that is a call to the builtin
5102 function; if convenient, the result should be placed in TARGET. */
5103
5104 static rtx
5105 expand_builtin_signbit (tree exp, rtx target)
5106 {
5107 const struct real_format *fmt;
5108 enum machine_mode fmode, imode, rmode;
5109 HOST_WIDE_INT hi, lo;
5110 tree arg, arglist;
5111 int word, bitpos;
5112 rtx temp;
5113
5114 arglist = TREE_OPERAND (exp, 1);
5115 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5116 return 0;
5117
5118 arg = TREE_VALUE (arglist);
5119 fmode = TYPE_MODE (TREE_TYPE (arg));
5120 rmode = TYPE_MODE (TREE_TYPE (exp));
5121 fmt = REAL_MODE_FORMAT (fmode);
5122
5123 /* For floating point formats without a sign bit, implement signbit
5124 as "ARG < 0.0". */
5125 bitpos = fmt->signbit_ro;
5126 if (bitpos < 0)
5127 {
5128 /* But we can't do this if the format supports signed zero. */
5129 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5130 return 0;
5131
5132 arg = fold (build2 (LT_EXPR, TREE_TYPE (exp), arg,
5133 build_real (TREE_TYPE (arg), dconst0)));
5134 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5135 }
5136
5137 temp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
5138 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5139 {
5140 imode = int_mode_for_mode (fmode);
5141 if (imode == BLKmode)
5142 return 0;
5143 temp = gen_lowpart (imode, temp);
5144 }
5145 else
5146 {
5147 imode = word_mode;
5148 /* Handle targets with different FP word orders. */
5149 if (FLOAT_WORDS_BIG_ENDIAN)
5150 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5151 else
5152 word = bitpos / BITS_PER_WORD;
5153 temp = operand_subword_force (temp, word, fmode);
5154 bitpos = bitpos % BITS_PER_WORD;
5155 }
5156
5157 /* Force the intermediate word_mode (or narrower) result into a
5158 register. This avoids attempting to create paradoxical SUBREGs
5159 of floating point modes below. */
5160 temp = force_reg (imode, temp);
5161
5162 /* If the bitpos is within the "result mode" lowpart, the operation
5163 can be implement with a single bitwise AND. Otherwise, we need
5164 a right shift and an AND. */
5165
5166 if (bitpos < GET_MODE_BITSIZE (rmode))
5167 {
5168 if (bitpos < HOST_BITS_PER_WIDE_INT)
5169 {
5170 hi = 0;
5171 lo = (HOST_WIDE_INT) 1 << bitpos;
5172 }
5173 else
5174 {
5175 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5176 lo = 0;
5177 }
5178
5179 if (imode != rmode)
5180 temp = gen_lowpart (rmode, temp);
5181 temp = expand_binop (rmode, and_optab, temp,
5182 immed_double_const (lo, hi, rmode),
5183 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5184 }
5185 else
5186 {
5187 /* Perform a logical right shift to place the signbit in the least
5188 significant bit, then truncate the result to the desired mode
5189 and mask just this bit. */
5190 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5191 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5192 temp = gen_lowpart (rmode, temp);
5193 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5194 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5195 }
5196
5197 return temp;
5198 }
5199
5200 /* Expand fork or exec calls. TARGET is the desired target of the
5201 call. ARGLIST is the list of arguments of the call. FN is the
5202 identificator of the actual function. IGNORE is nonzero if the
5203 value is to be ignored. */
5204
5205 static rtx
5206 expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore)
5207 {
5208 tree id, decl;
5209 tree call;
5210
5211 /* If we are not profiling, just call the function. */
5212 if (!profile_arc_flag)
5213 return NULL_RTX;
5214
5215 /* Otherwise call the wrapper. This should be equivalent for the rest of
5216 compiler, so the code does not diverge, and the wrapper may run the
5217 code necessary for keeping the profiling sane. */
5218
5219 switch (DECL_FUNCTION_CODE (fn))
5220 {
5221 case BUILT_IN_FORK:
5222 id = get_identifier ("__gcov_fork");
5223 break;
5224
5225 case BUILT_IN_EXECL:
5226 id = get_identifier ("__gcov_execl");
5227 break;
5228
5229 case BUILT_IN_EXECV:
5230 id = get_identifier ("__gcov_execv");
5231 break;
5232
5233 case BUILT_IN_EXECLP:
5234 id = get_identifier ("__gcov_execlp");
5235 break;
5236
5237 case BUILT_IN_EXECLE:
5238 id = get_identifier ("__gcov_execle");
5239 break;
5240
5241 case BUILT_IN_EXECVP:
5242 id = get_identifier ("__gcov_execvp");
5243 break;
5244
5245 case BUILT_IN_EXECVE:
5246 id = get_identifier ("__gcov_execve");
5247 break;
5248
5249 default:
5250 gcc_unreachable ();
5251 }
5252
5253 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5254 DECL_EXTERNAL (decl) = 1;
5255 TREE_PUBLIC (decl) = 1;
5256 DECL_ARTIFICIAL (decl) = 1;
5257 TREE_NOTHROW (decl) = 1;
5258 call = build_function_call_expr (decl, arglist);
5259
5260 return expand_call (call, target, ignore);
5261 }
5262
5263 \f
5264 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5265 ARGLIST is the operands list to the function. CODE is the rtx code
5266 that corresponds to the arithmetic or logical operation from the name;
5267 an exception here is that NOT actually means NAND. TARGET is an optional
5268 place for us to store the results; AFTER is true if this is the
5269 fetch_and_xxx form. IGNORE is true if we don't actually care about
5270 the result of the operation at all. */
5271
5272 static rtx
5273 expand_builtin_sync_operation (tree arglist, enum rtx_code code, bool after,
5274 rtx target, bool ignore)
5275 {
5276 enum machine_mode mode;
5277 rtx addr, val, mem;
5278
5279 /* Expand the operands. */
5280 addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_SUM);
5281 mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
5282
5283 arglist = TREE_CHAIN (arglist);
5284 val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5285
5286 /* Note that we explicitly do not want any alias information for this
5287 memory, so that we kill all other live memories. Otherwise we don't
5288 satisfy the full barrier semantics of the intrinsic. */
5289 mem = validize_mem (gen_rtx_MEM (mode, addr));
5290 MEM_VOLATILE_P (mem) = 1;
5291
5292 if (ignore)
5293 return expand_sync_operation (mem, val, code);
5294 else
5295 return expand_sync_fetch_operation (mem, val, code, after, target);
5296 }
5297
5298 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5299 intrinsics. ARGLIST is the operands list to the function. IS_BOOL is
5300 true if this is the boolean form. TARGET is a place for us to store the
5301 results; this is NOT optional if IS_BOOL is true. */
5302
5303 static rtx
5304 expand_builtin_compare_and_swap (tree arglist, bool is_bool, rtx target)
5305 {
5306 enum machine_mode mode;
5307 rtx addr, old_val, new_val, mem;
5308
5309 /* Expand the operands. */
5310 addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_SUM);
5311 mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
5312
5313 arglist = TREE_CHAIN (arglist);
5314 old_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5315
5316 arglist = TREE_CHAIN (arglist);
5317 new_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5318
5319 /* Note that we explicitly do not want any alias information for this
5320 memory, so that we kill all other live memories. Otherwise we don't
5321 satisfy the full barrier semantics of the intrinsic. */
5322 mem = validize_mem (gen_rtx_MEM (mode, addr));
5323 MEM_VOLATILE_P (mem) = 1;
5324
5325 if (is_bool)
5326 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5327 else
5328 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5329 }
5330
5331 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5332 general form is actually an atomic exchange, and some targets only
5333 support a reduced form with the second argument being a constant 1.
5334 ARGLIST is the operands list to the function; TARGET is an optional
5335 place for us to store the results. */
5336
5337 static rtx
5338 expand_builtin_lock_test_and_set (tree arglist, rtx target)
5339 {
5340 enum machine_mode mode;
5341 rtx addr, val, mem;
5342
5343 /* Expand the operands. */
5344 addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_NORMAL);
5345 mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
5346
5347 arglist = TREE_CHAIN (arglist);
5348 val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5349
5350 /* Note that we explicitly do not want any alias information for this
5351 memory, so that we kill all other live memories. Otherwise we don't
5352 satisfy the barrier semantics of the intrinsic. */
5353 mem = validize_mem (gen_rtx_MEM (mode, addr));
5354 MEM_VOLATILE_P (mem) = 1;
5355
5356 return expand_sync_lock_test_and_set (mem, val, target);
5357 }
5358
5359 /* Expand the __sync_synchronize intrinsic. */
5360
5361 static void
5362 expand_builtin_synchronize (void)
5363 {
5364 rtx body;
5365
5366 #ifdef HAVE_memory_barrier
5367 if (HAVE_memory_barrier)
5368 {
5369 emit_insn (gen_memory_barrier ());
5370 return;
5371 }
5372 #endif
5373
5374 /* If no explicit memory barrier instruction is available, create an empty
5375 asm stmt that will prevent compiler movement across the barrier. */
5376 body = gen_rtx_ASM_INPUT (VOIDmode, "");
5377 MEM_VOLATILE_P (body) = 1;
5378 emit_insn (body);
5379 }
5380
5381 /* Expand the __sync_lock_release intrinsic. ARGLIST is the operands list
5382 to the function. */
5383
5384 static void
5385 expand_builtin_lock_release (tree arglist)
5386 {
5387 enum machine_mode mode;
5388 enum insn_code icode;
5389 rtx addr, val, mem, insn;
5390
5391 /* Expand the operands. */
5392 addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_NORMAL);
5393 mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
5394 val = const0_rtx;
5395
5396 /* Note that we explicitly do not want any alias information for this
5397 memory, so that we kill all other live memories. Otherwise we don't
5398 satisfy the barrier semantics of the intrinsic. */
5399 mem = validize_mem (gen_rtx_MEM (mode, addr));
5400 MEM_VOLATILE_P (mem) = 1;
5401
5402 /* If there is an explicit operation in the md file, use it. */
5403 icode = sync_lock_release[mode];
5404 if (icode != CODE_FOR_nothing)
5405 {
5406 if (!insn_data[icode].operand[1].predicate (val, mode))
5407 val = force_reg (mode, val);
5408
5409 insn = GEN_FCN (icode) (mem, val);
5410 if (insn)
5411 {
5412 emit_insn (insn);
5413 return;
5414 }
5415 }
5416
5417 /* Otherwise we can implement this operation by emitting a barrier
5418 followed by a store of zero. */
5419 expand_builtin_synchronize ();
5420 emit_move_insn (mem, val);
5421 }
5422 \f
5423 /* Expand an expression EXP that calls a built-in function,
5424 with result going to TARGET if that's convenient
5425 (and in mode MODE if that's convenient).
5426 SUBTARGET may be used as the target for computing one of EXP's operands.
5427 IGNORE is nonzero if the value is to be ignored. */
5428
5429 rtx
5430 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5431 int ignore)
5432 {
5433 tree fndecl = get_callee_fndecl (exp);
5434 tree arglist = TREE_OPERAND (exp, 1);
5435 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5436 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5437
5438 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5439 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5440
5441 /* When not optimizing, generate calls to library functions for a certain
5442 set of builtins. */
5443 if (!optimize
5444 && !called_as_built_in (fndecl)
5445 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5446 && fcode != BUILT_IN_ALLOCA)
5447 return expand_call (exp, target, ignore);
5448
5449 /* The built-in function expanders test for target == const0_rtx
5450 to determine whether the function's result will be ignored. */
5451 if (ignore)
5452 target = const0_rtx;
5453
5454 /* If the result of a pure or const built-in function is ignored, and
5455 none of its arguments are volatile, we can avoid expanding the
5456 built-in call and just evaluate the arguments for side-effects. */
5457 if (target == const0_rtx
5458 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5459 {
5460 bool volatilep = false;
5461 tree arg;
5462
5463 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
5464 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
5465 {
5466 volatilep = true;
5467 break;
5468 }
5469
5470 if (! volatilep)
5471 {
5472 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
5473 expand_expr (TREE_VALUE (arg), const0_rtx,
5474 VOIDmode, EXPAND_NORMAL);
5475 return const0_rtx;
5476 }
5477 }
5478
5479 switch (fcode)
5480 {
5481 case BUILT_IN_FABS:
5482 case BUILT_IN_FABSF:
5483 case BUILT_IN_FABSL:
5484 target = expand_builtin_fabs (arglist, target, subtarget);
5485 if (target)
5486 return target;
5487 break;
5488
5489 case BUILT_IN_COPYSIGN:
5490 case BUILT_IN_COPYSIGNF:
5491 case BUILT_IN_COPYSIGNL:
5492 target = expand_builtin_copysign (arglist, target, subtarget);
5493 if (target)
5494 return target;
5495 break;
5496
5497 /* Just do a normal library call if we were unable to fold
5498 the values. */
5499 case BUILT_IN_CABS:
5500 case BUILT_IN_CABSF:
5501 case BUILT_IN_CABSL:
5502 break;
5503
5504 case BUILT_IN_EXP:
5505 case BUILT_IN_EXPF:
5506 case BUILT_IN_EXPL:
5507 case BUILT_IN_EXP10:
5508 case BUILT_IN_EXP10F:
5509 case BUILT_IN_EXP10L:
5510 case BUILT_IN_POW10:
5511 case BUILT_IN_POW10F:
5512 case BUILT_IN_POW10L:
5513 case BUILT_IN_EXP2:
5514 case BUILT_IN_EXP2F:
5515 case BUILT_IN_EXP2L:
5516 case BUILT_IN_EXPM1:
5517 case BUILT_IN_EXPM1F:
5518 case BUILT_IN_EXPM1L:
5519 case BUILT_IN_LOGB:
5520 case BUILT_IN_LOGBF:
5521 case BUILT_IN_LOGBL:
5522 case BUILT_IN_ILOGB:
5523 case BUILT_IN_ILOGBF:
5524 case BUILT_IN_ILOGBL:
5525 case BUILT_IN_LOG:
5526 case BUILT_IN_LOGF:
5527 case BUILT_IN_LOGL:
5528 case BUILT_IN_LOG10:
5529 case BUILT_IN_LOG10F:
5530 case BUILT_IN_LOG10L:
5531 case BUILT_IN_LOG2:
5532 case BUILT_IN_LOG2F:
5533 case BUILT_IN_LOG2L:
5534 case BUILT_IN_LOG1P:
5535 case BUILT_IN_LOG1PF:
5536 case BUILT_IN_LOG1PL:
5537 case BUILT_IN_TAN:
5538 case BUILT_IN_TANF:
5539 case BUILT_IN_TANL:
5540 case BUILT_IN_ASIN:
5541 case BUILT_IN_ASINF:
5542 case BUILT_IN_ASINL:
5543 case BUILT_IN_ACOS:
5544 case BUILT_IN_ACOSF:
5545 case BUILT_IN_ACOSL:
5546 case BUILT_IN_ATAN:
5547 case BUILT_IN_ATANF:
5548 case BUILT_IN_ATANL:
5549 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5550 because of possible accuracy problems. */
5551 if (! flag_unsafe_math_optimizations)
5552 break;
5553 case BUILT_IN_SQRT:
5554 case BUILT_IN_SQRTF:
5555 case BUILT_IN_SQRTL:
5556 case BUILT_IN_FLOOR:
5557 case BUILT_IN_FLOORF:
5558 case BUILT_IN_FLOORL:
5559 case BUILT_IN_CEIL:
5560 case BUILT_IN_CEILF:
5561 case BUILT_IN_CEILL:
5562 case BUILT_IN_TRUNC:
5563 case BUILT_IN_TRUNCF:
5564 case BUILT_IN_TRUNCL:
5565 case BUILT_IN_ROUND:
5566 case BUILT_IN_ROUNDF:
5567 case BUILT_IN_ROUNDL:
5568 case BUILT_IN_NEARBYINT:
5569 case BUILT_IN_NEARBYINTF:
5570 case BUILT_IN_NEARBYINTL:
5571 case BUILT_IN_RINT:
5572 case BUILT_IN_RINTF:
5573 case BUILT_IN_RINTL:
5574 case BUILT_IN_LRINT:
5575 case BUILT_IN_LRINTF:
5576 case BUILT_IN_LRINTL:
5577 case BUILT_IN_LLRINT:
5578 case BUILT_IN_LLRINTF:
5579 case BUILT_IN_LLRINTL:
5580 target = expand_builtin_mathfn (exp, target, subtarget);
5581 if (target)
5582 return target;
5583 break;
5584
5585 case BUILT_IN_LCEIL:
5586 case BUILT_IN_LCEILF:
5587 case BUILT_IN_LCEILL:
5588 case BUILT_IN_LLCEIL:
5589 case BUILT_IN_LLCEILF:
5590 case BUILT_IN_LLCEILL:
5591 case BUILT_IN_LFLOOR:
5592 case BUILT_IN_LFLOORF:
5593 case BUILT_IN_LFLOORL:
5594 case BUILT_IN_LLFLOOR:
5595 case BUILT_IN_LLFLOORF:
5596 case BUILT_IN_LLFLOORL:
5597 target = expand_builtin_int_roundingfn (exp, target, subtarget);
5598 if (target)
5599 return target;
5600 break;
5601
5602 case BUILT_IN_POW:
5603 case BUILT_IN_POWF:
5604 case BUILT_IN_POWL:
5605 target = expand_builtin_pow (exp, target, subtarget);
5606 if (target)
5607 return target;
5608 break;
5609
5610 case BUILT_IN_POWI:
5611 case BUILT_IN_POWIF:
5612 case BUILT_IN_POWIL:
5613 target = expand_builtin_powi (exp, target, subtarget);
5614 if (target)
5615 return target;
5616 break;
5617
5618 case BUILT_IN_ATAN2:
5619 case BUILT_IN_ATAN2F:
5620 case BUILT_IN_ATAN2L:
5621 case BUILT_IN_LDEXP:
5622 case BUILT_IN_LDEXPF:
5623 case BUILT_IN_LDEXPL:
5624 case BUILT_IN_FMOD:
5625 case BUILT_IN_FMODF:
5626 case BUILT_IN_FMODL:
5627 case BUILT_IN_DREM:
5628 case BUILT_IN_DREMF:
5629 case BUILT_IN_DREML:
5630 if (! flag_unsafe_math_optimizations)
5631 break;
5632 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5633 if (target)
5634 return target;
5635 break;
5636
5637 case BUILT_IN_SIN:
5638 case BUILT_IN_SINF:
5639 case BUILT_IN_SINL:
5640 case BUILT_IN_COS:
5641 case BUILT_IN_COSF:
5642 case BUILT_IN_COSL:
5643 if (! flag_unsafe_math_optimizations)
5644 break;
5645 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5646 if (target)
5647 return target;
5648 break;
5649
5650 case BUILT_IN_APPLY_ARGS:
5651 return expand_builtin_apply_args ();
5652
5653 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5654 FUNCTION with a copy of the parameters described by
5655 ARGUMENTS, and ARGSIZE. It returns a block of memory
5656 allocated on the stack into which is stored all the registers
5657 that might possibly be used for returning the result of a
5658 function. ARGUMENTS is the value returned by
5659 __builtin_apply_args. ARGSIZE is the number of bytes of
5660 arguments that must be copied. ??? How should this value be
5661 computed? We'll also need a safe worst case value for varargs
5662 functions. */
5663 case BUILT_IN_APPLY:
5664 if (!validate_arglist (arglist, POINTER_TYPE,
5665 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5666 && !validate_arglist (arglist, REFERENCE_TYPE,
5667 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5668 return const0_rtx;
5669 else
5670 {
5671 int i;
5672 tree t;
5673 rtx ops[3];
5674
5675 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5676 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5677
5678 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5679 }
5680
5681 /* __builtin_return (RESULT) causes the function to return the
5682 value described by RESULT. RESULT is address of the block of
5683 memory returned by __builtin_apply. */
5684 case BUILT_IN_RETURN:
5685 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5686 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5687 NULL_RTX, VOIDmode, 0));
5688 return const0_rtx;
5689
5690 case BUILT_IN_SAVEREGS:
5691 return expand_builtin_saveregs ();
5692
5693 case BUILT_IN_ARGS_INFO:
5694 return expand_builtin_args_info (arglist);
5695
5696 /* Return the address of the first anonymous stack arg. */
5697 case BUILT_IN_NEXT_ARG:
5698 if (fold_builtin_next_arg (arglist))
5699 return const0_rtx;
5700 return expand_builtin_next_arg ();
5701
5702 case BUILT_IN_CLASSIFY_TYPE:
5703 return expand_builtin_classify_type (arglist);
5704
5705 case BUILT_IN_CONSTANT_P:
5706 return const0_rtx;
5707
5708 case BUILT_IN_FRAME_ADDRESS:
5709 case BUILT_IN_RETURN_ADDRESS:
5710 return expand_builtin_frame_address (fndecl, arglist);
5711
5712 /* Returns the address of the area where the structure is returned.
5713 0 otherwise. */
5714 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5715 if (arglist != 0
5716 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5717 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5718 return const0_rtx;
5719 else
5720 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5721
5722 case BUILT_IN_ALLOCA:
5723 target = expand_builtin_alloca (arglist, target);
5724 if (target)
5725 return target;
5726 break;
5727
5728 case BUILT_IN_STACK_SAVE:
5729 return expand_stack_save ();
5730
5731 case BUILT_IN_STACK_RESTORE:
5732 expand_stack_restore (TREE_VALUE (arglist));
5733 return const0_rtx;
5734
5735 case BUILT_IN_FFS:
5736 case BUILT_IN_FFSL:
5737 case BUILT_IN_FFSLL:
5738 case BUILT_IN_FFSIMAX:
5739 target = expand_builtin_unop (target_mode, arglist, target,
5740 subtarget, ffs_optab);
5741 if (target)
5742 return target;
5743 break;
5744
5745 case BUILT_IN_CLZ:
5746 case BUILT_IN_CLZL:
5747 case BUILT_IN_CLZLL:
5748 case BUILT_IN_CLZIMAX:
5749 target = expand_builtin_unop (target_mode, arglist, target,
5750 subtarget, clz_optab);
5751 if (target)
5752 return target;
5753 break;
5754
5755 case BUILT_IN_CTZ:
5756 case BUILT_IN_CTZL:
5757 case BUILT_IN_CTZLL:
5758 case BUILT_IN_CTZIMAX:
5759 target = expand_builtin_unop (target_mode, arglist, target,
5760 subtarget, ctz_optab);
5761 if (target)
5762 return target;
5763 break;
5764
5765 case BUILT_IN_POPCOUNT:
5766 case BUILT_IN_POPCOUNTL:
5767 case BUILT_IN_POPCOUNTLL:
5768 case BUILT_IN_POPCOUNTIMAX:
5769 target = expand_builtin_unop (target_mode, arglist, target,
5770 subtarget, popcount_optab);
5771 if (target)
5772 return target;
5773 break;
5774
5775 case BUILT_IN_PARITY:
5776 case BUILT_IN_PARITYL:
5777 case BUILT_IN_PARITYLL:
5778 case BUILT_IN_PARITYIMAX:
5779 target = expand_builtin_unop (target_mode, arglist, target,
5780 subtarget, parity_optab);
5781 if (target)
5782 return target;
5783 break;
5784
5785 case BUILT_IN_STRLEN:
5786 target = expand_builtin_strlen (arglist, target, target_mode);
5787 if (target)
5788 return target;
5789 break;
5790
5791 case BUILT_IN_STRCPY:
5792 target = expand_builtin_strcpy (exp, target, mode);
5793 if (target)
5794 return target;
5795 break;
5796
5797 case BUILT_IN_STRNCPY:
5798 target = expand_builtin_strncpy (exp, target, mode);
5799 if (target)
5800 return target;
5801 break;
5802
5803 case BUILT_IN_STPCPY:
5804 target = expand_builtin_stpcpy (exp, target, mode);
5805 if (target)
5806 return target;
5807 break;
5808
5809 case BUILT_IN_STRCAT:
5810 target = expand_builtin_strcat (arglist, TREE_TYPE (exp), target, mode);
5811 if (target)
5812 return target;
5813 break;
5814
5815 case BUILT_IN_STRNCAT:
5816 target = expand_builtin_strncat (arglist, target, mode);
5817 if (target)
5818 return target;
5819 break;
5820
5821 case BUILT_IN_STRSPN:
5822 target = expand_builtin_strspn (arglist, target, mode);
5823 if (target)
5824 return target;
5825 break;
5826
5827 case BUILT_IN_STRCSPN:
5828 target = expand_builtin_strcspn (arglist, target, mode);
5829 if (target)
5830 return target;
5831 break;
5832
5833 case BUILT_IN_STRSTR:
5834 target = expand_builtin_strstr (arglist, TREE_TYPE (exp), target, mode);
5835 if (target)
5836 return target;
5837 break;
5838
5839 case BUILT_IN_STRPBRK:
5840 target = expand_builtin_strpbrk (arglist, TREE_TYPE (exp), target, mode);
5841 if (target)
5842 return target;
5843 break;
5844
5845 case BUILT_IN_INDEX:
5846 case BUILT_IN_STRCHR:
5847 target = expand_builtin_strchr (arglist, TREE_TYPE (exp), target, mode);
5848 if (target)
5849 return target;
5850 break;
5851
5852 case BUILT_IN_RINDEX:
5853 case BUILT_IN_STRRCHR:
5854 target = expand_builtin_strrchr (arglist, TREE_TYPE (exp), target, mode);
5855 if (target)
5856 return target;
5857 break;
5858
5859 case BUILT_IN_MEMCPY:
5860 target = expand_builtin_memcpy (exp, target, mode);
5861 if (target)
5862 return target;
5863 break;
5864
5865 case BUILT_IN_MEMPCPY:
5866 target = expand_builtin_mempcpy (arglist, TREE_TYPE (exp), target, mode, /*endp=*/ 1);
5867 if (target)
5868 return target;
5869 break;
5870
5871 case BUILT_IN_MEMMOVE:
5872 target = expand_builtin_memmove (arglist, TREE_TYPE (exp), target,
5873 mode, exp);
5874 if (target)
5875 return target;
5876 break;
5877
5878 case BUILT_IN_BCOPY:
5879 target = expand_builtin_bcopy (exp);
5880 if (target)
5881 return target;
5882 break;
5883
5884 case BUILT_IN_MEMSET:
5885 target = expand_builtin_memset (arglist, target, mode, exp);
5886 if (target)
5887 return target;
5888 break;
5889
5890 case BUILT_IN_BZERO:
5891 target = expand_builtin_bzero (exp);
5892 if (target)
5893 return target;
5894 break;
5895
5896 case BUILT_IN_STRCMP:
5897 target = expand_builtin_strcmp (exp, target, mode);
5898 if (target)
5899 return target;
5900 break;
5901
5902 case BUILT_IN_STRNCMP:
5903 target = expand_builtin_strncmp (exp, target, mode);
5904 if (target)
5905 return target;
5906 break;
5907
5908 case BUILT_IN_BCMP:
5909 case BUILT_IN_MEMCMP:
5910 target = expand_builtin_memcmp (exp, arglist, target, mode);
5911 if (target)
5912 return target;
5913 break;
5914
5915 case BUILT_IN_SETJMP:
5916 target = expand_builtin_setjmp (arglist, target);
5917 if (target)
5918 return target;
5919 break;
5920
5921 /* __builtin_longjmp is passed a pointer to an array of five words.
5922 It's similar to the C library longjmp function but works with
5923 __builtin_setjmp above. */
5924 case BUILT_IN_LONGJMP:
5925 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5926 break;
5927 else
5928 {
5929 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5930 VOIDmode, 0);
5931 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5932 NULL_RTX, VOIDmode, 0);
5933
5934 if (value != const1_rtx)
5935 {
5936 error ("%<__builtin_longjmp%> second argument must be 1");
5937 return const0_rtx;
5938 }
5939
5940 expand_builtin_longjmp (buf_addr, value);
5941 return const0_rtx;
5942 }
5943
5944 case BUILT_IN_NONLOCAL_GOTO:
5945 target = expand_builtin_nonlocal_goto (arglist);
5946 if (target)
5947 return target;
5948 break;
5949
5950 /* This updates the setjmp buffer that is its argument with the value
5951 of the current stack pointer. */
5952 case BUILT_IN_UPDATE_SETJMP_BUF:
5953 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5954 {
5955 rtx buf_addr
5956 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5957
5958 expand_builtin_update_setjmp_buf (buf_addr);
5959 return const0_rtx;
5960 }
5961 break;
5962
5963 case BUILT_IN_TRAP:
5964 expand_builtin_trap ();
5965 return const0_rtx;
5966
5967 case BUILT_IN_PRINTF:
5968 target = expand_builtin_printf (exp, target, mode, false);
5969 if (target)
5970 return target;
5971 break;
5972
5973 case BUILT_IN_PRINTF_UNLOCKED:
5974 target = expand_builtin_printf (exp, target, mode, true);
5975 if (target)
5976 return target;
5977 break;
5978
5979 case BUILT_IN_FPUTS:
5980 target = expand_builtin_fputs (arglist, target, false);
5981 if (target)
5982 return target;
5983 break;
5984 case BUILT_IN_FPUTS_UNLOCKED:
5985 target = expand_builtin_fputs (arglist, target, true);
5986 if (target)
5987 return target;
5988 break;
5989
5990 case BUILT_IN_FPRINTF:
5991 target = expand_builtin_fprintf (exp, target, mode, false);
5992 if (target)
5993 return target;
5994 break;
5995
5996 case BUILT_IN_FPRINTF_UNLOCKED:
5997 target = expand_builtin_fprintf (exp, target, mode, true);
5998 if (target)
5999 return target;
6000 break;
6001
6002 case BUILT_IN_SPRINTF:
6003 target = expand_builtin_sprintf (arglist, target, mode);
6004 if (target)
6005 return target;
6006 break;
6007
6008 case BUILT_IN_SIGNBIT:
6009 case BUILT_IN_SIGNBITF:
6010 case BUILT_IN_SIGNBITL:
6011 target = expand_builtin_signbit (exp, target);
6012 if (target)
6013 return target;
6014 break;
6015
6016 /* Various hooks for the DWARF 2 __throw routine. */
6017 case BUILT_IN_UNWIND_INIT:
6018 expand_builtin_unwind_init ();
6019 return const0_rtx;
6020 case BUILT_IN_DWARF_CFA:
6021 return virtual_cfa_rtx;
6022 #ifdef DWARF2_UNWIND_INFO
6023 case BUILT_IN_DWARF_SP_COLUMN:
6024 return expand_builtin_dwarf_sp_column ();
6025 case BUILT_IN_INIT_DWARF_REG_SIZES:
6026 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
6027 return const0_rtx;
6028 #endif
6029 case BUILT_IN_FROB_RETURN_ADDR:
6030 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
6031 case BUILT_IN_EXTRACT_RETURN_ADDR:
6032 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
6033 case BUILT_IN_EH_RETURN:
6034 expand_builtin_eh_return (TREE_VALUE (arglist),
6035 TREE_VALUE (TREE_CHAIN (arglist)));
6036 return const0_rtx;
6037 #ifdef EH_RETURN_DATA_REGNO
6038 case BUILT_IN_EH_RETURN_DATA_REGNO:
6039 return expand_builtin_eh_return_data_regno (arglist);
6040 #endif
6041 case BUILT_IN_EXTEND_POINTER:
6042 return expand_builtin_extend_pointer (TREE_VALUE (arglist));
6043
6044 case BUILT_IN_VA_START:
6045 case BUILT_IN_STDARG_START:
6046 return expand_builtin_va_start (arglist);
6047 case BUILT_IN_VA_END:
6048 return expand_builtin_va_end (arglist);
6049 case BUILT_IN_VA_COPY:
6050 return expand_builtin_va_copy (arglist);
6051 case BUILT_IN_EXPECT:
6052 return expand_builtin_expect (arglist, target);
6053 case BUILT_IN_PREFETCH:
6054 expand_builtin_prefetch (arglist);
6055 return const0_rtx;
6056
6057 case BUILT_IN_PROFILE_FUNC_ENTER:
6058 return expand_builtin_profile_func (false);
6059 case BUILT_IN_PROFILE_FUNC_EXIT:
6060 return expand_builtin_profile_func (true);
6061
6062 case BUILT_IN_INIT_TRAMPOLINE:
6063 return expand_builtin_init_trampoline (arglist);
6064 case BUILT_IN_ADJUST_TRAMPOLINE:
6065 return expand_builtin_adjust_trampoline (arglist);
6066
6067 case BUILT_IN_FORK:
6068 case BUILT_IN_EXECL:
6069 case BUILT_IN_EXECV:
6070 case BUILT_IN_EXECLP:
6071 case BUILT_IN_EXECLE:
6072 case BUILT_IN_EXECVP:
6073 case BUILT_IN_EXECVE:
6074 target = expand_builtin_fork_or_exec (fndecl, arglist, target, ignore);
6075 if (target)
6076 return target;
6077 break;
6078
6079 case BUILT_IN_FETCH_AND_ADD_1:
6080 case BUILT_IN_FETCH_AND_ADD_2:
6081 case BUILT_IN_FETCH_AND_ADD_4:
6082 case BUILT_IN_FETCH_AND_ADD_8:
6083 target = expand_builtin_sync_operation (arglist, PLUS,
6084 false, target, ignore);
6085 if (target)
6086 return target;
6087 break;
6088
6089 case BUILT_IN_FETCH_AND_SUB_1:
6090 case BUILT_IN_FETCH_AND_SUB_2:
6091 case BUILT_IN_FETCH_AND_SUB_4:
6092 case BUILT_IN_FETCH_AND_SUB_8:
6093 target = expand_builtin_sync_operation (arglist, MINUS,
6094 false, target, ignore);
6095 if (target)
6096 return target;
6097 break;
6098
6099 case BUILT_IN_FETCH_AND_OR_1:
6100 case BUILT_IN_FETCH_AND_OR_2:
6101 case BUILT_IN_FETCH_AND_OR_4:
6102 case BUILT_IN_FETCH_AND_OR_8:
6103 target = expand_builtin_sync_operation (arglist, IOR,
6104 false, target, ignore);
6105 if (target)
6106 return target;
6107 break;
6108
6109 case BUILT_IN_FETCH_AND_AND_1:
6110 case BUILT_IN_FETCH_AND_AND_2:
6111 case BUILT_IN_FETCH_AND_AND_4:
6112 case BUILT_IN_FETCH_AND_AND_8:
6113 target = expand_builtin_sync_operation (arglist, AND,
6114 false, target, ignore);
6115 if (target)
6116 return target;
6117 break;
6118
6119 case BUILT_IN_FETCH_AND_XOR_1:
6120 case BUILT_IN_FETCH_AND_XOR_2:
6121 case BUILT_IN_FETCH_AND_XOR_4:
6122 case BUILT_IN_FETCH_AND_XOR_8:
6123 target = expand_builtin_sync_operation (arglist, XOR,
6124 false, target, ignore);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_FETCH_AND_NAND_1:
6130 case BUILT_IN_FETCH_AND_NAND_2:
6131 case BUILT_IN_FETCH_AND_NAND_4:
6132 case BUILT_IN_FETCH_AND_NAND_8:
6133 target = expand_builtin_sync_operation (arglist, NOT,
6134 false, target, ignore);
6135 if (target)
6136 return target;
6137 break;
6138
6139 case BUILT_IN_ADD_AND_FETCH_1:
6140 case BUILT_IN_ADD_AND_FETCH_2:
6141 case BUILT_IN_ADD_AND_FETCH_4:
6142 case BUILT_IN_ADD_AND_FETCH_8:
6143 target = expand_builtin_sync_operation (arglist, PLUS,
6144 true, target, ignore);
6145 if (target)
6146 return target;
6147 break;
6148
6149 case BUILT_IN_SUB_AND_FETCH_1:
6150 case BUILT_IN_SUB_AND_FETCH_2:
6151 case BUILT_IN_SUB_AND_FETCH_4:
6152 case BUILT_IN_SUB_AND_FETCH_8:
6153 target = expand_builtin_sync_operation (arglist, MINUS,
6154 true, target, ignore);
6155 if (target)
6156 return target;
6157 break;
6158
6159 case BUILT_IN_OR_AND_FETCH_1:
6160 case BUILT_IN_OR_AND_FETCH_2:
6161 case BUILT_IN_OR_AND_FETCH_4:
6162 case BUILT_IN_OR_AND_FETCH_8:
6163 target = expand_builtin_sync_operation (arglist, IOR,
6164 true, target, ignore);
6165 if (target)
6166 return target;
6167 break;
6168
6169 case BUILT_IN_AND_AND_FETCH_1:
6170 case BUILT_IN_AND_AND_FETCH_2:
6171 case BUILT_IN_AND_AND_FETCH_4:
6172 case BUILT_IN_AND_AND_FETCH_8:
6173 target = expand_builtin_sync_operation (arglist, AND,
6174 true, target, ignore);
6175 if (target)
6176 return target;
6177 break;
6178
6179 case BUILT_IN_XOR_AND_FETCH_1:
6180 case BUILT_IN_XOR_AND_FETCH_2:
6181 case BUILT_IN_XOR_AND_FETCH_4:
6182 case BUILT_IN_XOR_AND_FETCH_8:
6183 target = expand_builtin_sync_operation (arglist, XOR,
6184 true, target, ignore);
6185 if (target)
6186 return target;
6187 break;
6188
6189 case BUILT_IN_NAND_AND_FETCH_1:
6190 case BUILT_IN_NAND_AND_FETCH_2:
6191 case BUILT_IN_NAND_AND_FETCH_4:
6192 case BUILT_IN_NAND_AND_FETCH_8:
6193 target = expand_builtin_sync_operation (arglist, NOT,
6194 true, target, ignore);
6195 if (target)
6196 return target;
6197 break;
6198
6199 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6200 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6201 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6202 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6203 if (mode == VOIDmode)
6204 mode = TYPE_MODE (boolean_type_node);
6205 if (!target || !register_operand (target, mode))
6206 target = gen_reg_rtx (mode);
6207 target = expand_builtin_compare_and_swap (arglist, true, target);
6208 if (target)
6209 return target;
6210 break;
6211
6212 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6213 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6214 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6215 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6216 target = expand_builtin_compare_and_swap (arglist, false, target);
6217 if (target)
6218 return target;
6219 break;
6220
6221 case BUILT_IN_LOCK_TEST_AND_SET_1:
6222 case BUILT_IN_LOCK_TEST_AND_SET_2:
6223 case BUILT_IN_LOCK_TEST_AND_SET_4:
6224 case BUILT_IN_LOCK_TEST_AND_SET_8:
6225 target = expand_builtin_lock_test_and_set (arglist, target);
6226 if (target)
6227 return target;
6228 break;
6229
6230 case BUILT_IN_LOCK_RELEASE_1:
6231 case BUILT_IN_LOCK_RELEASE_2:
6232 case BUILT_IN_LOCK_RELEASE_4:
6233 case BUILT_IN_LOCK_RELEASE_8:
6234 expand_builtin_lock_release (arglist);
6235 return const0_rtx;
6236
6237 case BUILT_IN_SYNCHRONIZE:
6238 expand_builtin_synchronize ();
6239 return const0_rtx;
6240
6241 default: /* just do library call, if unknown builtin */
6242 break;
6243 }
6244
6245 /* The switch statement above can drop through to cause the function
6246 to be called normally. */
6247 return expand_call (exp, target, ignore);
6248 }
6249
6250 /* Determine whether a tree node represents a call to a built-in
6251 function. If the tree T is a call to a built-in function with
6252 the right number of arguments of the appropriate types, return
6253 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6254 Otherwise the return value is END_BUILTINS. */
6255
6256 enum built_in_function
6257 builtin_mathfn_code (tree t)
6258 {
6259 tree fndecl, arglist, parmlist;
6260 tree argtype, parmtype;
6261
6262 if (TREE_CODE (t) != CALL_EXPR
6263 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
6264 return END_BUILTINS;
6265
6266 fndecl = get_callee_fndecl (t);
6267 if (fndecl == NULL_TREE
6268 || TREE_CODE (fndecl) != FUNCTION_DECL
6269 || ! DECL_BUILT_IN (fndecl)
6270 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6271 return END_BUILTINS;
6272
6273 arglist = TREE_OPERAND (t, 1);
6274 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6275 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6276 {
6277 /* If a function doesn't take a variable number of arguments,
6278 the last element in the list will have type `void'. */
6279 parmtype = TREE_VALUE (parmlist);
6280 if (VOID_TYPE_P (parmtype))
6281 {
6282 if (arglist)
6283 return END_BUILTINS;
6284 return DECL_FUNCTION_CODE (fndecl);
6285 }
6286
6287 if (! arglist)
6288 return END_BUILTINS;
6289
6290 argtype = TREE_TYPE (TREE_VALUE (arglist));
6291
6292 if (SCALAR_FLOAT_TYPE_P (parmtype))
6293 {
6294 if (! SCALAR_FLOAT_TYPE_P (argtype))
6295 return END_BUILTINS;
6296 }
6297 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6298 {
6299 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6300 return END_BUILTINS;
6301 }
6302 else if (POINTER_TYPE_P (parmtype))
6303 {
6304 if (! POINTER_TYPE_P (argtype))
6305 return END_BUILTINS;
6306 }
6307 else if (INTEGRAL_TYPE_P (parmtype))
6308 {
6309 if (! INTEGRAL_TYPE_P (argtype))
6310 return END_BUILTINS;
6311 }
6312 else
6313 return END_BUILTINS;
6314
6315 arglist = TREE_CHAIN (arglist);
6316 }
6317
6318 /* Variable-length argument list. */
6319 return DECL_FUNCTION_CODE (fndecl);
6320 }
6321
6322 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
6323 constant. ARGLIST is the argument list of the call. */
6324
6325 static tree
6326 fold_builtin_constant_p (tree arglist)
6327 {
6328 if (arglist == 0)
6329 return 0;
6330
6331 arglist = TREE_VALUE (arglist);
6332
6333 /* We return 1 for a numeric type that's known to be a constant
6334 value at compile-time or for an aggregate type that's a
6335 literal constant. */
6336 STRIP_NOPS (arglist);
6337
6338 /* If we know this is a constant, emit the constant of one. */
6339 if (CONSTANT_CLASS_P (arglist)
6340 || (TREE_CODE (arglist) == CONSTRUCTOR
6341 && TREE_CONSTANT (arglist))
6342 || (TREE_CODE (arglist) == ADDR_EXPR
6343 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
6344 return integer_one_node;
6345
6346 /* If this expression has side effects, show we don't know it to be a
6347 constant. Likewise if it's a pointer or aggregate type since in
6348 those case we only want literals, since those are only optimized
6349 when generating RTL, not later.
6350 And finally, if we are compiling an initializer, not code, we
6351 need to return a definite result now; there's not going to be any
6352 more optimization done. */
6353 if (TREE_SIDE_EFFECTS (arglist)
6354 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
6355 || POINTER_TYPE_P (TREE_TYPE (arglist))
6356 || cfun == 0)
6357 return integer_zero_node;
6358
6359 return 0;
6360 }
6361
6362 /* Fold a call to __builtin_expect, if we expect that a comparison against
6363 the argument will fold to a constant. In practice, this means a true
6364 constant or the address of a non-weak symbol. ARGLIST is the argument
6365 list of the call. */
6366
6367 static tree
6368 fold_builtin_expect (tree arglist)
6369 {
6370 tree arg, inner;
6371
6372 if (arglist == 0)
6373 return 0;
6374
6375 arg = TREE_VALUE (arglist);
6376
6377 /* If the argument isn't invariant, then there's nothing we can do. */
6378 if (!TREE_INVARIANT (arg))
6379 return 0;
6380
6381 /* If we're looking at an address of a weak decl, then do not fold. */
6382 inner = arg;
6383 STRIP_NOPS (inner);
6384 if (TREE_CODE (inner) == ADDR_EXPR)
6385 {
6386 do
6387 {
6388 inner = TREE_OPERAND (inner, 0);
6389 }
6390 while (TREE_CODE (inner) == COMPONENT_REF
6391 || TREE_CODE (inner) == ARRAY_REF);
6392 if (DECL_P (inner) && DECL_WEAK (inner))
6393 return 0;
6394 }
6395
6396 /* Otherwise, ARG already has the proper type for the return value. */
6397 return arg;
6398 }
6399
6400 /* Fold a call to __builtin_classify_type. */
6401
6402 static tree
6403 fold_builtin_classify_type (tree arglist)
6404 {
6405 if (arglist == 0)
6406 return build_int_cst (NULL_TREE, no_type_class);
6407
6408 return build_int_cst (NULL_TREE,
6409 type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
6410 }
6411
6412 /* Fold a call to __builtin_strlen. */
6413
6414 static tree
6415 fold_builtin_strlen (tree arglist)
6416 {
6417 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
6418 return NULL_TREE;
6419 else
6420 {
6421 tree len = c_strlen (TREE_VALUE (arglist), 0);
6422
6423 if (len)
6424 {
6425 /* Convert from the internal "sizetype" type to "size_t". */
6426 if (size_type_node)
6427 len = fold_convert (size_type_node, len);
6428 return len;
6429 }
6430
6431 return NULL_TREE;
6432 }
6433 }
6434
6435 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6436
6437 static tree
6438 fold_builtin_inf (tree type, int warn)
6439 {
6440 REAL_VALUE_TYPE real;
6441
6442 /* __builtin_inff is intended to be usable to define INFINITY on all
6443 targets. If an infinity is not available, INFINITY expands "to a
6444 positive constant of type float that overflows at translation
6445 time", footnote "In this case, using INFINITY will violate the
6446 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6447 Thus we pedwarn to ensure this constraint violation is
6448 diagnosed. */
6449 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6450 pedwarn ("target format does not support infinity");
6451
6452 real_inf (&real);
6453 return build_real (type, real);
6454 }
6455
6456 /* Fold a call to __builtin_nan or __builtin_nans. */
6457
6458 static tree
6459 fold_builtin_nan (tree arglist, tree type, int quiet)
6460 {
6461 REAL_VALUE_TYPE real;
6462 const char *str;
6463
6464 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
6465 return 0;
6466 str = c_getstr (TREE_VALUE (arglist));
6467 if (!str)
6468 return 0;
6469
6470 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6471 return 0;
6472
6473 return build_real (type, real);
6474 }
6475
6476 /* Return true if the floating point expression T has an integer value.
6477 We also allow +Inf, -Inf and NaN to be considered integer values. */
6478
6479 static bool
6480 integer_valued_real_p (tree t)
6481 {
6482 switch (TREE_CODE (t))
6483 {
6484 case FLOAT_EXPR:
6485 return true;
6486
6487 case ABS_EXPR:
6488 case SAVE_EXPR:
6489 case NON_LVALUE_EXPR:
6490 return integer_valued_real_p (TREE_OPERAND (t, 0));
6491
6492 case COMPOUND_EXPR:
6493 case MODIFY_EXPR:
6494 case BIND_EXPR:
6495 return integer_valued_real_p (TREE_OPERAND (t, 1));
6496
6497 case PLUS_EXPR:
6498 case MINUS_EXPR:
6499 case MULT_EXPR:
6500 case MIN_EXPR:
6501 case MAX_EXPR:
6502 return integer_valued_real_p (TREE_OPERAND (t, 0))
6503 && integer_valued_real_p (TREE_OPERAND (t, 1));
6504
6505 case COND_EXPR:
6506 return integer_valued_real_p (TREE_OPERAND (t, 1))
6507 && integer_valued_real_p (TREE_OPERAND (t, 2));
6508
6509 case REAL_CST:
6510 if (! TREE_CONSTANT_OVERFLOW (t))
6511 {
6512 REAL_VALUE_TYPE c, cint;
6513
6514 c = TREE_REAL_CST (t);
6515 real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
6516 return real_identical (&c, &cint);
6517 }
6518
6519 case NOP_EXPR:
6520 {
6521 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6522 if (TREE_CODE (type) == INTEGER_TYPE)
6523 return true;
6524 if (TREE_CODE (type) == REAL_TYPE)
6525 return integer_valued_real_p (TREE_OPERAND (t, 0));
6526 break;
6527 }
6528
6529 case CALL_EXPR:
6530 switch (builtin_mathfn_code (t))
6531 {
6532 case BUILT_IN_CEIL:
6533 case BUILT_IN_CEILF:
6534 case BUILT_IN_CEILL:
6535 case BUILT_IN_FLOOR:
6536 case BUILT_IN_FLOORF:
6537 case BUILT_IN_FLOORL:
6538 case BUILT_IN_NEARBYINT:
6539 case BUILT_IN_NEARBYINTF:
6540 case BUILT_IN_NEARBYINTL:
6541 case BUILT_IN_RINT:
6542 case BUILT_IN_RINTF:
6543 case BUILT_IN_RINTL:
6544 case BUILT_IN_ROUND:
6545 case BUILT_IN_ROUNDF:
6546 case BUILT_IN_ROUNDL:
6547 case BUILT_IN_TRUNC:
6548 case BUILT_IN_TRUNCF:
6549 case BUILT_IN_TRUNCL:
6550 return true;
6551
6552 default:
6553 break;
6554 }
6555 break;
6556
6557 default:
6558 break;
6559 }
6560 return false;
6561 }
6562
6563 /* EXP is assumed to be builtin call where truncation can be propagated
6564 across (for instance floor((double)f) == (double)floorf (f).
6565 Do the transformation. */
6566
6567 static tree
6568 fold_trunc_transparent_mathfn (tree fndecl, tree arglist)
6569 {
6570 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6571 tree arg;
6572
6573 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6574 return 0;
6575
6576 arg = TREE_VALUE (arglist);
6577 /* Integer rounding functions are idempotent. */
6578 if (fcode == builtin_mathfn_code (arg))
6579 return arg;
6580
6581 /* If argument is already integer valued, and we don't need to worry
6582 about setting errno, there's no need to perform rounding. */
6583 if (! flag_errno_math && integer_valued_real_p (arg))
6584 return arg;
6585
6586 if (optimize)
6587 {
6588 tree arg0 = strip_float_extensions (arg);
6589 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6590 tree newtype = TREE_TYPE (arg0);
6591 tree decl;
6592
6593 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6594 && (decl = mathfn_built_in (newtype, fcode)))
6595 {
6596 arglist =
6597 build_tree_list (NULL_TREE, fold_convert (newtype, arg0));
6598 return fold_convert (ftype,
6599 build_function_call_expr (decl, arglist));
6600 }
6601 }
6602 return 0;
6603 }
6604
6605 /* EXP is assumed to be builtin call which can narrow the FP type of
6606 the argument, for instance lround((double)f) -> lroundf (f). */
6607
6608 static tree
6609 fold_fixed_mathfn (tree fndecl, tree arglist)
6610 {
6611 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6612 tree arg;
6613
6614 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6615 return 0;
6616
6617 arg = TREE_VALUE (arglist);
6618
6619 /* If argument is already integer valued, and we don't need to worry
6620 about setting errno, there's no need to perform rounding. */
6621 if (! flag_errno_math && integer_valued_real_p (arg))
6622 return fold (build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg));
6623
6624 if (optimize)
6625 {
6626 tree ftype = TREE_TYPE (arg);
6627 tree arg0 = strip_float_extensions (arg);
6628 tree newtype = TREE_TYPE (arg0);
6629 tree decl;
6630
6631 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6632 && (decl = mathfn_built_in (newtype, fcode)))
6633 {
6634 arglist =
6635 build_tree_list (NULL_TREE, fold_convert (newtype, arg0));
6636 return build_function_call_expr (decl, arglist);
6637 }
6638 }
6639 return 0;
6640 }
6641
6642 /* Fold function call to builtin cabs, cabsf or cabsl. ARGLIST
6643 is the argument list and TYPE is the return type. Return
6644 NULL_TREE if no if no simplification can be made. */
6645
6646 static tree
6647 fold_builtin_cabs (tree arglist, tree type)
6648 {
6649 tree arg;
6650
6651 if (!arglist || TREE_CHAIN (arglist))
6652 return NULL_TREE;
6653
6654 arg = TREE_VALUE (arglist);
6655 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
6656 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6657 return NULL_TREE;
6658
6659 /* Evaluate cabs of a constant at compile-time. */
6660 if (flag_unsafe_math_optimizations
6661 && TREE_CODE (arg) == COMPLEX_CST
6662 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
6663 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
6664 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
6665 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
6666 {
6667 REAL_VALUE_TYPE r, i;
6668
6669 r = TREE_REAL_CST (TREE_REALPART (arg));
6670 i = TREE_REAL_CST (TREE_IMAGPART (arg));
6671
6672 real_arithmetic (&r, MULT_EXPR, &r, &r);
6673 real_arithmetic (&i, MULT_EXPR, &i, &i);
6674 real_arithmetic (&r, PLUS_EXPR, &r, &i);
6675 if (real_sqrt (&r, TYPE_MODE (type), &r)
6676 || ! flag_trapping_math)
6677 return build_real (type, r);
6678 }
6679
6680 /* If either part is zero, cabs is fabs of the other. */
6681 if (TREE_CODE (arg) == COMPLEX_EXPR
6682 && real_zerop (TREE_OPERAND (arg, 0)))
6683 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
6684 if (TREE_CODE (arg) == COMPLEX_EXPR
6685 && real_zerop (TREE_OPERAND (arg, 1)))
6686 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
6687
6688 /* Don't do this when optimizing for size. */
6689 if (flag_unsafe_math_optimizations
6690 && optimize && !optimize_size)
6691 {
6692 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6693
6694 if (sqrtfn != NULL_TREE)
6695 {
6696 tree rpart, ipart, result, arglist;
6697
6698 arg = builtin_save_expr (arg);
6699
6700 rpart = fold (build1 (REALPART_EXPR, type, arg));
6701 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
6702
6703 rpart = builtin_save_expr (rpart);
6704 ipart = builtin_save_expr (ipart);
6705
6706 result = fold (build2 (PLUS_EXPR, type,
6707 fold (build2 (MULT_EXPR, type,
6708 rpart, rpart)),
6709 fold (build2 (MULT_EXPR, type,
6710 ipart, ipart))));
6711
6712 arglist = build_tree_list (NULL_TREE, result);
6713 return build_function_call_expr (sqrtfn, arglist);
6714 }
6715 }
6716
6717 return NULL_TREE;
6718 }
6719
6720 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl. Return
6721 NULL_TREE if no simplification can be made. */
6722
6723 static tree
6724 fold_builtin_sqrt (tree arglist, tree type)
6725 {
6726
6727 enum built_in_function fcode;
6728 tree arg = TREE_VALUE (arglist);
6729
6730 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6731 return NULL_TREE;
6732
6733 /* Optimize sqrt of constant value. */
6734 if (TREE_CODE (arg) == REAL_CST
6735 && ! TREE_CONSTANT_OVERFLOW (arg))
6736 {
6737 REAL_VALUE_TYPE r, x;
6738
6739 x = TREE_REAL_CST (arg);
6740 if (real_sqrt (&r, TYPE_MODE (type), &x)
6741 || (!flag_trapping_math && !flag_errno_math))
6742 return build_real (type, r);
6743 }
6744
6745 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6746 fcode = builtin_mathfn_code (arg);
6747 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6748 {
6749 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6750 arg = fold (build2 (MULT_EXPR, type,
6751 TREE_VALUE (TREE_OPERAND (arg, 1)),
6752 build_real (type, dconsthalf)));
6753 arglist = build_tree_list (NULL_TREE, arg);
6754 return build_function_call_expr (expfn, arglist);
6755 }
6756
6757 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6758 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6759 {
6760 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6761
6762 if (powfn)
6763 {
6764 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6765 tree tree_root;
6766 /* The inner root was either sqrt or cbrt. */
6767 REAL_VALUE_TYPE dconstroot =
6768 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
6769
6770 /* Adjust for the outer root. */
6771 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6772 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6773 tree_root = build_real (type, dconstroot);
6774 arglist = tree_cons (NULL_TREE, arg0,
6775 build_tree_list (NULL_TREE, tree_root));
6776 return build_function_call_expr (powfn, arglist);
6777 }
6778 }
6779
6780 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6781 if (flag_unsafe_math_optimizations
6782 && (fcode == BUILT_IN_POW
6783 || fcode == BUILT_IN_POWF
6784 || fcode == BUILT_IN_POWL))
6785 {
6786 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6787 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6788 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
6789 tree narg1;
6790 if (!tree_expr_nonnegative_p (arg0))
6791 arg0 = build1 (ABS_EXPR, type, arg0);
6792 narg1 = fold (build2 (MULT_EXPR, type, arg1,
6793 build_real (type, dconsthalf)));
6794 arglist = tree_cons (NULL_TREE, arg0,
6795 build_tree_list (NULL_TREE, narg1));
6796 return build_function_call_expr (powfn, arglist);
6797 }
6798
6799 return NULL_TREE;
6800 }
6801
6802 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl. Return
6803 NULL_TREE if no simplification can be made. */
6804 static tree
6805 fold_builtin_cbrt (tree arglist, tree type)
6806 {
6807 tree arg = TREE_VALUE (arglist);
6808 const enum built_in_function fcode = builtin_mathfn_code (arg);
6809
6810 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6811 return NULL_TREE;
6812
6813 /* Optimize cbrt of constant value. */
6814 if (real_zerop (arg) || real_onep (arg) || real_minus_onep (arg))
6815 return arg;
6816
6817 if (flag_unsafe_math_optimizations)
6818 {
6819 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6820 if (BUILTIN_EXPONENT_P (fcode))
6821 {
6822 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6823 const REAL_VALUE_TYPE third_trunc =
6824 real_value_truncate (TYPE_MODE (type), dconstthird);
6825 arg = fold (build2 (MULT_EXPR, type,
6826 TREE_VALUE (TREE_OPERAND (arg, 1)),
6827 build_real (type, third_trunc)));
6828 arglist = build_tree_list (NULL_TREE, arg);
6829 return build_function_call_expr (expfn, arglist);
6830 }
6831
6832 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6833 if (BUILTIN_SQRT_P (fcode))
6834 {
6835 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6836
6837 if (powfn)
6838 {
6839 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6840 tree tree_root;
6841 REAL_VALUE_TYPE dconstroot = dconstthird;
6842
6843 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6844 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6845 tree_root = build_real (type, dconstroot);
6846 arglist = tree_cons (NULL_TREE, arg0,
6847 build_tree_list (NULL_TREE, tree_root));
6848 return build_function_call_expr (powfn, arglist);
6849 }
6850 }
6851
6852 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6853 if (BUILTIN_CBRT_P (fcode))
6854 {
6855 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6856 if (tree_expr_nonnegative_p (arg0))
6857 {
6858 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6859
6860 if (powfn)
6861 {
6862 tree tree_root;
6863 REAL_VALUE_TYPE dconstroot;
6864
6865 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
6866 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6867 tree_root = build_real (type, dconstroot);
6868 arglist = tree_cons (NULL_TREE, arg0,
6869 build_tree_list (NULL_TREE, tree_root));
6870 return build_function_call_expr (powfn, arglist);
6871 }
6872 }
6873 }
6874
6875 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6876 if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
6877 || fcode == BUILT_IN_POWL)
6878 {
6879 tree arg00 = TREE_VALUE (TREE_OPERAND (arg, 1));
6880 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
6881 if (tree_expr_nonnegative_p (arg00))
6882 {
6883 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6884 const REAL_VALUE_TYPE dconstroot
6885 = real_value_truncate (TYPE_MODE (type), dconstthird);
6886 tree narg01 = fold (build2 (MULT_EXPR, type, arg01,
6887 build_real (type, dconstroot)));
6888 arglist = tree_cons (NULL_TREE, arg00,
6889 build_tree_list (NULL_TREE, narg01));
6890 return build_function_call_expr (powfn, arglist);
6891 }
6892 }
6893 }
6894 return NULL_TREE;
6895 }
6896
6897 /* Fold function call to builtin sin, sinf, or sinl. Return
6898 NULL_TREE if no simplification can be made. */
6899 static tree
6900 fold_builtin_sin (tree arglist)
6901 {
6902 tree arg = TREE_VALUE (arglist);
6903
6904 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6905 return NULL_TREE;
6906
6907 /* Optimize sin (0.0) = 0.0. */
6908 if (real_zerop (arg))
6909 return arg;
6910
6911 return NULL_TREE;
6912 }
6913
6914 /* Fold function call to builtin cos, cosf, or cosl. Return
6915 NULL_TREE if no simplification can be made. */
6916 static tree
6917 fold_builtin_cos (tree arglist, tree type, tree fndecl)
6918 {
6919 tree arg = TREE_VALUE (arglist);
6920
6921 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6922 return NULL_TREE;
6923
6924 /* Optimize cos (0.0) = 1.0. */
6925 if (real_zerop (arg))
6926 return build_real (type, dconst1);
6927
6928 /* Optimize cos(-x) into cos (x). */
6929 if (TREE_CODE (arg) == NEGATE_EXPR)
6930 {
6931 tree args = build_tree_list (NULL_TREE,
6932 TREE_OPERAND (arg, 0));
6933 return build_function_call_expr (fndecl, args);
6934 }
6935
6936 return NULL_TREE;
6937 }
6938
6939 /* Fold function call to builtin tan, tanf, or tanl. Return
6940 NULL_TREE if no simplification can be made. */
6941 static tree
6942 fold_builtin_tan (tree arglist)
6943 {
6944 enum built_in_function fcode;
6945 tree arg = TREE_VALUE (arglist);
6946
6947 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6948 return NULL_TREE;
6949
6950 /* Optimize tan(0.0) = 0.0. */
6951 if (real_zerop (arg))
6952 return arg;
6953
6954 /* Optimize tan(atan(x)) = x. */
6955 fcode = builtin_mathfn_code (arg);
6956 if (flag_unsafe_math_optimizations
6957 && (fcode == BUILT_IN_ATAN
6958 || fcode == BUILT_IN_ATANF
6959 || fcode == BUILT_IN_ATANL))
6960 return TREE_VALUE (TREE_OPERAND (arg, 1));
6961
6962 return NULL_TREE;
6963 }
6964
6965 /* Fold function call to builtin atan, atanf, or atanl. Return
6966 NULL_TREE if no simplification can be made. */
6967
6968 static tree
6969 fold_builtin_atan (tree arglist, tree type)
6970 {
6971
6972 tree arg = TREE_VALUE (arglist);
6973
6974 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6975 return NULL_TREE;
6976
6977 /* Optimize atan(0.0) = 0.0. */
6978 if (real_zerop (arg))
6979 return arg;
6980
6981 /* Optimize atan(1.0) = pi/4. */
6982 if (real_onep (arg))
6983 {
6984 REAL_VALUE_TYPE cst;
6985
6986 real_convert (&cst, TYPE_MODE (type), &dconstpi);
6987 SET_REAL_EXP (&cst, REAL_EXP (&cst) - 2);
6988 return build_real (type, cst);
6989 }
6990
6991 return NULL_TREE;
6992 }
6993
6994 /* Fold function call to builtin trunc, truncf or truncl. Return
6995 NULL_TREE if no simplification can be made. */
6996
6997 static tree
6998 fold_builtin_trunc (tree fndecl, tree arglist)
6999 {
7000 tree arg;
7001
7002 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7003 return 0;
7004
7005 /* Optimize trunc of constant value. */
7006 arg = TREE_VALUE (arglist);
7007 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
7008 {
7009 REAL_VALUE_TYPE r, x;
7010 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7011
7012 x = TREE_REAL_CST (arg);
7013 real_trunc (&r, TYPE_MODE (type), &x);
7014 return build_real (type, r);
7015 }
7016
7017 return fold_trunc_transparent_mathfn (fndecl, arglist);
7018 }
7019
7020 /* Fold function call to builtin floor, floorf or floorl. Return
7021 NULL_TREE if no simplification can be made. */
7022
7023 static tree
7024 fold_builtin_floor (tree fndecl, tree arglist)
7025 {
7026 tree arg;
7027
7028 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7029 return 0;
7030
7031 /* Optimize floor of constant value. */
7032 arg = TREE_VALUE (arglist);
7033 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
7034 {
7035 REAL_VALUE_TYPE x;
7036
7037 x = TREE_REAL_CST (arg);
7038 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7039 {
7040 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7041 REAL_VALUE_TYPE r;
7042
7043 real_floor (&r, TYPE_MODE (type), &x);
7044 return build_real (type, r);
7045 }
7046 }
7047
7048 return fold_trunc_transparent_mathfn (fndecl, arglist);
7049 }
7050
7051 /* Fold function call to builtin ceil, ceilf or ceill. Return
7052 NULL_TREE if no simplification can be made. */
7053
7054 static tree
7055 fold_builtin_ceil (tree fndecl, tree arglist)
7056 {
7057 tree arg;
7058
7059 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7060 return 0;
7061
7062 /* Optimize ceil of constant value. */
7063 arg = TREE_VALUE (arglist);
7064 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
7065 {
7066 REAL_VALUE_TYPE x;
7067
7068 x = TREE_REAL_CST (arg);
7069 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7070 {
7071 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7072 REAL_VALUE_TYPE r;
7073
7074 real_ceil (&r, TYPE_MODE (type), &x);
7075 return build_real (type, r);
7076 }
7077 }
7078
7079 return fold_trunc_transparent_mathfn (fndecl, arglist);
7080 }
7081
7082 /* Fold function call to builtin round, roundf or roundl. Return
7083 NULL_TREE if no simplification can be made. */
7084
7085 static tree
7086 fold_builtin_round (tree fndecl, tree arglist)
7087 {
7088 tree arg;
7089
7090 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7091 return 0;
7092
7093 /* Optimize round of constant value. */
7094 arg = TREE_VALUE (arglist);
7095 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
7096 {
7097 REAL_VALUE_TYPE x;
7098
7099 x = TREE_REAL_CST (arg);
7100 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7101 {
7102 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7103 REAL_VALUE_TYPE r;
7104
7105 real_round (&r, TYPE_MODE (type), &x);
7106 return build_real (type, r);
7107 }
7108 }
7109
7110 return fold_trunc_transparent_mathfn (fndecl, arglist);
7111 }
7112
7113 /* Fold function call to builtin lround, lroundf or lroundl (or the
7114 corresponding long long versions) and other rounding functions.
7115 Return NULL_TREE if no simplification can be made. */
7116
7117 static tree
7118 fold_builtin_int_roundingfn (tree fndecl, tree arglist)
7119 {
7120 tree arg;
7121
7122 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7123 return 0;
7124
7125 /* Optimize lround of constant value. */
7126 arg = TREE_VALUE (arglist);
7127 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
7128 {
7129 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7130
7131 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
7132 {
7133 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7134 tree ftype = TREE_TYPE (arg), result;
7135 HOST_WIDE_INT hi, lo;
7136 REAL_VALUE_TYPE r;
7137
7138 switch (DECL_FUNCTION_CODE (fndecl))
7139 {
7140 case BUILT_IN_LFLOOR:
7141 case BUILT_IN_LFLOORF:
7142 case BUILT_IN_LFLOORL:
7143 case BUILT_IN_LLFLOOR:
7144 case BUILT_IN_LLFLOORF:
7145 case BUILT_IN_LLFLOORL:
7146 real_floor (&r, TYPE_MODE (ftype), &x);
7147 break;
7148
7149 case BUILT_IN_LCEIL:
7150 case BUILT_IN_LCEILF:
7151 case BUILT_IN_LCEILL:
7152 case BUILT_IN_LLCEIL:
7153 case BUILT_IN_LLCEILF:
7154 case BUILT_IN_LLCEILL:
7155 real_ceil (&r, TYPE_MODE (ftype), &x);
7156 break;
7157
7158 case BUILT_IN_LROUND:
7159 case BUILT_IN_LROUNDF:
7160 case BUILT_IN_LROUNDL:
7161 case BUILT_IN_LLROUND:
7162 case BUILT_IN_LLROUNDF:
7163 case BUILT_IN_LLROUNDL:
7164 real_round (&r, TYPE_MODE (ftype), &x);
7165 break;
7166
7167 default:
7168 gcc_unreachable ();
7169 }
7170
7171 REAL_VALUE_TO_INT (&lo, &hi, r);
7172 result = build_int_cst_wide (NULL_TREE, lo, hi);
7173 if (int_fits_type_p (result, itype))
7174 return fold_convert (itype, result);
7175 }
7176 }
7177
7178 return fold_fixed_mathfn (fndecl, arglist);
7179 }
7180
7181 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7182 and their long and long long variants (i.e. ffsl and ffsll).
7183 Return NULL_TREE if no simplification can be made. */
7184
7185 static tree
7186 fold_builtin_bitop (tree fndecl, tree arglist)
7187 {
7188 tree arg;
7189
7190 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
7191 return NULL_TREE;
7192
7193 /* Optimize for constant argument. */
7194 arg = TREE_VALUE (arglist);
7195 if (TREE_CODE (arg) == INTEGER_CST && ! TREE_CONSTANT_OVERFLOW (arg))
7196 {
7197 HOST_WIDE_INT hi, width, result;
7198 unsigned HOST_WIDE_INT lo;
7199 tree type;
7200
7201 type = TREE_TYPE (arg);
7202 width = TYPE_PRECISION (type);
7203 lo = TREE_INT_CST_LOW (arg);
7204
7205 /* Clear all the bits that are beyond the type's precision. */
7206 if (width > HOST_BITS_PER_WIDE_INT)
7207 {
7208 hi = TREE_INT_CST_HIGH (arg);
7209 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7210 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7211 }
7212 else
7213 {
7214 hi = 0;
7215 if (width < HOST_BITS_PER_WIDE_INT)
7216 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7217 }
7218
7219 switch (DECL_FUNCTION_CODE (fndecl))
7220 {
7221 case BUILT_IN_FFS:
7222 case BUILT_IN_FFSL:
7223 case BUILT_IN_FFSLL:
7224 if (lo != 0)
7225 result = exact_log2 (lo & -lo) + 1;
7226 else if (hi != 0)
7227 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7228 else
7229 result = 0;
7230 break;
7231
7232 case BUILT_IN_CLZ:
7233 case BUILT_IN_CLZL:
7234 case BUILT_IN_CLZLL:
7235 if (hi != 0)
7236 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7237 else if (lo != 0)
7238 result = width - floor_log2 (lo) - 1;
7239 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7240 result = width;
7241 break;
7242
7243 case BUILT_IN_CTZ:
7244 case BUILT_IN_CTZL:
7245 case BUILT_IN_CTZLL:
7246 if (lo != 0)
7247 result = exact_log2 (lo & -lo);
7248 else if (hi != 0)
7249 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7250 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7251 result = width;
7252 break;
7253
7254 case BUILT_IN_POPCOUNT:
7255 case BUILT_IN_POPCOUNTL:
7256 case BUILT_IN_POPCOUNTLL:
7257 result = 0;
7258 while (lo)
7259 result++, lo &= lo - 1;
7260 while (hi)
7261 result++, hi &= hi - 1;
7262 break;
7263
7264 case BUILT_IN_PARITY:
7265 case BUILT_IN_PARITYL:
7266 case BUILT_IN_PARITYLL:
7267 result = 0;
7268 while (lo)
7269 result++, lo &= lo - 1;
7270 while (hi)
7271 result++, hi &= hi - 1;
7272 result &= 1;
7273 break;
7274
7275 default:
7276 gcc_unreachable ();
7277 }
7278
7279 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7280 }
7281
7282 return NULL_TREE;
7283 }
7284
7285 /* Return true if EXPR is the real constant contained in VALUE. */
7286
7287 static bool
7288 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7289 {
7290 STRIP_NOPS (expr);
7291
7292 return ((TREE_CODE (expr) == REAL_CST
7293 && ! TREE_CONSTANT_OVERFLOW (expr)
7294 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7295 || (TREE_CODE (expr) == COMPLEX_CST
7296 && real_dconstp (TREE_REALPART (expr), value)
7297 && real_zerop (TREE_IMAGPART (expr))));
7298 }
7299
7300 /* A subroutine of fold_builtin to fold the various logarithmic
7301 functions. EXP is the CALL_EXPR of a call to a builtin logN
7302 function. VALUE is the base of the logN function. */
7303
7304 static tree
7305 fold_builtin_logarithm (tree fndecl, tree arglist,
7306 const REAL_VALUE_TYPE *value)
7307 {
7308 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7309 {
7310 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7311 tree arg = TREE_VALUE (arglist);
7312 const enum built_in_function fcode = builtin_mathfn_code (arg);
7313
7314 /* Optimize logN(1.0) = 0.0. */
7315 if (real_onep (arg))
7316 return build_real (type, dconst0);
7317
7318 /* Optimize logN(N) = 1.0. If N can't be truncated to MODE
7319 exactly, then only do this if flag_unsafe_math_optimizations. */
7320 if (exact_real_truncate (TYPE_MODE (type), value)
7321 || flag_unsafe_math_optimizations)
7322 {
7323 const REAL_VALUE_TYPE value_truncate =
7324 real_value_truncate (TYPE_MODE (type), *value);
7325 if (real_dconstp (arg, &value_truncate))
7326 return build_real (type, dconst1);
7327 }
7328
7329 /* Special case, optimize logN(expN(x)) = x. */
7330 if (flag_unsafe_math_optimizations
7331 && ((value == &dconste
7332 && (fcode == BUILT_IN_EXP
7333 || fcode == BUILT_IN_EXPF
7334 || fcode == BUILT_IN_EXPL))
7335 || (value == &dconst2
7336 && (fcode == BUILT_IN_EXP2
7337 || fcode == BUILT_IN_EXP2F
7338 || fcode == BUILT_IN_EXP2L))
7339 || (value == &dconst10 && (BUILTIN_EXP10_P (fcode)))))
7340 return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
7341
7342 /* Optimize logN(func()) for various exponential functions. We
7343 want to determine the value "x" and the power "exponent" in
7344 order to transform logN(x**exponent) into exponent*logN(x). */
7345 if (flag_unsafe_math_optimizations)
7346 {
7347 tree exponent = 0, x = 0;
7348
7349 switch (fcode)
7350 {
7351 case BUILT_IN_EXP:
7352 case BUILT_IN_EXPF:
7353 case BUILT_IN_EXPL:
7354 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7355 x = build_real (type,
7356 real_value_truncate (TYPE_MODE (type), dconste));
7357 exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
7358 break;
7359 case BUILT_IN_EXP2:
7360 case BUILT_IN_EXP2F:
7361 case BUILT_IN_EXP2L:
7362 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7363 x = build_real (type, dconst2);
7364 exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
7365 break;
7366 case BUILT_IN_EXP10:
7367 case BUILT_IN_EXP10F:
7368 case BUILT_IN_EXP10L:
7369 case BUILT_IN_POW10:
7370 case BUILT_IN_POW10F:
7371 case BUILT_IN_POW10L:
7372 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7373 x = build_real (type, dconst10);
7374 exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
7375 break;
7376 case BUILT_IN_SQRT:
7377 case BUILT_IN_SQRTF:
7378 case BUILT_IN_SQRTL:
7379 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7380 x = TREE_VALUE (TREE_OPERAND (arg, 1));
7381 exponent = build_real (type, dconsthalf);
7382 break;
7383 case BUILT_IN_CBRT:
7384 case BUILT_IN_CBRTF:
7385 case BUILT_IN_CBRTL:
7386 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7387 x = TREE_VALUE (TREE_OPERAND (arg, 1));
7388 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7389 dconstthird));
7390 break;
7391 case BUILT_IN_POW:
7392 case BUILT_IN_POWF:
7393 case BUILT_IN_POWL:
7394 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7395 x = TREE_VALUE (TREE_OPERAND (arg, 1));
7396 exponent = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
7397 break;
7398 default:
7399 break;
7400 }
7401
7402 /* Now perform the optimization. */
7403 if (x && exponent)
7404 {
7405 tree logfn;
7406 arglist = build_tree_list (NULL_TREE, x);
7407 logfn = build_function_call_expr (fndecl, arglist);
7408 return fold (build2 (MULT_EXPR, type, exponent, logfn));
7409 }
7410 }
7411 }
7412
7413 return 0;
7414 }
7415
7416 /* Fold a builtin function call to pow, powf, or powl. Return
7417 NULL_TREE if no simplification can be made. */
7418 static tree
7419 fold_builtin_pow (tree fndecl, tree arglist, tree type)
7420 {
7421 tree arg0 = TREE_VALUE (arglist);
7422 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7423
7424 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7425 return NULL_TREE;
7426
7427 /* Optimize pow(1.0,y) = 1.0. */
7428 if (real_onep (arg0))
7429 return omit_one_operand (type, build_real (type, dconst1), arg1);
7430
7431 if (TREE_CODE (arg1) == REAL_CST
7432 && ! TREE_CONSTANT_OVERFLOW (arg1))
7433 {
7434 REAL_VALUE_TYPE cint;
7435 REAL_VALUE_TYPE c;
7436 HOST_WIDE_INT n;
7437
7438 c = TREE_REAL_CST (arg1);
7439
7440 /* Optimize pow(x,0.0) = 1.0. */
7441 if (REAL_VALUES_EQUAL (c, dconst0))
7442 return omit_one_operand (type, build_real (type, dconst1),
7443 arg0);
7444
7445 /* Optimize pow(x,1.0) = x. */
7446 if (REAL_VALUES_EQUAL (c, dconst1))
7447 return arg0;
7448
7449 /* Optimize pow(x,-1.0) = 1.0/x. */
7450 if (REAL_VALUES_EQUAL (c, dconstm1))
7451 return fold (build2 (RDIV_EXPR, type,
7452 build_real (type, dconst1), arg0));
7453
7454 /* Optimize pow(x,0.5) = sqrt(x). */
7455 if (flag_unsafe_math_optimizations
7456 && REAL_VALUES_EQUAL (c, dconsthalf))
7457 {
7458 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7459
7460 if (sqrtfn != NULL_TREE)
7461 {
7462 tree arglist = build_tree_list (NULL_TREE, arg0);
7463 return build_function_call_expr (sqrtfn, arglist);
7464 }
7465 }
7466
7467 /* Check for an integer exponent. */
7468 n = real_to_integer (&c);
7469 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7470 if (real_identical (&c, &cint))
7471 {
7472 /* Attempt to evaluate pow at compile-time. */
7473 if (TREE_CODE (arg0) == REAL_CST
7474 && ! TREE_CONSTANT_OVERFLOW (arg0))
7475 {
7476 REAL_VALUE_TYPE x;
7477 bool inexact;
7478
7479 x = TREE_REAL_CST (arg0);
7480 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7481 if (flag_unsafe_math_optimizations || !inexact)
7482 return build_real (type, x);
7483 }
7484
7485 /* Strip sign ops from even integer powers. */
7486 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7487 {
7488 tree narg0 = fold_strip_sign_ops (arg0);
7489 if (narg0)
7490 {
7491 arglist = build_tree_list (NULL_TREE, arg1);
7492 arglist = tree_cons (NULL_TREE, narg0, arglist);
7493 return build_function_call_expr (fndecl, arglist);
7494 }
7495 }
7496 }
7497 }
7498
7499 if (flag_unsafe_math_optimizations)
7500 {
7501 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7502
7503 /* Optimize pow(expN(x),y) = expN(x*y). */
7504 if (BUILTIN_EXPONENT_P (fcode))
7505 {
7506 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7507 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
7508 arg = fold (build2 (MULT_EXPR, type, arg, arg1));
7509 arglist = build_tree_list (NULL_TREE, arg);
7510 return build_function_call_expr (expfn, arglist);
7511 }
7512
7513 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7514 if (BUILTIN_SQRT_P (fcode))
7515 {
7516 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7517 tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
7518 build_real (type, dconsthalf)));
7519
7520 arglist = tree_cons (NULL_TREE, narg0,
7521 build_tree_list (NULL_TREE, narg1));
7522 return build_function_call_expr (fndecl, arglist);
7523 }
7524
7525 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7526 if (BUILTIN_CBRT_P (fcode))
7527 {
7528 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
7529 if (tree_expr_nonnegative_p (arg))
7530 {
7531 const REAL_VALUE_TYPE dconstroot
7532 = real_value_truncate (TYPE_MODE (type), dconstthird);
7533 tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
7534 build_real (type, dconstroot)));
7535 arglist = tree_cons (NULL_TREE, arg,
7536 build_tree_list (NULL_TREE, narg1));
7537 return build_function_call_expr (fndecl, arglist);
7538 }
7539 }
7540
7541 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
7542 if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
7543 || fcode == BUILT_IN_POWL)
7544 {
7545 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7546 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7547 tree narg1 = fold (build2 (MULT_EXPR, type, arg01, arg1));
7548 arglist = tree_cons (NULL_TREE, arg00,
7549 build_tree_list (NULL_TREE, narg1));
7550 return build_function_call_expr (fndecl, arglist);
7551 }
7552 }
7553
7554 return NULL_TREE;
7555 }
7556
7557 /* Fold a builtin function call to powi, powif, or powil. Return
7558 NULL_TREE if no simplification can be made. */
7559 static tree
7560 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED, tree arglist, tree type)
7561 {
7562 tree arg0 = TREE_VALUE (arglist);
7563 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7564
7565 if (!validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
7566 return NULL_TREE;
7567
7568 /* Optimize pow(1.0,y) = 1.0. */
7569 if (real_onep (arg0))
7570 return omit_one_operand (type, build_real (type, dconst1), arg1);
7571
7572 if (host_integerp (arg1, 0))
7573 {
7574 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7575
7576 /* Evaluate powi at compile-time. */
7577 if (TREE_CODE (arg0) == REAL_CST
7578 && ! TREE_CONSTANT_OVERFLOW (arg0))
7579 {
7580 REAL_VALUE_TYPE x;
7581 x = TREE_REAL_CST (arg0);
7582 real_powi (&x, TYPE_MODE (type), &x, c);
7583 return build_real (type, x);
7584 }
7585
7586 /* Optimize pow(x,0) = 1.0. */
7587 if (c == 0)
7588 return omit_one_operand (type, build_real (type, dconst1),
7589 arg0);
7590
7591 /* Optimize pow(x,1) = x. */
7592 if (c == 1)
7593 return arg0;
7594
7595 /* Optimize pow(x,-1) = 1.0/x. */
7596 if (c == -1)
7597 return fold (build2 (RDIV_EXPR, type,
7598 build_real (type, dconst1), arg0));
7599 }
7600
7601 return NULL_TREE;
7602 }
7603
7604 /* A subroutine of fold_builtin to fold the various exponent
7605 functions. EXP is the CALL_EXPR of a call to a builtin function.
7606 VALUE is the value which will be raised to a power. */
7607
7608 static tree
7609 fold_builtin_exponent (tree fndecl, tree arglist,
7610 const REAL_VALUE_TYPE *value)
7611 {
7612 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7613 {
7614 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7615 tree arg = TREE_VALUE (arglist);
7616
7617 /* Optimize exp*(0.0) = 1.0. */
7618 if (real_zerop (arg))
7619 return build_real (type, dconst1);
7620
7621 /* Optimize expN(1.0) = N. */
7622 if (real_onep (arg))
7623 {
7624 REAL_VALUE_TYPE cst;
7625
7626 real_convert (&cst, TYPE_MODE (type), value);
7627 return build_real (type, cst);
7628 }
7629
7630 /* Attempt to evaluate expN(integer) at compile-time. */
7631 if (flag_unsafe_math_optimizations
7632 && TREE_CODE (arg) == REAL_CST
7633 && ! TREE_CONSTANT_OVERFLOW (arg))
7634 {
7635 REAL_VALUE_TYPE cint;
7636 REAL_VALUE_TYPE c;
7637 HOST_WIDE_INT n;
7638
7639 c = TREE_REAL_CST (arg);
7640 n = real_to_integer (&c);
7641 real_from_integer (&cint, VOIDmode, n,
7642 n < 0 ? -1 : 0, 0);
7643 if (real_identical (&c, &cint))
7644 {
7645 REAL_VALUE_TYPE x;
7646
7647 real_powi (&x, TYPE_MODE (type), value, n);
7648 return build_real (type, x);
7649 }
7650 }
7651
7652 /* Optimize expN(logN(x)) = x. */
7653 if (flag_unsafe_math_optimizations)
7654 {
7655 const enum built_in_function fcode = builtin_mathfn_code (arg);
7656
7657 if ((value == &dconste
7658 && (fcode == BUILT_IN_LOG
7659 || fcode == BUILT_IN_LOGF
7660 || fcode == BUILT_IN_LOGL))
7661 || (value == &dconst2
7662 && (fcode == BUILT_IN_LOG2
7663 || fcode == BUILT_IN_LOG2F
7664 || fcode == BUILT_IN_LOG2L))
7665 || (value == &dconst10
7666 && (fcode == BUILT_IN_LOG10
7667 || fcode == BUILT_IN_LOG10F
7668 || fcode == BUILT_IN_LOG10L)))
7669 return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
7670 }
7671 }
7672
7673 return 0;
7674 }
7675
7676 /* Fold function call to builtin memcpy. Return
7677 NULL_TREE if no simplification can be made. */
7678
7679 static tree
7680 fold_builtin_memcpy (tree fndecl, tree arglist)
7681 {
7682 tree dest, src, len;
7683
7684 if (!validate_arglist (arglist,
7685 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7686 return 0;
7687
7688 dest = TREE_VALUE (arglist);
7689 src = TREE_VALUE (TREE_CHAIN (arglist));
7690 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7691
7692 /* If the LEN parameter is zero, return DEST. */
7693 if (integer_zerop (len))
7694 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
7695
7696 /* If SRC and DEST are the same (and not volatile), return DEST. */
7697 if (operand_equal_p (src, dest, 0))
7698 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
7699
7700 return 0;
7701 }
7702
7703 /* Fold function call to builtin mempcpy. Return
7704 NULL_TREE if no simplification can be made. */
7705
7706 static tree
7707 fold_builtin_mempcpy (tree arglist, tree type, int endp)
7708 {
7709 if (validate_arglist (arglist,
7710 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7711 {
7712 tree dest = TREE_VALUE (arglist);
7713 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7714 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7715
7716 /* If the LEN parameter is zero, return DEST. */
7717 if (integer_zerop (len))
7718 return omit_one_operand (type, dest, src);
7719
7720 /* If SRC and DEST are the same (and not volatile), return DEST+LEN. */
7721 if (operand_equal_p (src, dest, 0))
7722 {
7723 if (endp == 0)
7724 return omit_one_operand (type, dest, len);
7725
7726 if (endp == 2)
7727 len = fold (build2 (MINUS_EXPR, TREE_TYPE (len), len,
7728 ssize_int (1)));
7729
7730 len = fold_convert (TREE_TYPE (dest), len);
7731 len = fold (build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len));
7732 return fold_convert (type, len);
7733 }
7734 }
7735 return 0;
7736 }
7737
7738 /* Fold function call to builtin memmove. Return
7739 NULL_TREE if no simplification can be made. */
7740
7741 static tree
7742 fold_builtin_memmove (tree arglist, tree type)
7743 {
7744 tree dest, src, len;
7745
7746 if (!validate_arglist (arglist,
7747 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7748 return 0;
7749
7750 dest = TREE_VALUE (arglist);
7751 src = TREE_VALUE (TREE_CHAIN (arglist));
7752 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7753
7754 /* If the LEN parameter is zero, return DEST. */
7755 if (integer_zerop (len))
7756 return omit_one_operand (type, dest, src);
7757
7758 /* If SRC and DEST are the same (and not volatile), return DEST. */
7759 if (operand_equal_p (src, dest, 0))
7760 return omit_one_operand (type, dest, len);
7761
7762 return 0;
7763 }
7764
7765 /* Fold function call to builtin strcpy. If LEN is not NULL, it represents
7766 the length of the string to be copied. Return NULL_TREE if no
7767 simplification can be made. */
7768
7769 tree
7770 fold_builtin_strcpy (tree fndecl, tree arglist, tree len)
7771 {
7772 tree dest, src, fn;
7773
7774 if (!validate_arglist (arglist,
7775 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7776 return 0;
7777
7778 dest = TREE_VALUE (arglist);
7779 src = TREE_VALUE (TREE_CHAIN (arglist));
7780
7781 /* If SRC and DEST are the same (and not volatile), return DEST. */
7782 if (operand_equal_p (src, dest, 0))
7783 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
7784
7785 if (optimize_size)
7786 return 0;
7787
7788 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7789 if (!fn)
7790 return 0;
7791
7792 if (!len)
7793 {
7794 len = c_strlen (src, 1);
7795 if (! len || TREE_SIDE_EFFECTS (len))
7796 return 0;
7797 }
7798
7799 len = size_binop (PLUS_EXPR, len, ssize_int (1));
7800 arglist = build_tree_list (NULL_TREE, len);
7801 arglist = tree_cons (NULL_TREE, src, arglist);
7802 arglist = tree_cons (NULL_TREE, dest, arglist);
7803 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
7804 build_function_call_expr (fn, arglist));
7805 }
7806
7807 /* Fold function call to builtin strncpy. If SLEN is not NULL, it represents
7808 the length of the source string. Return NULL_TREE if no simplification
7809 can be made. */
7810
7811 tree
7812 fold_builtin_strncpy (tree fndecl, tree arglist, tree slen)
7813 {
7814 tree dest, src, len, fn;
7815
7816 if (!validate_arglist (arglist,
7817 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7818 return 0;
7819
7820 dest = TREE_VALUE (arglist);
7821 src = TREE_VALUE (TREE_CHAIN (arglist));
7822 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7823
7824 /* If the LEN parameter is zero, return DEST. */
7825 if (integer_zerop (len))
7826 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
7827
7828 /* We can't compare slen with len as constants below if len is not a
7829 constant. */
7830 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
7831 return 0;
7832
7833 if (!slen)
7834 slen = c_strlen (src, 1);
7835
7836 /* Now, we must be passed a constant src ptr parameter. */
7837 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
7838 return 0;
7839
7840 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
7841
7842 /* We do not support simplification of this case, though we do
7843 support it when expanding trees into RTL. */
7844 /* FIXME: generate a call to __builtin_memset. */
7845 if (tree_int_cst_lt (slen, len))
7846 return 0;
7847
7848 /* OK transform into builtin memcpy. */
7849 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7850 if (!fn)
7851 return 0;
7852 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
7853 build_function_call_expr (fn, arglist));
7854 }
7855
7856 /* Fold function call to builtin memcmp. Return
7857 NULL_TREE if no simplification can be made. */
7858
7859 static tree
7860 fold_builtin_memcmp (tree arglist)
7861 {
7862 tree arg1, arg2, len;
7863 const char *p1, *p2;
7864
7865 if (!validate_arglist (arglist,
7866 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7867 return 0;
7868
7869 arg1 = TREE_VALUE (arglist);
7870 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7871 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7872
7873 /* If the LEN parameter is zero, return zero. */
7874 if (integer_zerop (len))
7875 return omit_two_operands (integer_type_node, integer_zero_node,
7876 arg1, arg2);
7877
7878 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7879 if (operand_equal_p (arg1, arg2, 0))
7880 return omit_one_operand (integer_type_node, integer_zero_node, len);
7881
7882 p1 = c_getstr (arg1);
7883 p2 = c_getstr (arg2);
7884
7885 /* If all arguments are constant, and the value of len is not greater
7886 than the lengths of arg1 and arg2, evaluate at compile-time. */
7887 if (host_integerp (len, 1) && p1 && p2
7888 && compare_tree_int (len, strlen (p1) + 1) <= 0
7889 && compare_tree_int (len, strlen (p2) + 1) <= 0)
7890 {
7891 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
7892
7893 if (r > 0)
7894 return integer_one_node;
7895 else if (r < 0)
7896 return integer_minus_one_node;
7897 else
7898 return integer_zero_node;
7899 }
7900
7901 /* If len parameter is one, return an expression corresponding to
7902 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7903 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
7904 {
7905 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7906 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7907 tree ind1 = fold_convert (integer_type_node,
7908 build1 (INDIRECT_REF, cst_uchar_node,
7909 fold_convert (cst_uchar_ptr_node,
7910 arg1)));
7911 tree ind2 = fold_convert (integer_type_node,
7912 build1 (INDIRECT_REF, cst_uchar_node,
7913 fold_convert (cst_uchar_ptr_node,
7914 arg2)));
7915 return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
7916 }
7917
7918 return 0;
7919 }
7920
7921 /* Fold function call to builtin strcmp. Return
7922 NULL_TREE if no simplification can be made. */
7923
7924 static tree
7925 fold_builtin_strcmp (tree arglist)
7926 {
7927 tree arg1, arg2;
7928 const char *p1, *p2;
7929
7930 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7931 return 0;
7932
7933 arg1 = TREE_VALUE (arglist);
7934 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7935
7936 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7937 if (operand_equal_p (arg1, arg2, 0))
7938 return integer_zero_node;
7939
7940 p1 = c_getstr (arg1);
7941 p2 = c_getstr (arg2);
7942
7943 if (p1 && p2)
7944 {
7945 const int i = strcmp (p1, p2);
7946 if (i < 0)
7947 return integer_minus_one_node;
7948 else if (i > 0)
7949 return integer_one_node;
7950 else
7951 return integer_zero_node;
7952 }
7953
7954 /* If the second arg is "", return *(const unsigned char*)arg1. */
7955 if (p2 && *p2 == '\0')
7956 {
7957 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7958 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7959 return fold_convert (integer_type_node,
7960 build1 (INDIRECT_REF, cst_uchar_node,
7961 fold_convert (cst_uchar_ptr_node,
7962 arg1)));
7963 }
7964
7965 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7966 if (p1 && *p1 == '\0')
7967 {
7968 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7969 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7970 tree temp = fold_convert (integer_type_node,
7971 build1 (INDIRECT_REF, cst_uchar_node,
7972 fold_convert (cst_uchar_ptr_node,
7973 arg2)));
7974 return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
7975 }
7976
7977 return 0;
7978 }
7979
7980 /* Fold function call to builtin strncmp. Return
7981 NULL_TREE if no simplification can be made. */
7982
7983 static tree
7984 fold_builtin_strncmp (tree arglist)
7985 {
7986 tree arg1, arg2, len;
7987 const char *p1, *p2;
7988
7989 if (!validate_arglist (arglist,
7990 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7991 return 0;
7992
7993 arg1 = TREE_VALUE (arglist);
7994 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7995 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7996
7997 /* If the LEN parameter is zero, return zero. */
7998 if (integer_zerop (len))
7999 return omit_two_operands (integer_type_node, integer_zero_node,
8000 arg1, arg2);
8001
8002 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8003 if (operand_equal_p (arg1, arg2, 0))
8004 return omit_one_operand (integer_type_node, integer_zero_node, len);
8005
8006 p1 = c_getstr (arg1);
8007 p2 = c_getstr (arg2);
8008
8009 if (host_integerp (len, 1) && p1 && p2)
8010 {
8011 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8012 if (i > 0)
8013 return integer_one_node;
8014 else if (i < 0)
8015 return integer_minus_one_node;
8016 else
8017 return integer_zero_node;
8018 }
8019
8020 /* If the second arg is "", and the length is greater than zero,
8021 return *(const unsigned char*)arg1. */
8022 if (p2 && *p2 == '\0'
8023 && TREE_CODE (len) == INTEGER_CST
8024 && tree_int_cst_sgn (len) == 1)
8025 {
8026 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8027 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
8028 return fold_convert (integer_type_node,
8029 build1 (INDIRECT_REF, cst_uchar_node,
8030 fold_convert (cst_uchar_ptr_node,
8031 arg1)));
8032 }
8033
8034 /* If the first arg is "", and the length is greater than zero,
8035 return -*(const unsigned char*)arg2. */
8036 if (p1 && *p1 == '\0'
8037 && TREE_CODE (len) == INTEGER_CST
8038 && tree_int_cst_sgn (len) == 1)
8039 {
8040 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8041 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
8042 tree temp = fold_convert (integer_type_node,
8043 build1 (INDIRECT_REF, cst_uchar_node,
8044 fold_convert (cst_uchar_ptr_node,
8045 arg2)));
8046 return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
8047 }
8048
8049 /* If len parameter is one, return an expression corresponding to
8050 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8051 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8052 {
8053 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8054 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
8055 tree ind1 = fold_convert (integer_type_node,
8056 build1 (INDIRECT_REF, cst_uchar_node,
8057 fold_convert (cst_uchar_ptr_node,
8058 arg1)));
8059 tree ind2 = fold_convert (integer_type_node,
8060 build1 (INDIRECT_REF, cst_uchar_node,
8061 fold_convert (cst_uchar_ptr_node,
8062 arg2)));
8063 return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
8064 }
8065
8066 return 0;
8067 }
8068
8069 /* Fold function call to builtin signbit, signbitf or signbitl. Return
8070 NULL_TREE if no simplification can be made. */
8071
8072 static tree
8073 fold_builtin_signbit (tree fndecl, tree arglist)
8074 {
8075 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8076 tree arg, temp;
8077
8078 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
8079 return NULL_TREE;
8080
8081 arg = TREE_VALUE (arglist);
8082
8083 /* If ARG is a compile-time constant, determine the result. */
8084 if (TREE_CODE (arg) == REAL_CST
8085 && !TREE_CONSTANT_OVERFLOW (arg))
8086 {
8087 REAL_VALUE_TYPE c;
8088
8089 c = TREE_REAL_CST (arg);
8090 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8091 return fold_convert (type, temp);
8092 }
8093
8094 /* If ARG is non-negative, the result is always zero. */
8095 if (tree_expr_nonnegative_p (arg))
8096 return omit_one_operand (type, integer_zero_node, arg);
8097
8098 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8099 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8100 return fold (build2 (LT_EXPR, type, arg,
8101 build_real (TREE_TYPE (arg), dconst0)));
8102
8103 return NULL_TREE;
8104 }
8105
8106 /* Fold function call to builtin copysign, copysignf or copysignl.
8107 Return NULL_TREE if no simplification can be made. */
8108
8109 static tree
8110 fold_builtin_copysign (tree fndecl, tree arglist, tree type)
8111 {
8112 tree arg1, arg2, tem;
8113
8114 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
8115 return NULL_TREE;
8116
8117 arg1 = TREE_VALUE (arglist);
8118 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8119
8120 /* copysign(X,X) is X. */
8121 if (operand_equal_p (arg1, arg2, 0))
8122 return fold_convert (type, arg1);
8123
8124 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8125 if (TREE_CODE (arg1) == REAL_CST
8126 && TREE_CODE (arg2) == REAL_CST
8127 && !TREE_CONSTANT_OVERFLOW (arg1)
8128 && !TREE_CONSTANT_OVERFLOW (arg2))
8129 {
8130 REAL_VALUE_TYPE c1, c2;
8131
8132 c1 = TREE_REAL_CST (arg1);
8133 c2 = TREE_REAL_CST (arg2);
8134 real_copysign (&c1, &c2);
8135 return build_real (type, c1);
8136 c1.sign = c2.sign;
8137 }
8138
8139 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8140 Remember to evaluate Y for side-effects. */
8141 if (tree_expr_nonnegative_p (arg2))
8142 return omit_one_operand (type,
8143 fold (build1 (ABS_EXPR, type, arg1)),
8144 arg2);
8145
8146 /* Strip sign changing operations for the first argument. */
8147 tem = fold_strip_sign_ops (arg1);
8148 if (tem)
8149 {
8150 arglist = tree_cons (NULL_TREE, tem, TREE_CHAIN (arglist));
8151 return build_function_call_expr (fndecl, arglist);
8152 }
8153
8154 return NULL_TREE;
8155 }
8156
8157 /* Fold a call to builtin isascii. */
8158
8159 static tree
8160 fold_builtin_isascii (tree arglist)
8161 {
8162 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
8163 return 0;
8164 else
8165 {
8166 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8167 tree arg = TREE_VALUE (arglist);
8168
8169 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
8170 build_int_cst (NULL_TREE,
8171 ~ (unsigned HOST_WIDE_INT) 0x7f));
8172 arg = fold (build2 (EQ_EXPR, integer_type_node,
8173 arg, integer_zero_node));
8174
8175 if (in_gimple_form && !TREE_CONSTANT (arg))
8176 return NULL_TREE;
8177 else
8178 return arg;
8179 }
8180 }
8181
8182 /* Fold a call to builtin toascii. */
8183
8184 static tree
8185 fold_builtin_toascii (tree arglist)
8186 {
8187 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
8188 return 0;
8189 else
8190 {
8191 /* Transform toascii(c) -> (c & 0x7f). */
8192 tree arg = TREE_VALUE (arglist);
8193
8194 return fold (build2 (BIT_AND_EXPR, integer_type_node, arg,
8195 build_int_cst (NULL_TREE, 0x7f)));
8196 }
8197 }
8198
8199 /* Fold a call to builtin isdigit. */
8200
8201 static tree
8202 fold_builtin_isdigit (tree arglist)
8203 {
8204 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
8205 return 0;
8206 else
8207 {
8208 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8209 /* According to the C standard, isdigit is unaffected by locale.
8210 However, it definitely is affected by the target character set. */
8211 tree arg;
8212 unsigned HOST_WIDE_INT target_digit0
8213 = lang_hooks.to_target_charset ('0');
8214
8215 if (target_digit0 == 0)
8216 return NULL_TREE;
8217
8218 arg = fold_convert (unsigned_type_node, TREE_VALUE (arglist));
8219 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
8220 build_int_cst (unsigned_type_node, target_digit0));
8221 arg = build2 (LE_EXPR, integer_type_node, arg,
8222 build_int_cst (unsigned_type_node, 9));
8223 arg = fold (arg);
8224 if (in_gimple_form && !TREE_CONSTANT (arg))
8225 return NULL_TREE;
8226 else
8227 return arg;
8228 }
8229 }
8230
8231 /* Fold a call to fabs, fabsf or fabsl. */
8232
8233 static tree
8234 fold_builtin_fabs (tree arglist, tree type)
8235 {
8236 tree arg;
8237
8238 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
8239 return 0;
8240
8241 arg = TREE_VALUE (arglist);
8242 arg = fold_convert (type, arg);
8243 if (TREE_CODE (arg) == REAL_CST)
8244 return fold_abs_const (arg, type);
8245 return fold (build1 (ABS_EXPR, type, arg));
8246 }
8247
8248 /* Fold a call to abs, labs, llabs or imaxabs. */
8249
8250 static tree
8251 fold_builtin_abs (tree arglist, tree type)
8252 {
8253 tree arg;
8254
8255 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
8256 return 0;
8257
8258 arg = TREE_VALUE (arglist);
8259 arg = fold_convert (type, arg);
8260 if (TREE_CODE (arg) == INTEGER_CST)
8261 return fold_abs_const (arg, type);
8262 return fold (build1 (ABS_EXPR, type, arg));
8263 }
8264
8265 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8266 EXP is the CALL_EXPR for the call. */
8267
8268 static tree
8269 fold_builtin_classify (tree fndecl, tree arglist, int builtin_index)
8270 {
8271 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8272 tree arg;
8273 REAL_VALUE_TYPE r;
8274
8275 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
8276 {
8277 /* Check that we have exactly one argument. */
8278 if (arglist == 0)
8279 {
8280 error ("too few arguments to function %qs",
8281 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8282 return error_mark_node;
8283 }
8284 else if (TREE_CHAIN (arglist) != 0)
8285 {
8286 error ("too many arguments to function %qs",
8287 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8288 return error_mark_node;
8289 }
8290 else
8291 {
8292 error ("non-floating-point argument to function %qs",
8293 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8294 return error_mark_node;
8295 }
8296 }
8297
8298 arg = TREE_VALUE (arglist);
8299 switch (builtin_index)
8300 {
8301 case BUILT_IN_ISINF:
8302 if (!MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
8303 return omit_one_operand (type, integer_zero_node, arg);
8304
8305 if (TREE_CODE (arg) == REAL_CST)
8306 {
8307 r = TREE_REAL_CST (arg);
8308 if (real_isinf (&r))
8309 return real_compare (GT_EXPR, &r, &dconst0)
8310 ? integer_one_node : integer_minus_one_node;
8311 else
8312 return integer_zero_node;
8313 }
8314
8315 return NULL_TREE;
8316
8317 case BUILT_IN_FINITE:
8318 if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg)))
8319 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
8320 return omit_one_operand (type, integer_zero_node, arg);
8321
8322 if (TREE_CODE (arg) == REAL_CST)
8323 {
8324 r = TREE_REAL_CST (arg);
8325 return real_isinf (&r) || real_isnan (&r)
8326 ? integer_zero_node : integer_one_node;
8327 }
8328
8329 return NULL_TREE;
8330
8331 case BUILT_IN_ISNAN:
8332 if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg))))
8333 return omit_one_operand (type, integer_zero_node, arg);
8334
8335 if (TREE_CODE (arg) == REAL_CST)
8336 {
8337 r = TREE_REAL_CST (arg);
8338 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8339 }
8340
8341 arg = builtin_save_expr (arg);
8342 return fold (build2 (UNORDERED_EXPR, type, arg, arg));
8343
8344 default:
8345 gcc_unreachable ();
8346 }
8347 }
8348
8349 /* Fold a call to an unordered comparison function such as
8350 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8351 being called and ARGLIST is the argument list for the call.
8352 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8353 the opposite of the desired result. UNORDERED_CODE is used
8354 for modes that can hold NaNs and ORDERED_CODE is used for
8355 the rest. */
8356
8357 static tree
8358 fold_builtin_unordered_cmp (tree fndecl, tree arglist,
8359 enum tree_code unordered_code,
8360 enum tree_code ordered_code)
8361 {
8362 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8363 enum tree_code code;
8364 tree arg0, arg1;
8365 tree type0, type1;
8366 enum tree_code code0, code1;
8367 tree cmp_type = NULL_TREE;
8368
8369 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
8370 {
8371 /* Check that we have exactly two arguments. */
8372 if (arglist == 0 || TREE_CHAIN (arglist) == 0)
8373 {
8374 error ("too few arguments to function %qs",
8375 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8376 return error_mark_node;
8377 }
8378 else if (TREE_CHAIN (TREE_CHAIN (arglist)) != 0)
8379 {
8380 error ("too many arguments to function %qs",
8381 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8382 return error_mark_node;
8383 }
8384 }
8385
8386 arg0 = TREE_VALUE (arglist);
8387 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
8388
8389 type0 = TREE_TYPE (arg0);
8390 type1 = TREE_TYPE (arg1);
8391
8392 code0 = TREE_CODE (type0);
8393 code1 = TREE_CODE (type1);
8394
8395 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8396 /* Choose the wider of two real types. */
8397 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8398 ? type0 : type1;
8399 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8400 cmp_type = type0;
8401 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8402 cmp_type = type1;
8403 else
8404 {
8405 error ("non-floating-point argument to function %qs",
8406 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8407 return error_mark_node;
8408 }
8409
8410 arg0 = fold_convert (cmp_type, arg0);
8411 arg1 = fold_convert (cmp_type, arg1);
8412
8413 if (unordered_code == UNORDERED_EXPR)
8414 {
8415 if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8416 return omit_two_operands (type, integer_zero_node, arg0, arg1);
8417 return fold (build2 (UNORDERED_EXPR, type, arg0, arg1));
8418 }
8419
8420 code = MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
8421 : ordered_code;
8422 return fold (build1 (TRUTH_NOT_EXPR, type,
8423 fold (build2 (code, type, arg0, arg1))));
8424 }
8425
8426 /* Fold a call to one of the external complex multiply libcalls. */
8427
8428 static tree
8429 fold_builtin_complex_mul (tree type, tree arglist)
8430 {
8431 tree ar, ai, br, bi;
8432
8433 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE,
8434 REAL_TYPE, VOID_TYPE))
8435 return NULL;
8436
8437 ar = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
8438 ai = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
8439 br = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
8440 bi = TREE_VALUE (arglist);
8441
8442 return fold_complex_mult_parts (type, ar, ai, br, bi);
8443 }
8444
8445 /* Fold a call to one of the external complex division libcalls. */
8446
8447 static tree
8448 fold_builtin_complex_div (tree type, tree arglist)
8449 {
8450 tree ar, ai, br, bi;
8451
8452 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE,
8453 REAL_TYPE, VOID_TYPE))
8454 return NULL;
8455
8456 ar = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
8457 ai = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
8458 br = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
8459 bi = TREE_VALUE (arglist);
8460
8461 return fold_complex_div_parts (type, ar, ai, br, bi, RDIV_EXPR);
8462 }
8463
8464 /* Used by constant folding to simplify calls to builtin functions. EXP is
8465 the CALL_EXPR of a call to a builtin function. IGNORE is true if the
8466 result of the function call is ignored. This function returns NULL_TREE
8467 if no simplification was possible. */
8468
8469 static tree
8470 fold_builtin_1 (tree fndecl, tree arglist, bool ignore)
8471 {
8472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8473 enum built_in_function fcode;
8474
8475 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8476 return targetm.fold_builtin (fndecl, arglist, ignore);
8477
8478 fcode = DECL_FUNCTION_CODE (fndecl);
8479 switch (fcode)
8480 {
8481 case BUILT_IN_FPUTS:
8482 return fold_builtin_fputs (arglist, ignore, false, NULL_TREE);
8483
8484 case BUILT_IN_FPUTS_UNLOCKED:
8485 return fold_builtin_fputs (arglist, ignore, true, NULL_TREE);
8486
8487 case BUILT_IN_STRSTR:
8488 return fold_builtin_strstr (arglist, type);
8489
8490 case BUILT_IN_STRCAT:
8491 return fold_builtin_strcat (arglist);
8492
8493 case BUILT_IN_STRNCAT:
8494 return fold_builtin_strncat (arglist);
8495
8496 case BUILT_IN_STRSPN:
8497 return fold_builtin_strspn (arglist);
8498
8499 case BUILT_IN_STRCSPN:
8500 return fold_builtin_strcspn (arglist);
8501
8502 case BUILT_IN_STRCHR:
8503 case BUILT_IN_INDEX:
8504 return fold_builtin_strchr (arglist, type);
8505
8506 case BUILT_IN_STRRCHR:
8507 case BUILT_IN_RINDEX:
8508 return fold_builtin_strrchr (arglist, type);
8509
8510 case BUILT_IN_STRCPY:
8511 return fold_builtin_strcpy (fndecl, arglist, NULL_TREE);
8512
8513 case BUILT_IN_STRNCPY:
8514 return fold_builtin_strncpy (fndecl, arglist, NULL_TREE);
8515
8516 case BUILT_IN_STRCMP:
8517 return fold_builtin_strcmp (arglist);
8518
8519 case BUILT_IN_STRNCMP:
8520 return fold_builtin_strncmp (arglist);
8521
8522 case BUILT_IN_STRPBRK:
8523 return fold_builtin_strpbrk (arglist, type);
8524
8525 case BUILT_IN_BCMP:
8526 case BUILT_IN_MEMCMP:
8527 return fold_builtin_memcmp (arglist);
8528
8529 case BUILT_IN_SPRINTF:
8530 return fold_builtin_sprintf (arglist, ignore);
8531
8532 case BUILT_IN_CONSTANT_P:
8533 {
8534 tree val;
8535
8536 val = fold_builtin_constant_p (arglist);
8537 /* Gimplification will pull the CALL_EXPR for the builtin out of
8538 an if condition. When not optimizing, we'll not CSE it back.
8539 To avoid link error types of regressions, return false now. */
8540 if (!val && !optimize)
8541 val = integer_zero_node;
8542
8543 return val;
8544 }
8545
8546 case BUILT_IN_EXPECT:
8547 return fold_builtin_expect (arglist);
8548
8549 case BUILT_IN_CLASSIFY_TYPE:
8550 return fold_builtin_classify_type (arglist);
8551
8552 case BUILT_IN_STRLEN:
8553 return fold_builtin_strlen (arglist);
8554
8555 case BUILT_IN_FABS:
8556 case BUILT_IN_FABSF:
8557 case BUILT_IN_FABSL:
8558 return fold_builtin_fabs (arglist, type);
8559
8560 case BUILT_IN_ABS:
8561 case BUILT_IN_LABS:
8562 case BUILT_IN_LLABS:
8563 case BUILT_IN_IMAXABS:
8564 return fold_builtin_abs (arglist, type);
8565
8566 case BUILT_IN_CONJ:
8567 case BUILT_IN_CONJF:
8568 case BUILT_IN_CONJL:
8569 if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
8570 return fold (build1 (CONJ_EXPR, type, TREE_VALUE (arglist)));
8571 break;
8572
8573 case BUILT_IN_CREAL:
8574 case BUILT_IN_CREALF:
8575 case BUILT_IN_CREALL:
8576 if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
8577 return non_lvalue (fold (build1 (REALPART_EXPR, type,
8578 TREE_VALUE (arglist))));
8579 break;
8580
8581 case BUILT_IN_CIMAG:
8582 case BUILT_IN_CIMAGF:
8583 case BUILT_IN_CIMAGL:
8584 if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
8585 return non_lvalue (fold (build1 (IMAGPART_EXPR, type,
8586 TREE_VALUE (arglist))));
8587 break;
8588
8589 case BUILT_IN_CABS:
8590 case BUILT_IN_CABSF:
8591 case BUILT_IN_CABSL:
8592 return fold_builtin_cabs (arglist, type);
8593
8594 case BUILT_IN_SQRT:
8595 case BUILT_IN_SQRTF:
8596 case BUILT_IN_SQRTL:
8597 return fold_builtin_sqrt (arglist, type);
8598
8599 case BUILT_IN_CBRT:
8600 case BUILT_IN_CBRTF:
8601 case BUILT_IN_CBRTL:
8602 return fold_builtin_cbrt (arglist, type);
8603
8604 case BUILT_IN_SIN:
8605 case BUILT_IN_SINF:
8606 case BUILT_IN_SINL:
8607 return fold_builtin_sin (arglist);
8608
8609 case BUILT_IN_COS:
8610 case BUILT_IN_COSF:
8611 case BUILT_IN_COSL:
8612 return fold_builtin_cos (arglist, type, fndecl);
8613
8614 case BUILT_IN_EXP:
8615 case BUILT_IN_EXPF:
8616 case BUILT_IN_EXPL:
8617 return fold_builtin_exponent (fndecl, arglist, &dconste);
8618
8619 case BUILT_IN_EXP2:
8620 case BUILT_IN_EXP2F:
8621 case BUILT_IN_EXP2L:
8622 return fold_builtin_exponent (fndecl, arglist, &dconst2);
8623
8624 case BUILT_IN_EXP10:
8625 case BUILT_IN_EXP10F:
8626 case BUILT_IN_EXP10L:
8627 case BUILT_IN_POW10:
8628 case BUILT_IN_POW10F:
8629 case BUILT_IN_POW10L:
8630 return fold_builtin_exponent (fndecl, arglist, &dconst10);
8631
8632 case BUILT_IN_LOG:
8633 case BUILT_IN_LOGF:
8634 case BUILT_IN_LOGL:
8635 return fold_builtin_logarithm (fndecl, arglist, &dconste);
8636
8637 case BUILT_IN_LOG2:
8638 case BUILT_IN_LOG2F:
8639 case BUILT_IN_LOG2L:
8640 return fold_builtin_logarithm (fndecl, arglist, &dconst2);
8641
8642 case BUILT_IN_LOG10:
8643 case BUILT_IN_LOG10F:
8644 case BUILT_IN_LOG10L:
8645 return fold_builtin_logarithm (fndecl, arglist, &dconst10);
8646
8647 case BUILT_IN_TAN:
8648 case BUILT_IN_TANF:
8649 case BUILT_IN_TANL:
8650 return fold_builtin_tan (arglist);
8651
8652 case BUILT_IN_ATAN:
8653 case BUILT_IN_ATANF:
8654 case BUILT_IN_ATANL:
8655 return fold_builtin_atan (arglist, type);
8656
8657 case BUILT_IN_POW:
8658 case BUILT_IN_POWF:
8659 case BUILT_IN_POWL:
8660 return fold_builtin_pow (fndecl, arglist, type);
8661
8662 case BUILT_IN_POWI:
8663 case BUILT_IN_POWIF:
8664 case BUILT_IN_POWIL:
8665 return fold_builtin_powi (fndecl, arglist, type);
8666
8667 case BUILT_IN_INF:
8668 case BUILT_IN_INFF:
8669 case BUILT_IN_INFL:
8670 return fold_builtin_inf (type, true);
8671
8672 case BUILT_IN_HUGE_VAL:
8673 case BUILT_IN_HUGE_VALF:
8674 case BUILT_IN_HUGE_VALL:
8675 return fold_builtin_inf (type, false);
8676
8677 case BUILT_IN_NAN:
8678 case BUILT_IN_NANF:
8679 case BUILT_IN_NANL:
8680 return fold_builtin_nan (arglist, type, true);
8681
8682 case BUILT_IN_NANS:
8683 case BUILT_IN_NANSF:
8684 case BUILT_IN_NANSL:
8685 return fold_builtin_nan (arglist, type, false);
8686
8687 case BUILT_IN_FLOOR:
8688 case BUILT_IN_FLOORF:
8689 case BUILT_IN_FLOORL:
8690 return fold_builtin_floor (fndecl, arglist);
8691
8692 case BUILT_IN_CEIL:
8693 case BUILT_IN_CEILF:
8694 case BUILT_IN_CEILL:
8695 return fold_builtin_ceil (fndecl, arglist);
8696
8697 case BUILT_IN_TRUNC:
8698 case BUILT_IN_TRUNCF:
8699 case BUILT_IN_TRUNCL:
8700 return fold_builtin_trunc (fndecl, arglist);
8701
8702 case BUILT_IN_ROUND:
8703 case BUILT_IN_ROUNDF:
8704 case BUILT_IN_ROUNDL:
8705 return fold_builtin_round (fndecl, arglist);
8706
8707 case BUILT_IN_NEARBYINT:
8708 case BUILT_IN_NEARBYINTF:
8709 case BUILT_IN_NEARBYINTL:
8710 case BUILT_IN_RINT:
8711 case BUILT_IN_RINTF:
8712 case BUILT_IN_RINTL:
8713 return fold_trunc_transparent_mathfn (fndecl, arglist);
8714
8715 case BUILT_IN_LCEIL:
8716 case BUILT_IN_LCEILF:
8717 case BUILT_IN_LCEILL:
8718 case BUILT_IN_LLCEIL:
8719 case BUILT_IN_LLCEILF:
8720 case BUILT_IN_LLCEILL:
8721 case BUILT_IN_LFLOOR:
8722 case BUILT_IN_LFLOORF:
8723 case BUILT_IN_LFLOORL:
8724 case BUILT_IN_LLFLOOR:
8725 case BUILT_IN_LLFLOORF:
8726 case BUILT_IN_LLFLOORL:
8727 case BUILT_IN_LROUND:
8728 case BUILT_IN_LROUNDF:
8729 case BUILT_IN_LROUNDL:
8730 case BUILT_IN_LLROUND:
8731 case BUILT_IN_LLROUNDF:
8732 case BUILT_IN_LLROUNDL:
8733 return fold_builtin_int_roundingfn (fndecl, arglist);
8734
8735 case BUILT_IN_LRINT:
8736 case BUILT_IN_LRINTF:
8737 case BUILT_IN_LRINTL:
8738 case BUILT_IN_LLRINT:
8739 case BUILT_IN_LLRINTF:
8740 case BUILT_IN_LLRINTL:
8741 return fold_fixed_mathfn (fndecl, arglist);
8742
8743 case BUILT_IN_FFS:
8744 case BUILT_IN_FFSL:
8745 case BUILT_IN_FFSLL:
8746 case BUILT_IN_CLZ:
8747 case BUILT_IN_CLZL:
8748 case BUILT_IN_CLZLL:
8749 case BUILT_IN_CTZ:
8750 case BUILT_IN_CTZL:
8751 case BUILT_IN_CTZLL:
8752 case BUILT_IN_POPCOUNT:
8753 case BUILT_IN_POPCOUNTL:
8754 case BUILT_IN_POPCOUNTLL:
8755 case BUILT_IN_PARITY:
8756 case BUILT_IN_PARITYL:
8757 case BUILT_IN_PARITYLL:
8758 return fold_builtin_bitop (fndecl, arglist);
8759
8760 case BUILT_IN_MEMCPY:
8761 return fold_builtin_memcpy (fndecl, arglist);
8762
8763 case BUILT_IN_MEMPCPY:
8764 return fold_builtin_mempcpy (arglist, type, /*endp=*/1);
8765
8766 case BUILT_IN_MEMMOVE:
8767 return fold_builtin_memmove (arglist, type);
8768
8769 case BUILT_IN_SIGNBIT:
8770 case BUILT_IN_SIGNBITF:
8771 case BUILT_IN_SIGNBITL:
8772 return fold_builtin_signbit (fndecl, arglist);
8773
8774 case BUILT_IN_ISASCII:
8775 return fold_builtin_isascii (arglist);
8776
8777 case BUILT_IN_TOASCII:
8778 return fold_builtin_toascii (arglist);
8779
8780 case BUILT_IN_ISDIGIT:
8781 return fold_builtin_isdigit (arglist);
8782
8783 case BUILT_IN_COPYSIGN:
8784 case BUILT_IN_COPYSIGNF:
8785 case BUILT_IN_COPYSIGNL:
8786 return fold_builtin_copysign (fndecl, arglist, type);
8787
8788 case BUILT_IN_FINITE:
8789 case BUILT_IN_FINITEF:
8790 case BUILT_IN_FINITEL:
8791 return fold_builtin_classify (fndecl, arglist, BUILT_IN_FINITE);
8792
8793 case BUILT_IN_ISINF:
8794 case BUILT_IN_ISINFF:
8795 case BUILT_IN_ISINFL:
8796 return fold_builtin_classify (fndecl, arglist, BUILT_IN_ISINF);
8797
8798 case BUILT_IN_ISNAN:
8799 case BUILT_IN_ISNANF:
8800 case BUILT_IN_ISNANL:
8801 return fold_builtin_classify (fndecl, arglist, BUILT_IN_ISNAN);
8802
8803 case BUILT_IN_ISGREATER:
8804 return fold_builtin_unordered_cmp (fndecl, arglist, UNLE_EXPR, LE_EXPR);
8805 case BUILT_IN_ISGREATEREQUAL:
8806 return fold_builtin_unordered_cmp (fndecl, arglist, UNLT_EXPR, LT_EXPR);
8807 case BUILT_IN_ISLESS:
8808 return fold_builtin_unordered_cmp (fndecl, arglist, UNGE_EXPR, GE_EXPR);
8809 case BUILT_IN_ISLESSEQUAL:
8810 return fold_builtin_unordered_cmp (fndecl, arglist, UNGT_EXPR, GT_EXPR);
8811 case BUILT_IN_ISLESSGREATER:
8812 return fold_builtin_unordered_cmp (fndecl, arglist, UNEQ_EXPR, EQ_EXPR);
8813 case BUILT_IN_ISUNORDERED:
8814 return fold_builtin_unordered_cmp (fndecl, arglist, UNORDERED_EXPR,
8815 NOP_EXPR);
8816
8817 /* We do the folding for va_start in the expander. */
8818 case BUILT_IN_VA_START:
8819 break;
8820
8821 default:
8822 if (fcode >= BUILT_IN_COMPLEX_MUL_MIN
8823 && fcode <= BUILT_IN_COMPLEX_MUL_MAX)
8824 return fold_builtin_complex_mul (type, arglist);
8825 if (fcode >= BUILT_IN_COMPLEX_DIV_MIN
8826 && fcode <= BUILT_IN_COMPLEX_DIV_MAX)
8827 return fold_builtin_complex_div (type, arglist);
8828 break;
8829 }
8830
8831 return 0;
8832 }
8833
8834 /* A wrapper function for builtin folding that prevents warnings for
8835 "statement without effect" and the like, caused by removing the
8836 call node earlier than the warning is generated. */
8837
8838 tree
8839 fold_builtin (tree fndecl, tree arglist, bool ignore)
8840 {
8841 tree exp = fold_builtin_1 (fndecl, arglist, ignore);
8842 if (exp)
8843 {
8844 /* ??? Don't clobber shared nodes such as integer_zero_node. */
8845 if (CONSTANT_CLASS_P (exp))
8846 exp = build1 (NOP_EXPR, TREE_TYPE (exp), exp);
8847 TREE_NO_WARNING (exp) = 1;
8848 }
8849
8850 return exp;
8851 }
8852
8853 /* Conveniently construct a function call expression. */
8854
8855 tree
8856 build_function_call_expr (tree fn, tree arglist)
8857 {
8858 tree call_expr;
8859
8860 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
8861 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
8862 call_expr, arglist, NULL_TREE);
8863 return fold (call_expr);
8864 }
8865
8866 /* This function validates the types of a function call argument list
8867 represented as a tree chain of parameters against a specified list
8868 of tree_codes. If the last specifier is a 0, that represents an
8869 ellipses, otherwise the last specifier must be a VOID_TYPE. */
8870
8871 static int
8872 validate_arglist (tree arglist, ...)
8873 {
8874 enum tree_code code;
8875 int res = 0;
8876 va_list ap;
8877
8878 va_start (ap, arglist);
8879
8880 do
8881 {
8882 code = va_arg (ap, enum tree_code);
8883 switch (code)
8884 {
8885 case 0:
8886 /* This signifies an ellipses, any further arguments are all ok. */
8887 res = 1;
8888 goto end;
8889 case VOID_TYPE:
8890 /* This signifies an endlink, if no arguments remain, return
8891 true, otherwise return false. */
8892 res = arglist == 0;
8893 goto end;
8894 default:
8895 /* If no parameters remain or the parameter's code does not
8896 match the specified code, return false. Otherwise continue
8897 checking any remaining arguments. */
8898 if (arglist == 0)
8899 goto end;
8900 if (code == POINTER_TYPE)
8901 {
8902 if (! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
8903 goto end;
8904 }
8905 else if (code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
8906 goto end;
8907 break;
8908 }
8909 arglist = TREE_CHAIN (arglist);
8910 }
8911 while (1);
8912
8913 /* We need gotos here since we can only have one VA_CLOSE in a
8914 function. */
8915 end: ;
8916 va_end (ap);
8917
8918 return res;
8919 }
8920
8921 /* Default target-specific builtin expander that does nothing. */
8922
8923 rtx
8924 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8925 rtx target ATTRIBUTE_UNUSED,
8926 rtx subtarget ATTRIBUTE_UNUSED,
8927 enum machine_mode mode ATTRIBUTE_UNUSED,
8928 int ignore ATTRIBUTE_UNUSED)
8929 {
8930 return NULL_RTX;
8931 }
8932
8933 /* Returns true is EXP represents data that would potentially reside
8934 in a readonly section. */
8935
8936 static bool
8937 readonly_data_expr (tree exp)
8938 {
8939 STRIP_NOPS (exp);
8940
8941 if (TREE_CODE (exp) != ADDR_EXPR)
8942 return false;
8943
8944 exp = get_base_address (TREE_OPERAND (exp, 0));
8945 if (!exp)
8946 return false;
8947
8948 /* Make sure we call decl_readonly_section only for trees it
8949 can handle (since it returns true for everything it doesn't
8950 understand). */
8951 if (TREE_CODE (exp) == STRING_CST
8952 || TREE_CODE (exp) == CONSTRUCTOR
8953 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8954 return decl_readonly_section (exp, 0);
8955 else
8956 return false;
8957 }
8958
8959 /* Simplify a call to the strstr builtin.
8960
8961 Return 0 if no simplification was possible, otherwise return the
8962 simplified form of the call as a tree.
8963
8964 The simplified form may be a constant or other expression which
8965 computes the same value, but in a more efficient manner (including
8966 calls to other builtin functions).
8967
8968 The call may contain arguments which need to be evaluated, but
8969 which are not useful to determine the result of the call. In
8970 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8971 COMPOUND_EXPR will be an argument which must be evaluated.
8972 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8973 COMPOUND_EXPR in the chain will contain the tree for the simplified
8974 form of the builtin function call. */
8975
8976 static tree
8977 fold_builtin_strstr (tree arglist, tree type)
8978 {
8979 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8980 return 0;
8981 else
8982 {
8983 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8984 tree fn;
8985 const char *p1, *p2;
8986
8987 p2 = c_getstr (s2);
8988 if (p2 == NULL)
8989 return 0;
8990
8991 p1 = c_getstr (s1);
8992 if (p1 != NULL)
8993 {
8994 const char *r = strstr (p1, p2);
8995 tree tem;
8996
8997 if (r == NULL)
8998 return build_int_cst (TREE_TYPE (s1), 0);
8999
9000 /* Return an offset into the constant string argument. */
9001 tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
9002 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
9003 return fold_convert (type, tem);
9004 }
9005
9006 if (p2[0] == '\0')
9007 return s1;
9008
9009 if (p2[1] != '\0')
9010 return 0;
9011
9012 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
9013 if (!fn)
9014 return 0;
9015
9016 /* New argument list transforming strstr(s1, s2) to
9017 strchr(s1, s2[0]). */
9018 arglist = build_tree_list (NULL_TREE,
9019 build_int_cst (NULL_TREE, p2[0]));
9020 arglist = tree_cons (NULL_TREE, s1, arglist);
9021 return build_function_call_expr (fn, arglist);
9022 }
9023 }
9024
9025 /* Simplify a call to the strchr builtin.
9026
9027 Return 0 if no simplification was possible, otherwise return the
9028 simplified form of the call as a tree.
9029
9030 The simplified form may be a constant or other expression which
9031 computes the same value, but in a more efficient manner (including
9032 calls to other builtin functions).
9033
9034 The call may contain arguments which need to be evaluated, but
9035 which are not useful to determine the result of the call. In
9036 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9037 COMPOUND_EXPR will be an argument which must be evaluated.
9038 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9039 COMPOUND_EXPR in the chain will contain the tree for the simplified
9040 form of the builtin function call. */
9041
9042 static tree
9043 fold_builtin_strchr (tree arglist, tree type)
9044 {
9045 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9046 return 0;
9047 else
9048 {
9049 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
9050 const char *p1;
9051
9052 if (TREE_CODE (s2) != INTEGER_CST)
9053 return 0;
9054
9055 p1 = c_getstr (s1);
9056 if (p1 != NULL)
9057 {
9058 char c;
9059 const char *r;
9060 tree tem;
9061
9062 if (target_char_cast (s2, &c))
9063 return 0;
9064
9065 r = strchr (p1, c);
9066
9067 if (r == NULL)
9068 return build_int_cst (TREE_TYPE (s1), 0);
9069
9070 /* Return an offset into the constant string argument. */
9071 tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
9072 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
9073 return fold_convert (type, tem);
9074 }
9075 return 0;
9076 }
9077 }
9078
9079 /* Simplify a call to the strrchr builtin.
9080
9081 Return 0 if no simplification was possible, otherwise return the
9082 simplified form of the call as a tree.
9083
9084 The simplified form may be a constant or other expression which
9085 computes the same value, but in a more efficient manner (including
9086 calls to other builtin functions).
9087
9088 The call may contain arguments which need to be evaluated, but
9089 which are not useful to determine the result of the call. In
9090 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9091 COMPOUND_EXPR will be an argument which must be evaluated.
9092 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9093 COMPOUND_EXPR in the chain will contain the tree for the simplified
9094 form of the builtin function call. */
9095
9096 static tree
9097 fold_builtin_strrchr (tree arglist, tree type)
9098 {
9099 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9100 return 0;
9101 else
9102 {
9103 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
9104 tree fn;
9105 const char *p1;
9106
9107 if (TREE_CODE (s2) != INTEGER_CST)
9108 return 0;
9109
9110 p1 = c_getstr (s1);
9111 if (p1 != NULL)
9112 {
9113 char c;
9114 const char *r;
9115 tree tem;
9116
9117 if (target_char_cast (s2, &c))
9118 return 0;
9119
9120 r = strrchr (p1, c);
9121
9122 if (r == NULL)
9123 return build_int_cst (TREE_TYPE (s1), 0);
9124
9125 /* Return an offset into the constant string argument. */
9126 tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
9127 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
9128 return fold_convert (type, tem);
9129 }
9130
9131 if (! integer_zerop (s2))
9132 return 0;
9133
9134 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
9135 if (!fn)
9136 return 0;
9137
9138 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9139 return build_function_call_expr (fn, arglist);
9140 }
9141 }
9142
9143 /* Simplify a call to the strpbrk builtin.
9144
9145 Return 0 if no simplification was possible, otherwise return the
9146 simplified form of the call as a tree.
9147
9148 The simplified form may be a constant or other expression which
9149 computes the same value, but in a more efficient manner (including
9150 calls to other builtin functions).
9151
9152 The call may contain arguments which need to be evaluated, but
9153 which are not useful to determine the result of the call. In
9154 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9155 COMPOUND_EXPR will be an argument which must be evaluated.
9156 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9157 COMPOUND_EXPR in the chain will contain the tree for the simplified
9158 form of the builtin function call. */
9159
9160 static tree
9161 fold_builtin_strpbrk (tree arglist, tree type)
9162 {
9163 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9164 return 0;
9165 else
9166 {
9167 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
9168 tree fn;
9169 const char *p1, *p2;
9170
9171 p2 = c_getstr (s2);
9172 if (p2 == NULL)
9173 return 0;
9174
9175 p1 = c_getstr (s1);
9176 if (p1 != NULL)
9177 {
9178 const char *r = strpbrk (p1, p2);
9179 tree tem;
9180
9181 if (r == NULL)
9182 return build_int_cst (TREE_TYPE (s1), 0);
9183
9184 /* Return an offset into the constant string argument. */
9185 tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
9186 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
9187 return fold_convert (type, tem);
9188 }
9189
9190 if (p2[0] == '\0')
9191 /* strpbrk(x, "") == NULL.
9192 Evaluate and ignore s1 in case it had side-effects. */
9193 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
9194
9195 if (p2[1] != '\0')
9196 return 0; /* Really call strpbrk. */
9197
9198 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
9199 if (!fn)
9200 return 0;
9201
9202 /* New argument list transforming strpbrk(s1, s2) to
9203 strchr(s1, s2[0]). */
9204 arglist = build_tree_list (NULL_TREE,
9205 build_int_cst (NULL_TREE, p2[0]));
9206 arglist = tree_cons (NULL_TREE, s1, arglist);
9207 return build_function_call_expr (fn, arglist);
9208 }
9209 }
9210
9211 /* Simplify a call to the strcat builtin.
9212
9213 Return 0 if no simplification was possible, otherwise return the
9214 simplified form of the call as a tree.
9215
9216 The simplified form may be a constant or other expression which
9217 computes the same value, but in a more efficient manner (including
9218 calls to other builtin functions).
9219
9220 The call may contain arguments which need to be evaluated, but
9221 which are not useful to determine the result of the call. In
9222 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9223 COMPOUND_EXPR will be an argument which must be evaluated.
9224 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9225 COMPOUND_EXPR in the chain will contain the tree for the simplified
9226 form of the builtin function call. */
9227
9228 static tree
9229 fold_builtin_strcat (tree arglist)
9230 {
9231 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9232 return 0;
9233 else
9234 {
9235 tree dst = TREE_VALUE (arglist),
9236 src = TREE_VALUE (TREE_CHAIN (arglist));
9237 const char *p = c_getstr (src);
9238
9239 /* If the string length is zero, return the dst parameter. */
9240 if (p && *p == '\0')
9241 return dst;
9242
9243 return 0;
9244 }
9245 }
9246
9247 /* Simplify a call to the strncat builtin.
9248
9249 Return 0 if no simplification was possible, otherwise return the
9250 simplified form of the call as a tree.
9251
9252 The simplified form may be a constant or other expression which
9253 computes the same value, but in a more efficient manner (including
9254 calls to other builtin functions).
9255
9256 The call may contain arguments which need to be evaluated, but
9257 which are not useful to determine the result of the call. In
9258 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9259 COMPOUND_EXPR will be an argument which must be evaluated.
9260 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9261 COMPOUND_EXPR in the chain will contain the tree for the simplified
9262 form of the builtin function call. */
9263
9264 static tree
9265 fold_builtin_strncat (tree arglist)
9266 {
9267 if (!validate_arglist (arglist,
9268 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9269 return 0;
9270 else
9271 {
9272 tree dst = TREE_VALUE (arglist);
9273 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9274 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9275 const char *p = c_getstr (src);
9276
9277 /* If the requested length is zero, or the src parameter string
9278 length is zero, return the dst parameter. */
9279 if (integer_zerop (len) || (p && *p == '\0'))
9280 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
9281
9282 /* If the requested len is greater than or equal to the string
9283 length, call strcat. */
9284 if (TREE_CODE (len) == INTEGER_CST && p
9285 && compare_tree_int (len, strlen (p)) >= 0)
9286 {
9287 tree newarglist
9288 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
9289 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
9290
9291 /* If the replacement _DECL isn't initialized, don't do the
9292 transformation. */
9293 if (!fn)
9294 return 0;
9295
9296 return build_function_call_expr (fn, newarglist);
9297 }
9298 return 0;
9299 }
9300 }
9301
9302 /* Simplify a call to the strspn builtin.
9303
9304 Return 0 if no simplification was possible, otherwise return the
9305 simplified form of the call as a tree.
9306
9307 The simplified form may be a constant or other expression which
9308 computes the same value, but in a more efficient manner (including
9309 calls to other builtin functions).
9310
9311 The call may contain arguments which need to be evaluated, but
9312 which are not useful to determine the result of the call. In
9313 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9314 COMPOUND_EXPR will be an argument which must be evaluated.
9315 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9316 COMPOUND_EXPR in the chain will contain the tree for the simplified
9317 form of the builtin function call. */
9318
9319 static tree
9320 fold_builtin_strspn (tree arglist)
9321 {
9322 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9323 return 0;
9324 else
9325 {
9326 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
9327 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9328
9329 /* If both arguments are constants, evaluate at compile-time. */
9330 if (p1 && p2)
9331 {
9332 const size_t r = strspn (p1, p2);
9333 return size_int (r);
9334 }
9335
9336 /* If either argument is "", return 0. */
9337 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9338 /* Evaluate and ignore both arguments in case either one has
9339 side-effects. */
9340 return omit_two_operands (integer_type_node, integer_zero_node,
9341 s1, s2);
9342 return 0;
9343 }
9344 }
9345
9346 /* Simplify a call to the strcspn builtin.
9347
9348 Return 0 if no simplification was possible, otherwise return the
9349 simplified form of the call as a tree.
9350
9351 The simplified form may be a constant or other expression which
9352 computes the same value, but in a more efficient manner (including
9353 calls to other builtin functions).
9354
9355 The call may contain arguments which need to be evaluated, but
9356 which are not useful to determine the result of the call. In
9357 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9358 COMPOUND_EXPR will be an argument which must be evaluated.
9359 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9360 COMPOUND_EXPR in the chain will contain the tree for the simplified
9361 form of the builtin function call. */
9362
9363 static tree
9364 fold_builtin_strcspn (tree arglist)
9365 {
9366 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9367 return 0;
9368 else
9369 {
9370 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
9371 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9372
9373 /* If both arguments are constants, evaluate at compile-time. */
9374 if (p1 && p2)
9375 {
9376 const size_t r = strcspn (p1, p2);
9377 return size_int (r);
9378 }
9379
9380 /* If the first argument is "", return 0. */
9381 if (p1 && *p1 == '\0')
9382 {
9383 /* Evaluate and ignore argument s2 in case it has
9384 side-effects. */
9385 return omit_one_operand (integer_type_node,
9386 integer_zero_node, s2);
9387 }
9388
9389 /* If the second argument is "", return __builtin_strlen(s1). */
9390 if (p2 && *p2 == '\0')
9391 {
9392 tree newarglist = build_tree_list (NULL_TREE, s1),
9393 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
9394
9395 /* If the replacement _DECL isn't initialized, don't do the
9396 transformation. */
9397 if (!fn)
9398 return 0;
9399
9400 return build_function_call_expr (fn, newarglist);
9401 }
9402 return 0;
9403 }
9404 }
9405
9406 /* Fold a call to the fputs builtin. IGNORE is true if the value returned
9407 by the builtin will be ignored. UNLOCKED is true is true if this
9408 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
9409 the known length of the string. Return NULL_TREE if no simplification
9410 was possible. */
9411
9412 tree
9413 fold_builtin_fputs (tree arglist, bool ignore, bool unlocked, tree len)
9414 {
9415 tree fn;
9416 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
9417 : implicit_built_in_decls[BUILT_IN_FPUTC];
9418 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
9419 : implicit_built_in_decls[BUILT_IN_FWRITE];
9420
9421 /* If the return value is used, or the replacement _DECL isn't
9422 initialized, don't do the transformation. */
9423 if (!ignore || !fn_fputc || !fn_fwrite)
9424 return 0;
9425
9426 /* Verify the arguments in the original call. */
9427 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9428 return 0;
9429
9430 if (! len)
9431 len = c_strlen (TREE_VALUE (arglist), 0);
9432
9433 /* Get the length of the string passed to fputs. If the length
9434 can't be determined, punt. */
9435 if (!len
9436 || TREE_CODE (len) != INTEGER_CST)
9437 return 0;
9438
9439 switch (compare_tree_int (len, 1))
9440 {
9441 case -1: /* length is 0, delete the call entirely . */
9442 return omit_one_operand (integer_type_node, integer_zero_node,
9443 TREE_VALUE (TREE_CHAIN (arglist)));
9444
9445 case 0: /* length is 1, call fputc. */
9446 {
9447 const char *p = c_getstr (TREE_VALUE (arglist));
9448
9449 if (p != NULL)
9450 {
9451 /* New argument list transforming fputs(string, stream) to
9452 fputc(string[0], stream). */
9453 arglist = build_tree_list (NULL_TREE,
9454 TREE_VALUE (TREE_CHAIN (arglist)));
9455 arglist = tree_cons (NULL_TREE,
9456 build_int_cst (NULL_TREE, p[0]),
9457 arglist);
9458 fn = fn_fputc;
9459 break;
9460 }
9461 }
9462 /* FALLTHROUGH */
9463 case 1: /* length is greater than 1, call fwrite. */
9464 {
9465 tree string_arg;
9466
9467 /* If optimizing for size keep fputs. */
9468 if (optimize_size)
9469 return 0;
9470 string_arg = TREE_VALUE (arglist);
9471 /* New argument list transforming fputs(string, stream) to
9472 fwrite(string, 1, len, stream). */
9473 arglist = build_tree_list (NULL_TREE,
9474 TREE_VALUE (TREE_CHAIN (arglist)));
9475 arglist = tree_cons (NULL_TREE, len, arglist);
9476 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
9477 arglist = tree_cons (NULL_TREE, string_arg, arglist);
9478 fn = fn_fwrite;
9479 break;
9480 }
9481 default:
9482 gcc_unreachable ();
9483 }
9484
9485 /* These optimizations are only performed when the result is ignored,
9486 hence there's no need to cast the result to integer_type_node. */
9487 return build_function_call_expr (fn, arglist);
9488 }
9489
9490 /* Fold the new_arg's arguments (ARGLIST). Returns true if there was an error
9491 produced. False otherwise. This is done so that we don't output the error
9492 or warning twice or three times. */
9493 bool
9494 fold_builtin_next_arg (tree arglist)
9495 {
9496 tree fntype = TREE_TYPE (current_function_decl);
9497
9498 if (TYPE_ARG_TYPES (fntype) == 0
9499 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9500 == void_type_node))
9501 {
9502 error ("%<va_start%> used in function with fixed args");
9503 return true;
9504 }
9505 else if (!arglist)
9506 {
9507 /* Evidently an out of date version of <stdarg.h>; can't validate
9508 va_start's second argument, but can still work as intended. */
9509 warning (0, "%<__builtin_next_arg%> called without an argument");
9510 return true;
9511 }
9512 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9513 when we checked the arguments and if needed issued a warning. */
9514 else if (!TREE_CHAIN (arglist)
9515 || !integer_zerop (TREE_VALUE (arglist))
9516 || !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
9517 || TREE_CHAIN (TREE_CHAIN (arglist)))
9518 {
9519 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9520 tree arg = TREE_VALUE (arglist);
9521
9522 if (TREE_CHAIN (arglist))
9523 {
9524 error ("%<va_start%> used with too many arguments");
9525 return true;
9526 }
9527
9528 /* Strip off all nops for the sake of the comparison. This
9529 is not quite the same as STRIP_NOPS. It does more.
9530 We must also strip off INDIRECT_EXPR for C++ reference
9531 parameters. */
9532 while (TREE_CODE (arg) == NOP_EXPR
9533 || TREE_CODE (arg) == CONVERT_EXPR
9534 || TREE_CODE (arg) == NON_LVALUE_EXPR
9535 || TREE_CODE (arg) == INDIRECT_REF)
9536 arg = TREE_OPERAND (arg, 0);
9537 if (arg != last_parm)
9538 {
9539 /* FIXME: Sometimes with the tree optimizers we can get the
9540 not the last argument even though the user used the last
9541 argument. We just warn and set the arg to be the last
9542 argument so that we will get wrong-code because of
9543 it. */
9544 warning (0, "second parameter of %<va_start%> not last named argument");
9545 }
9546 /* We want to verify the second parameter just once before the tree
9547 optimizers are run and then avoid keeping it in the tree,
9548 as otherwise we could warn even for correct code like:
9549 void foo (int i, ...)
9550 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9551 TREE_VALUE (arglist) = integer_zero_node;
9552 TREE_CHAIN (arglist) = build_tree_list (NULL, integer_zero_node);
9553 }
9554 return false;
9555 }
9556
9557
9558 /* Simplify a call to the sprintf builtin.
9559
9560 Return 0 if no simplification was possible, otherwise return the
9561 simplified form of the call as a tree. If IGNORED is true, it means that
9562 the caller does not use the returned value of the function. */
9563
9564 static tree
9565 fold_builtin_sprintf (tree arglist, int ignored)
9566 {
9567 tree call, retval, dest, fmt;
9568 const char *fmt_str = NULL;
9569
9570 /* Verify the required arguments in the original call. We deal with two
9571 types of sprintf() calls: 'sprintf (str, fmt)' and
9572 'sprintf (dest, "%s", orig)'. */
9573 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
9574 && !validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
9575 VOID_TYPE))
9576 return NULL_TREE;
9577
9578 /* Get the destination string and the format specifier. */
9579 dest = TREE_VALUE (arglist);
9580 fmt = TREE_VALUE (TREE_CHAIN (arglist));
9581
9582 /* Check whether the format is a literal string constant. */
9583 fmt_str = c_getstr (fmt);
9584 if (fmt_str == NULL)
9585 return NULL_TREE;
9586
9587 call = NULL_TREE;
9588 retval = NULL_TREE;
9589
9590 /* If the format doesn't contain % args or %%, use strcpy. */
9591 if (strchr (fmt_str, '%') == NULL)
9592 {
9593 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
9594
9595 if (!fn)
9596 return NULL_TREE;
9597
9598 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
9599 'format' is known to contain no % formats. */
9600 arglist = build_tree_list (NULL_TREE, fmt);
9601 arglist = tree_cons (NULL_TREE, dest, arglist);
9602 call = build_function_call_expr (fn, arglist);
9603 if (!ignored)
9604 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
9605 }
9606
9607 /* If the format is "%s", use strcpy if the result isn't used. */
9608 else if (fmt_str && strcmp (fmt_str, "%s") == 0)
9609 {
9610 tree fn, orig;
9611 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
9612
9613 if (!fn)
9614 return NULL_TREE;
9615
9616 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
9617 orig = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9618 arglist = build_tree_list (NULL_TREE, orig);
9619 arglist = tree_cons (NULL_TREE, dest, arglist);
9620 if (!ignored)
9621 {
9622 retval = c_strlen (orig, 1);
9623 if (!retval || TREE_CODE (retval) != INTEGER_CST)
9624 return NULL_TREE;
9625 }
9626 call = build_function_call_expr (fn, arglist);
9627 }
9628
9629 if (call && retval)
9630 {
9631 retval = convert
9632 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
9633 retval);
9634 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
9635 }
9636 else
9637 return call;
9638 }