gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Set up an array of _DECL trees; make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance, the runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 /* Trigonometric and mathematical constants used in builtin folding. */
83 static bool builtin_dconsts_init = false;
84 static REAL_VALUE_TYPE dconstpi;
85 static REAL_VALUE_TYPE dconste;
86
87 static int get_pointer_alignment PARAMS ((tree, unsigned int));
88 static tree c_strlen PARAMS ((tree));
89 static const char *c_getstr PARAMS ((tree));
90 static rtx c_readstr PARAMS ((const char *,
91 enum machine_mode));
92 static int target_char_cast PARAMS ((tree, char *));
93 static rtx get_memory_rtx PARAMS ((tree));
94 static int apply_args_size PARAMS ((void));
95 static int apply_result_size PARAMS ((void));
96 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
97 static rtx result_vector PARAMS ((int, rtx));
98 #endif
99 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
100 static void expand_builtin_prefetch PARAMS ((tree));
101 static rtx expand_builtin_apply_args PARAMS ((void));
102 static rtx expand_builtin_apply_args_1 PARAMS ((void));
103 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
104 static void expand_builtin_return PARAMS ((rtx));
105 static enum type_class type_to_class PARAMS ((tree));
106 static rtx expand_builtin_classify_type PARAMS ((tree));
107 static void expand_errno_check PARAMS ((tree, rtx));
108 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
109 static rtx expand_builtin_mathfn_2 PARAMS ((tree, rtx, rtx));
110 static rtx expand_builtin_constant_p PARAMS ((tree, enum machine_mode));
111 static rtx expand_builtin_args_info PARAMS ((tree));
112 static rtx expand_builtin_next_arg PARAMS ((tree));
113 static rtx expand_builtin_va_start PARAMS ((tree));
114 static rtx expand_builtin_va_end PARAMS ((tree));
115 static rtx expand_builtin_va_copy PARAMS ((tree));
116 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
117 enum machine_mode));
118 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
119 enum machine_mode));
120 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
121 enum machine_mode));
122 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
123 enum machine_mode));
124 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
125 enum machine_mode));
126 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
127 enum machine_mode));
128 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
129 enum machine_mode));
130 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
131 enum machine_mode));
132 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
133 enum machine_mode));
134 static rtx expand_builtin_mempcpy PARAMS ((tree, rtx,
135 enum machine_mode, int));
136 static rtx expand_builtin_memmove PARAMS ((tree, rtx,
137 enum machine_mode));
138 static rtx expand_builtin_bcopy PARAMS ((tree));
139 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
140 enum machine_mode));
141 static rtx expand_builtin_stpcpy PARAMS ((tree, rtx,
142 enum machine_mode));
143 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
144 enum machine_mode));
145 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
146 enum machine_mode));
147 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
148 enum machine_mode));
149 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
150 enum machine_mode));
151 static rtx expand_builtin_memset PARAMS ((tree, rtx,
152 enum machine_mode));
153 static rtx expand_builtin_bzero PARAMS ((tree));
154 static rtx expand_builtin_strlen PARAMS ((tree, rtx, enum machine_mode));
155 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
156 enum machine_mode));
157 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
158 enum machine_mode));
159 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
160 enum machine_mode));
161 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
162 enum machine_mode));
163 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
164 static rtx expand_builtin_unop PARAMS ((enum machine_mode,
165 tree, rtx, rtx, optab));
166 static rtx expand_builtin_frame_address PARAMS ((tree, tree));
167 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
168 static tree stabilize_va_list PARAMS ((tree, int));
169 static rtx expand_builtin_expect PARAMS ((tree, rtx));
170 static tree fold_builtin_constant_p PARAMS ((tree));
171 static tree fold_builtin_classify_type PARAMS ((tree));
172 static tree fold_builtin_inf PARAMS ((tree, int));
173 static tree fold_builtin_nan PARAMS ((tree, tree, int));
174 static int validate_arglist PARAMS ((tree, ...));
175 static tree fold_trunc_transparent_mathfn PARAMS ((tree));
176 static bool readonly_data_expr PARAMS ((tree));
177 static rtx expand_builtin_fabs PARAMS ((tree, rtx, rtx));
178 static rtx expand_builtin_cabs PARAMS ((tree, rtx));
179 static void init_builtin_dconsts PARAMS ((void));
180 static tree fold_builtin_cabs PARAMS ((tree, tree, tree));
181
182 /* Initialize mathematical constants for constant folding builtins.
183 These constants need to be given to at least 160 bits of precision. */
184
185 static void
186 init_builtin_dconsts ()
187 {
188 real_from_string (&dconstpi,
189 "3.1415926535897932384626433832795028841971693993751058209749445923078");
190 real_from_string (&dconste,
191 "2.7182818284590452353602874713526624977572470936999595749669676277241");
192
193 builtin_dconsts_init = true;
194 }
195
196 /* Return the alignment in bits of EXP, a pointer valued expression.
197 But don't return more than MAX_ALIGN no matter what.
198 The alignment returned is, by default, the alignment of the thing that
199 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
200
201 Otherwise, look at the expression to see if we can do better, i.e., if the
202 expression is actually pointing at an object whose alignment is tighter. */
203
204 static int
205 get_pointer_alignment (exp, max_align)
206 tree exp;
207 unsigned int max_align;
208 {
209 unsigned int align, inner;
210
211 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
212 return 0;
213
214 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
215 align = MIN (align, max_align);
216
217 while (1)
218 {
219 switch (TREE_CODE (exp))
220 {
221 case NOP_EXPR:
222 case CONVERT_EXPR:
223 case NON_LVALUE_EXPR:
224 exp = TREE_OPERAND (exp, 0);
225 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
226 return align;
227
228 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
229 align = MIN (inner, max_align);
230 break;
231
232 case PLUS_EXPR:
233 /* If sum of pointer + int, restrict our maximum alignment to that
234 imposed by the integer. If not, we can't do any better than
235 ALIGN. */
236 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
237 return align;
238
239 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
240 & (max_align / BITS_PER_UNIT - 1))
241 != 0)
242 max_align >>= 1;
243
244 exp = TREE_OPERAND (exp, 0);
245 break;
246
247 case ADDR_EXPR:
248 /* See what we are pointing at and look at its alignment. */
249 exp = TREE_OPERAND (exp, 0);
250 if (TREE_CODE (exp) == FUNCTION_DECL)
251 align = FUNCTION_BOUNDARY;
252 else if (DECL_P (exp))
253 align = DECL_ALIGN (exp);
254 #ifdef CONSTANT_ALIGNMENT
255 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
256 align = CONSTANT_ALIGNMENT (exp, align);
257 #endif
258 return MIN (align, max_align);
259
260 default:
261 return align;
262 }
263 }
264 }
265
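/* A worked example of the walk above (illustrative sketch only, not part of
   the original file): for

       double d;
       ... (char *) &d + 2 ...

   with MAX_ALIGN == 64, the initial alignment is TYPE_ALIGN (char) == 8;
   the PLUS_EXPR case clips MAX_ALIGN down to 16 bits because the constant
   offset 2 is only 2-byte aligned; and the ADDR_EXPR case finally returns
   MIN (DECL_ALIGN (d), 16) == 16, assuming a 64-bit-aligned double.  */
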
266 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
267 way, because it could contain a zero byte in the middle.
268 TREE_STRING_LENGTH is the size of the character array, not the string.
269
270 The value returned is of type `ssizetype'.
271
272 Unfortunately, string_constant can't access the values of const char
273 arrays with initializers, so neither can we do so here. */
274
275 static tree
276 c_strlen (src)
277 tree src;
278 {
279 tree offset_node;
280 HOST_WIDE_INT offset;
281 int max;
282 const char *ptr;
283
284 src = string_constant (src, &offset_node);
285 if (src == 0)
286 return 0;
287
288 max = TREE_STRING_LENGTH (src) - 1;
289 ptr = TREE_STRING_POINTER (src);
290
291 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
292 {
293 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
294 compute the offset to the following null if we don't know where to
295 start searching for it. */
296 int i;
297
298 for (i = 0; i < max; i++)
299 if (ptr[i] == 0)
300 return 0;
301
302 /* We don't know the starting offset, but we do know that the string
303 has no internal zero bytes. We can assume that the offset falls
304 within the bounds of the string; otherwise, the programmer deserves
305 what he gets. Subtract the offset from the length of the string,
306 and return that. This would perhaps not be valid if we were dealing
307 with named arrays in addition to literal string constants. */
308
309 return size_diffop (size_int (max), offset_node);
310 }
311
312 /* We have a known offset into the string. Start searching there for
313 a null character if we can represent it as a single HOST_WIDE_INT. */
314 if (offset_node == 0)
315 offset = 0;
316 else if (! host_integerp (offset_node, 0))
317 offset = -1;
318 else
319 offset = tree_low_cst (offset_node, 0);
320
321 /* If the offset is known to be out of bounds, warn, and call strlen at
322 runtime. */
323 if (offset < 0 || offset > max)
324 {
325 warning ("offset outside bounds of constant string");
326 return 0;
327 }
328
329 /* Use strlen to search for the first zero byte. Since any strings
330 constructed with build_string will have nulls appended, we win even
331 if we get handed something like (char[4])"abcd".
332
333 Since OFFSET is our starting index into the string, no further
334 calculation is needed. */
335 return ssize_int (strlen (ptr + offset));
336 }
337
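/* Two illustrative cases for the function above (not part of the original
   file): for the tree form of "hello" + 1 the known-offset path returns
   ssize_int (4), while for "foo\0bar" + i with a non-constant i the scan
   finds the embedded zero byte and returns 0, so the caller falls back to
   a runtime strlen call.  */
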
338 /* Return a char pointer for a C string if it is a string constant
339 or the sum of a string constant and an integer constant. */
340
341 static const char *
342 c_getstr (src)
343 tree src;
344 {
345 tree offset_node;
346
347 src = string_constant (src, &offset_node);
348 if (src == 0)
349 return 0;
350
351 if (offset_node == 0)
352 return TREE_STRING_POINTER (src);
353 else if (!host_integerp (offset_node, 1)
354 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
355 return 0;
356
357 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
358 }
359
360 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
361 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
362
363 static rtx
364 c_readstr (str, mode)
365 const char *str;
366 enum machine_mode mode;
367 {
368 HOST_WIDE_INT c[2];
369 HOST_WIDE_INT ch;
370 unsigned int i, j;
371
372 if (GET_MODE_CLASS (mode) != MODE_INT)
373 abort ();
374 c[0] = 0;
375 c[1] = 0;
376 ch = 1;
377 for (i = 0; i < GET_MODE_SIZE (mode); i++)
378 {
379 j = i;
380 if (WORDS_BIG_ENDIAN)
381 j = GET_MODE_SIZE (mode) - i - 1;
382 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
383 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
384 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
385 j *= BITS_PER_UNIT;
386 if (j > 2 * HOST_BITS_PER_WIDE_INT)
387 abort ();
388 if (ch)
389 ch = (unsigned char) str[i];
390 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
391 }
392 return immed_double_const (c[0], c[1], mode);
393 }
394
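/* Illustrative result of the function above (assuming 8-bit bytes and a
   little-endian target, where neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN
   is set): c_readstr ("abcd", SImode) packs byte I at bit position 8*I and
   so yields the CONST_INT 0x64636261.  */
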
395 /* Cast a target constant CST to target CHAR and, if that value fits into
396 the host char type, return zero and put that value into the variable
397 pointed to by P. */
398
399 static int
400 target_char_cast (cst, p)
401 tree cst;
402 char *p;
403 {
404 unsigned HOST_WIDE_INT val, hostval;
405
406 if (!host_integerp (cst, 1)
407 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
408 return 1;
409
410 val = tree_low_cst (cst, 1);
411 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
412 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
413
414 hostval = val;
415 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
416 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
417
418 if (val != hostval)
419 return 1;
420
421 *p = hostval;
422 return 0;
423 }
424
425 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
426 times to get the address of either a higher stack frame, or a return
427 address located within it (depending on FNDECL_CODE). */
428
429 rtx
430 expand_builtin_return_addr (fndecl_code, count, tem)
431 enum built_in_function fndecl_code;
432 int count;
433 rtx tem;
434 {
435 int i;
436
437 /* Some machines need special handling before we can access
438 arbitrary frames. For example, on the sparc, we must first flush
439 all register windows to the stack. */
440 #ifdef SETUP_FRAME_ADDRESSES
441 if (count > 0)
442 SETUP_FRAME_ADDRESSES ();
443 #endif
444
445 /* On the sparc, the return address is not in the frame, it is in a
446 register. There is no way to access it off of the current frame
447 pointer, but it can be accessed off the previous frame pointer by
448 reading the value from the register window save area. */
449 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
450 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
451 count--;
452 #endif
453
454 /* Scan back COUNT frames to the specified frame. */
455 for (i = 0; i < count; i++)
456 {
457 /* Assume the dynamic chain pointer is in the word that the
458 frame address points to, unless otherwise specified. */
459 #ifdef DYNAMIC_CHAIN_ADDRESS
460 tem = DYNAMIC_CHAIN_ADDRESS (tem);
461 #endif
462 tem = memory_address (Pmode, tem);
463 tem = gen_rtx_MEM (Pmode, tem);
464 set_mem_alias_set (tem, get_frame_alias_set ());
465 tem = copy_to_reg (tem);
466 }
467
468 /* For __builtin_frame_address, return what we've got. */
469 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
470 return tem;
471
472 /* For __builtin_return_address, get the return address from that
473 frame. */
474 #ifdef RETURN_ADDR_RTX
475 tem = RETURN_ADDR_RTX (count, tem);
476 #else
477 tem = memory_address (Pmode,
478 plus_constant (tem, GET_MODE_SIZE (Pmode)));
479 tem = gen_rtx_MEM (Pmode, tem);
480 set_mem_alias_set (tem, get_frame_alias_set ());
481 #endif
482 return tem;
483 }
484
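/* Source-level forms that end up in the expander above (illustrative
   sketch, not part of the original file):

     void *ra = __builtin_return_address (0);   => return address of the
                                                    current frame
     void *fp = __builtin_frame_address (1);    => frame address one level
                                                    up, reached by following
                                                    the dynamic chain once  */
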
485 /* Alias set used for setjmp buffer. */
486 static HOST_WIDE_INT setjmp_alias_set = -1;
487
488 /* Construct the leading half of a __builtin_setjmp call. Control will
489 return to RECEIVER_LABEL. This is used directly by sjlj exception
490 handling code. */
491
492 void
493 expand_builtin_setjmp_setup (buf_addr, receiver_label)
494 rtx buf_addr;
495 rtx receiver_label;
496 {
497 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
498 rtx stack_save;
499 rtx mem;
500
501 if (setjmp_alias_set == -1)
502 setjmp_alias_set = new_alias_set ();
503
504 #ifdef POINTERS_EXTEND_UNSIGNED
505 if (GET_MODE (buf_addr) != Pmode)
506 buf_addr = convert_memory_address (Pmode, buf_addr);
507 #endif
508
509 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
510
511 emit_queue ();
512
513 /* We store the frame pointer and the address of receiver_label in
514 the buffer and use the rest of it for the stack save area, which
515 is machine-dependent. */
516
517 #ifndef BUILTIN_SETJMP_FRAME_VALUE
518 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
519 #endif
520
521 mem = gen_rtx_MEM (Pmode, buf_addr);
522 set_mem_alias_set (mem, setjmp_alias_set);
523 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
524
525 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
526 set_mem_alias_set (mem, setjmp_alias_set);
527
528 emit_move_insn (validize_mem (mem),
529 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
530
531 stack_save = gen_rtx_MEM (sa_mode,
532 plus_constant (buf_addr,
533 2 * GET_MODE_SIZE (Pmode)));
534 set_mem_alias_set (stack_save, setjmp_alias_set);
535 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
536
537 /* If there is further processing to do, do it. */
538 #ifdef HAVE_builtin_setjmp_setup
539 if (HAVE_builtin_setjmp_setup)
540 emit_insn (gen_builtin_setjmp_setup (buf_addr));
541 #endif
542
543 /* Tell optimize_save_area_alloca that extra work is going to
544 need to go on during alloca. */
545 current_function_calls_setjmp = 1;
546
547 /* Set this so all the registers get saved in our frame; we need to be
548 able to copy the saved values for any registers from frames we unwind. */
549 current_function_has_nonlocal_label = 1;
550 }
551
552 /* Construct the trailing part of a __builtin_setjmp call.
553 This is used directly by sjlj exception handling code. */
554
555 void
556 expand_builtin_setjmp_receiver (receiver_label)
557 rtx receiver_label ATTRIBUTE_UNUSED;
558 {
559 /* Clobber the FP when we get here, so we have to make sure it's
560 marked as used by this function. */
561 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
562
563 /* Mark the static chain as clobbered here so life information
564 doesn't get messed up for it. */
565 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
566
567 /* Now put in the code to restore the frame pointer, and argument
568 pointer, if needed. The code below is from expand_end_bindings
569 in stmt.c; see detailed documentation there. */
570 #ifdef HAVE_nonlocal_goto
571 if (! HAVE_nonlocal_goto)
572 #endif
573 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
574
575 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
576 if (fixed_regs[ARG_POINTER_REGNUM])
577 {
578 #ifdef ELIMINABLE_REGS
579 size_t i;
580 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
581
582 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
583 if (elim_regs[i].from == ARG_POINTER_REGNUM
584 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
585 break;
586
587 if (i == ARRAY_SIZE (elim_regs))
588 #endif
589 {
590 /* Now restore our arg pointer from the address at which it
591 was saved in our stack frame. */
592 emit_move_insn (virtual_incoming_args_rtx,
593 copy_to_reg (get_arg_pointer_save_area (cfun)));
594 }
595 }
596 #endif
597
598 #ifdef HAVE_builtin_setjmp_receiver
599 if (HAVE_builtin_setjmp_receiver)
600 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
601 else
602 #endif
603 #ifdef HAVE_nonlocal_goto_receiver
604 if (HAVE_nonlocal_goto_receiver)
605 emit_insn (gen_nonlocal_goto_receiver ());
606 else
607 #endif
608 { /* Nothing */ }
609
610 /* @@@ This is a kludge. Not all machine descriptions define a blockage
611 insn, but we must not allow the code we just generated to be reordered
612 by scheduling. Specifically, the update of the frame pointer must
613 happen immediately, not later. So emit an ASM_INPUT to act as blockage
614 insn. */
615 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
616 }
617
618 /* __builtin_setjmp is passed a pointer to an array of five words (not
619 all will be used on all machines). It operates similarly to the C
620 library function of the same name, but is more efficient. Much of
621 the code below (and for longjmp) is copied from the handling of
622 non-local gotos.
623
624 NOTE: This is intended for use by GNAT and the exception handling
625 scheme in the compiler and will only work in the method used by
626 them. */
627
628 static rtx
629 expand_builtin_setjmp (arglist, target)
630 tree arglist;
631 rtx target;
632 {
633 rtx buf_addr, next_lab, cont_lab;
634
635 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
636 return NULL_RTX;
637
638 if (target == 0 || GET_CODE (target) != REG
639 || REGNO (target) < FIRST_PSEUDO_REGISTER)
640 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
641
642 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
643
644 next_lab = gen_label_rtx ();
645 cont_lab = gen_label_rtx ();
646
647 expand_builtin_setjmp_setup (buf_addr, next_lab);
648
649 /* Set TARGET to zero and branch to the continue label. */
650 emit_move_insn (target, const0_rtx);
651 emit_jump_insn (gen_jump (cont_lab));
652 emit_barrier ();
653 emit_label (next_lab);
654
655 expand_builtin_setjmp_receiver (next_lab);
656
657 /* Set TARGET to one. */
658 emit_move_insn (target, const1_rtx);
659 emit_label (cont_lab);
660
661 /* Tell flow about the strange goings on. Putting `next_lab' on
662 `nonlocal_goto_handler_labels' indicates that function
663 calls may traverse the arc back to this label. */
664
665 current_function_has_nonlocal_label = 1;
666 nonlocal_goto_handler_labels
667 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
668
669 return target;
670 }
671
672 /* __builtin_longjmp is passed a pointer to an array of five words (not
673 all will be used on all machines). It operates similarly to the C
674 library function of the same name, but is more efficient. Much of
675 the code below is copied from the handling of non-local gotos.
676
677 NOTE: This is intended for use by GNAT and the exception handling
678 scheme in the compiler and will only work in the method used by
679 them. */
680
681 void
682 expand_builtin_longjmp (buf_addr, value)
683 rtx buf_addr, value;
684 {
685 rtx fp, lab, stack, insn, last;
686 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
687
688 if (setjmp_alias_set == -1)
689 setjmp_alias_set = new_alias_set ();
690
691 #ifdef POINTERS_EXTEND_UNSIGNED
692 if (GET_MODE (buf_addr) != Pmode)
693 buf_addr = convert_memory_address (Pmode, buf_addr);
694 #endif
695
696 buf_addr = force_reg (Pmode, buf_addr);
697
698 /* We used to store value in static_chain_rtx, but that fails if pointers
699 are smaller than integers. We instead require that the user must pass
700 a second argument of 1, because that is what builtin_setjmp will
701 return. This also makes EH slightly more efficient, since we are no
702 longer copying around a value that we don't care about. */
703 if (value != const1_rtx)
704 abort ();
705
706 current_function_calls_longjmp = 1;
707
708 last = get_last_insn ();
709 #ifdef HAVE_builtin_longjmp
710 if (HAVE_builtin_longjmp)
711 emit_insn (gen_builtin_longjmp (buf_addr));
712 else
713 #endif
714 {
715 fp = gen_rtx_MEM (Pmode, buf_addr);
716 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
717 GET_MODE_SIZE (Pmode)));
718
719 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
720 2 * GET_MODE_SIZE (Pmode)));
721 set_mem_alias_set (fp, setjmp_alias_set);
722 set_mem_alias_set (lab, setjmp_alias_set);
723 set_mem_alias_set (stack, setjmp_alias_set);
724
725 /* Pick up FP, label, and SP from the block and jump. This code is
726 from expand_goto in stmt.c; see there for detailed comments. */
727 #if HAVE_nonlocal_goto
728 if (HAVE_nonlocal_goto)
729 /* We have to pass a value to the nonlocal_goto pattern that will
730 get copied into the static_chain pointer, but it does not matter
731 what that value is, because builtin_setjmp does not use it. */
732 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
733 else
734 #endif
735 {
736 lab = copy_to_reg (lab);
737
738 emit_move_insn (hard_frame_pointer_rtx, fp);
739 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
740
741 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
742 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
743 emit_indirect_jump (lab);
744 }
745 }
746
747 /* Search backwards and mark the jump insn as a non-local goto.
748 Note that this precludes the use of __builtin_longjmp to a
749 __builtin_setjmp target in the same function. However, we've
750 already cautioned the user that these functions are for
751 internal exception handling use only. */
752 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
753 {
754 if (insn == last)
755 abort ();
756 if (GET_CODE (insn) == JUMP_INSN)
757 {
758 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
759 REG_NOTES (insn));
760 break;
761 }
762 else if (GET_CODE (insn) == CALL_INSN)
763 break;
764 }
765 }
766
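/* Illustrative use of the setjmp/longjmp builtins handled above (a sketch
   only; as noted, they are meant for the exception-handling machinery, and
   do_work and handle_unwind are placeholder names):

     void *buf[5];                    -- frame pointer, receiver label and
                                         stack save area live here
     if (__builtin_setjmp (buf) == 0)
       do_work ();                    -- normal path, setjmp returned 0
     else
       handle_unwind ();              -- reached via __builtin_longjmp (buf, 1)  */
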
767 /* Expand a call to __builtin_prefetch. For a target that does not support
768 data prefetch, evaluate the memory address argument in case it has side
769 effects. */
770
771 static void
772 expand_builtin_prefetch (arglist)
773 tree arglist;
774 {
775 tree arg0, arg1, arg2;
776 rtx op0, op1, op2;
777
778 if (!validate_arglist (arglist, POINTER_TYPE, 0))
779 return;
780
781 arg0 = TREE_VALUE (arglist);
782 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
783 zero (read) and argument 2 (locality) defaults to 3 (high degree of
784 locality). */
785 if (TREE_CHAIN (arglist))
786 {
787 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
788 if (TREE_CHAIN (TREE_CHAIN (arglist)))
789 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
790 else
791 arg2 = build_int_2 (3, 0);
792 }
793 else
794 {
795 arg1 = integer_zero_node;
796 arg2 = build_int_2 (3, 0);
797 }
798
799 /* Argument 0 is an address. */
800 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
801
802 /* Argument 1 (read/write flag) must be a compile-time constant int. */
803 if (TREE_CODE (arg1) != INTEGER_CST)
804 {
805 error ("second arg to `__builtin_prefetch' must be a constant");
806 arg1 = integer_zero_node;
807 }
808 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
809 /* Argument 1 must be either zero or one. */
810 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
811 {
812 warning ("invalid second arg to __builtin_prefetch; using zero");
813 op1 = const0_rtx;
814 }
815
816 /* Argument 2 (locality) must be a compile-time constant int. */
817 if (TREE_CODE (arg2) != INTEGER_CST)
818 {
819 error ("third arg to `__builtin_prefetch' must be a constant");
820 arg2 = integer_zero_node;
821 }
822 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
823 /* Argument 2 must be 0, 1, 2, or 3. */
824 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
825 {
826 warning ("invalid third arg to __builtin_prefetch; using zero");
827 op2 = const0_rtx;
828 }
829
830 #ifdef HAVE_prefetch
831 if (HAVE_prefetch)
832 {
833 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
834 (op0,
835 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
836 || (GET_MODE(op0) != Pmode))
837 {
838 #ifdef POINTERS_EXTEND_UNSIGNED
839 if (GET_MODE(op0) != Pmode)
840 op0 = convert_memory_address (Pmode, op0);
841 #endif
842 op0 = force_reg (Pmode, op0);
843 }
844 emit_insn (gen_prefetch (op0, op1, op2));
845 }
846 else
847 #endif
848 op0 = protect_from_queue (op0, 0);
849 /* Don't do anything with direct references to volatile memory, but
850 generate code to handle other side effects. */
851 if (GET_CODE (op0) != MEM && side_effects_p (op0))
852 emit_insn (op0);
853 }
854
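/* Example calls that reach the expander above (illustrative only):

     __builtin_prefetch (p);          -- read, locality 3: the defaults
     __builtin_prefetch (p, 1, 0);    -- prefetch for a write, with no
                                         expected temporal locality  */
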
855 /* Get a MEM rtx for expression EXP which is the address of an operand
856 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
857
858 static rtx
859 get_memory_rtx (exp)
860 tree exp;
861 {
862 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
863 rtx mem;
864
865 #ifdef POINTERS_EXTEND_UNSIGNED
866 if (GET_MODE (addr) != Pmode)
867 addr = convert_memory_address (Pmode, addr);
868 #endif
869
870 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
871
872 /* Get an expression we can use to find the attributes to assign to MEM.
873 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
874 we can. First remove any nops. */
875 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
876 || TREE_CODE (exp) == NON_LVALUE_EXPR)
877 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
878 exp = TREE_OPERAND (exp, 0);
879
880 if (TREE_CODE (exp) == ADDR_EXPR)
881 {
882 exp = TREE_OPERAND (exp, 0);
883 set_mem_attributes (mem, exp, 0);
884 }
885 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
886 {
887 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
888 /* memcpy, memset and other builtin stringops can alias with anything. */
889 set_mem_alias_set (mem, 0);
890 }
891
892 return mem;
893 }
894 \f
895 /* Built-in functions to perform an untyped call and return. */
896
897 /* For each register that may be used for calling a function, this
898 gives a mode used to copy the register's value. VOIDmode indicates
899 the register is not used for calling a function. If the machine
900 has register windows, this gives only the outbound registers.
901 INCOMING_REGNO gives the corresponding inbound register. */
902 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
903
904 /* For each register that may be used for returning values, this gives
905 a mode used to copy the register's value. VOIDmode indicates the
906 register is not used for returning values. If the machine has
907 register windows, this gives only the outbound registers.
908 INCOMING_REGNO gives the corresponding inbound register. */
909 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
910
911 /* For each register that may be used for calling a function, this
912 gives the offset of that register into the block returned by
913 __builtin_apply_args. 0 indicates that the register is not
914 used for calling a function. */
915 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
916
917 /* Return the offset of register REGNO into the block returned by
918 __builtin_apply_args. This is not declared static, since it is
919 needed in objc-act.c. */
920
921 int
922 apply_args_register_offset (regno)
923 int regno;
924 {
925 apply_args_size ();
926
927 /* Arguments are always put in outgoing registers (in the argument
928 block) when that makes sense. */
929 #ifdef OUTGOING_REGNO
930 regno = OUTGOING_REGNO (regno);
931 #endif
932 return apply_args_reg_offset[regno];
933 }
934
935 /* Return the size required for the block returned by __builtin_apply_args,
936 and initialize apply_args_mode. */
937
938 static int
939 apply_args_size ()
940 {
941 static int size = -1;
942 int align;
943 unsigned int regno;
944 enum machine_mode mode;
945
946 /* The values computed by this function never change. */
947 if (size < 0)
948 {
949 /* The first value is the incoming arg-pointer. */
950 size = GET_MODE_SIZE (Pmode);
951
952 /* The second value is the structure value address unless this is
953 passed as an "invisible" first argument. */
954 if (struct_value_rtx)
955 size += GET_MODE_SIZE (Pmode);
956
957 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
958 if (FUNCTION_ARG_REGNO_P (regno))
959 {
960 /* Search for the proper mode for copying this register's
961 value. I'm not sure this is right, but it works so far. */
962 enum machine_mode best_mode = VOIDmode;
963
964 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
965 mode != VOIDmode;
966 mode = GET_MODE_WIDER_MODE (mode))
967 if (HARD_REGNO_MODE_OK (regno, mode)
968 && HARD_REGNO_NREGS (regno, mode) == 1)
969 best_mode = mode;
970
971 if (best_mode == VOIDmode)
972 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
973 mode != VOIDmode;
974 mode = GET_MODE_WIDER_MODE (mode))
975 if (HARD_REGNO_MODE_OK (regno, mode)
976 && have_insn_for (SET, mode))
977 best_mode = mode;
978
979 if (best_mode == VOIDmode)
980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
981 mode != VOIDmode;
982 mode = GET_MODE_WIDER_MODE (mode))
983 if (HARD_REGNO_MODE_OK (regno, mode)
984 && have_insn_for (SET, mode))
985 best_mode = mode;
986
987 if (best_mode == VOIDmode)
988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
989 mode != VOIDmode;
990 mode = GET_MODE_WIDER_MODE (mode))
991 if (HARD_REGNO_MODE_OK (regno, mode)
992 && have_insn_for (SET, mode))
993 best_mode = mode;
994
995 mode = best_mode;
996 if (mode == VOIDmode)
997 abort ();
998
999 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1000 if (size % align != 0)
1001 size = CEIL (size, align) * align;
1002 apply_args_reg_offset[regno] = size;
1003 size += GET_MODE_SIZE (mode);
1004 apply_args_mode[regno] = mode;
1005 }
1006 else
1007 {
1008 apply_args_mode[regno] = VOIDmode;
1009 apply_args_reg_offset[regno] = 0;
1010 }
1011 }
1012 return size;
1013 }
1014
1015 /* Return the size required for the block returned by __builtin_apply,
1016 and initialize apply_result_mode. */
1017
1018 static int
1019 apply_result_size ()
1020 {
1021 static int size = -1;
1022 int align, regno;
1023 enum machine_mode mode;
1024
1025 /* The values computed by this function never change. */
1026 if (size < 0)
1027 {
1028 size = 0;
1029
1030 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1031 if (FUNCTION_VALUE_REGNO_P (regno))
1032 {
1033 /* Search for the proper mode for copying this register's
1034 value. I'm not sure this is right, but it works so far. */
1035 enum machine_mode best_mode = VOIDmode;
1036
1037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1038 mode != TImode;
1039 mode = GET_MODE_WIDER_MODE (mode))
1040 if (HARD_REGNO_MODE_OK (regno, mode))
1041 best_mode = mode;
1042
1043 if (best_mode == VOIDmode)
1044 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1045 mode != VOIDmode;
1046 mode = GET_MODE_WIDER_MODE (mode))
1047 if (HARD_REGNO_MODE_OK (regno, mode)
1048 && have_insn_for (SET, mode))
1049 best_mode = mode;
1050
1051 if (best_mode == VOIDmode)
1052 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1053 mode != VOIDmode;
1054 mode = GET_MODE_WIDER_MODE (mode))
1055 if (HARD_REGNO_MODE_OK (regno, mode)
1056 && have_insn_for (SET, mode))
1057 best_mode = mode;
1058
1059 if (best_mode == VOIDmode)
1060 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1061 mode != VOIDmode;
1062 mode = GET_MODE_WIDER_MODE (mode))
1063 if (HARD_REGNO_MODE_OK (regno, mode)
1064 && have_insn_for (SET, mode))
1065 best_mode = mode;
1066
1067 mode = best_mode;
1068 if (mode == VOIDmode)
1069 abort ();
1070
1071 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1072 if (size % align != 0)
1073 size = CEIL (size, align) * align;
1074 size += GET_MODE_SIZE (mode);
1075 apply_result_mode[regno] = mode;
1076 }
1077 else
1078 apply_result_mode[regno] = VOIDmode;
1079
1080 /* Allow targets that use untyped_call and untyped_return to override
1081 the size so that machine-specific information can be stored here. */
1082 #ifdef APPLY_RESULT_SIZE
1083 size = APPLY_RESULT_SIZE;
1084 #endif
1085 }
1086 return size;
1087 }
1088
1089 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1090 /* Create a vector describing the result block RESULT. If SAVEP is true,
1091 the result block is used to save the values; otherwise it is used to
1092 restore the values. */
1093
1094 static rtx
1095 result_vector (savep, result)
1096 int savep;
1097 rtx result;
1098 {
1099 int regno, size, align, nelts;
1100 enum machine_mode mode;
1101 rtx reg, mem;
1102 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1103
1104 size = nelts = 0;
1105 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1106 if ((mode = apply_result_mode[regno]) != VOIDmode)
1107 {
1108 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1109 if (size % align != 0)
1110 size = CEIL (size, align) * align;
1111 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1112 mem = adjust_address (result, mode, size);
1113 savevec[nelts++] = (savep
1114 ? gen_rtx_SET (VOIDmode, mem, reg)
1115 : gen_rtx_SET (VOIDmode, reg, mem));
1116 size += GET_MODE_SIZE (mode);
1117 }
1118 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1119 }
1120 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1121
1122 /* Save the state required to perform an untyped call with the same
1123 arguments as were passed to the current function. */
1124
1125 static rtx
1126 expand_builtin_apply_args_1 ()
1127 {
1128 rtx registers;
1129 int size, align, regno;
1130 enum machine_mode mode;
1131
1132 /* Create a block where the arg-pointer, structure value address,
1133 and argument registers can be saved. */
1134 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1135
1136 /* Walk past the arg-pointer and structure value address. */
1137 size = GET_MODE_SIZE (Pmode);
1138 if (struct_value_rtx)
1139 size += GET_MODE_SIZE (Pmode);
1140
1141 /* Save each register used in calling a function to the block. */
1142 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1143 if ((mode = apply_args_mode[regno]) != VOIDmode)
1144 {
1145 rtx tem;
1146
1147 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1148 if (size % align != 0)
1149 size = CEIL (size, align) * align;
1150
1151 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1152
1153 emit_move_insn (adjust_address (registers, mode, size), tem);
1154 size += GET_MODE_SIZE (mode);
1155 }
1156
1157 /* Save the arg pointer to the block. */
1158 emit_move_insn (adjust_address (registers, Pmode, 0),
1159 copy_to_reg (virtual_incoming_args_rtx));
1160 size = GET_MODE_SIZE (Pmode);
1161
1162 /* Save the structure value address unless this is passed as an
1163 "invisible" first argument. */
1164 if (struct_value_incoming_rtx)
1165 {
1166 emit_move_insn (adjust_address (registers, Pmode, size),
1167 copy_to_reg (struct_value_incoming_rtx));
1168 size += GET_MODE_SIZE (Pmode);
1169 }
1170
1171 /* Return the address of the block. */
1172 return copy_addr_to_reg (XEXP (registers, 0));
1173 }
1174
1175 /* __builtin_apply_args returns a block of memory allocated on
1176 the stack into which is stored the arg pointer, structure
1177 value address, static chain, and all the registers that might
1178 possibly be used in performing a function call. The code is
1179 moved to the start of the function so the incoming values are
1180 saved. */
1181
1182 static rtx
1183 expand_builtin_apply_args ()
1184 {
1185 /* Don't do __builtin_apply_args more than once in a function.
1186 Save the result of the first call and reuse it. */
1187 if (apply_args_value != 0)
1188 return apply_args_value;
1189 {
1190 /* When this function is called, it means that registers must be
1191 saved on entry to this function. So we migrate the
1192 call to the first insn of this function. */
1193 rtx temp;
1194 rtx seq;
1195
1196 start_sequence ();
1197 temp = expand_builtin_apply_args_1 ();
1198 seq = get_insns ();
1199 end_sequence ();
1200
1201 apply_args_value = temp;
1202
1203 /* Put the insns after the NOTE that starts the function.
1204 If this is inside a start_sequence, make the outer-level insn
1205 chain current, so the code is placed at the start of the
1206 function. */
1207 push_topmost_sequence ();
1208 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1209 pop_topmost_sequence ();
1210 return temp;
1211 }
1212 }
1213
1214 /* Perform an untyped call and save the state required to perform an
1215 untyped return of whatever value was returned by the given function. */
1216
1217 static rtx
1218 expand_builtin_apply (function, arguments, argsize)
1219 rtx function, arguments, argsize;
1220 {
1221 int size, align, regno;
1222 enum machine_mode mode;
1223 rtx incoming_args, result, reg, dest, src, call_insn;
1224 rtx old_stack_level = 0;
1225 rtx call_fusage = 0;
1226
1227 #ifdef POINTERS_EXTEND_UNSIGNED
1228 if (GET_MODE (arguments) != Pmode)
1229 arguments = convert_memory_address (Pmode, arguments);
1230 #endif
1231
1232 /* Create a block where the return registers can be saved. */
1233 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1234
1235 /* Fetch the arg pointer from the ARGUMENTS block. */
1236 incoming_args = gen_reg_rtx (Pmode);
1237 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1238 #ifndef STACK_GROWS_DOWNWARD
1239 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1240 incoming_args, 0, OPTAB_LIB_WIDEN);
1241 #endif
1242
1243 /* Perform postincrements before actually calling the function. */
1244 emit_queue ();
1245
1246 /* Push a new argument block and copy the arguments. Do not allow
1247 the (potential) memcpy call below to interfere with our stack
1248 manipulations. */
1249 do_pending_stack_adjust ();
1250 NO_DEFER_POP;
1251
1252 /* Save the stack with nonlocal if available */
1253 #ifdef HAVE_save_stack_nonlocal
1254 if (HAVE_save_stack_nonlocal)
1255 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1256 else
1257 #endif
1258 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1259
1260 /* Push a block of memory onto the stack to store the memory arguments.
1261 Save the address in a register, and copy the memory arguments. ??? I
1262 haven't figured out how the calling convention macros affect this,
1263 but it's likely that the source and/or destination addresses in
1264 the block copy will need updating in machine specific ways. */
1265 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1266 dest = gen_rtx_MEM (BLKmode, dest);
1267 set_mem_align (dest, PARM_BOUNDARY);
1268 src = gen_rtx_MEM (BLKmode, incoming_args);
1269 set_mem_align (src, PARM_BOUNDARY);
1270 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1271
1272 /* Refer to the argument block. */
1273 apply_args_size ();
1274 arguments = gen_rtx_MEM (BLKmode, arguments);
1275 set_mem_align (arguments, PARM_BOUNDARY);
1276
1277 /* Walk past the arg-pointer and structure value address. */
1278 size = GET_MODE_SIZE (Pmode);
1279 if (struct_value_rtx)
1280 size += GET_MODE_SIZE (Pmode);
1281
1282 /* Restore each of the registers previously saved. Make USE insns
1283 for each of these registers for use in making the call. */
1284 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1285 if ((mode = apply_args_mode[regno]) != VOIDmode)
1286 {
1287 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1288 if (size % align != 0)
1289 size = CEIL (size, align) * align;
1290 reg = gen_rtx_REG (mode, regno);
1291 emit_move_insn (reg, adjust_address (arguments, mode, size));
1292 use_reg (&call_fusage, reg);
1293 size += GET_MODE_SIZE (mode);
1294 }
1295
1296 /* Restore the structure value address unless this is passed as an
1297 "invisible" first argument. */
1298 size = GET_MODE_SIZE (Pmode);
1299 if (struct_value_rtx)
1300 {
1301 rtx value = gen_reg_rtx (Pmode);
1302 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1303 emit_move_insn (struct_value_rtx, value);
1304 if (GET_CODE (struct_value_rtx) == REG)
1305 use_reg (&call_fusage, struct_value_rtx);
1306 size += GET_MODE_SIZE (Pmode);
1307 }
1308
1309 /* All arguments and registers used for the call are set up by now! */
1310 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1311
1312 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1313 and we don't want to load it into a register as an optimization,
1314 because prepare_call_address already did it if it should be done. */
1315 if (GET_CODE (function) != SYMBOL_REF)
1316 function = memory_address (FUNCTION_MODE, function);
1317
1318 /* Generate the actual call instruction and save the return value. */
1319 #ifdef HAVE_untyped_call
1320 if (HAVE_untyped_call)
1321 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1322 result, result_vector (1, result)));
1323 else
1324 #endif
1325 #ifdef HAVE_call_value
1326 if (HAVE_call_value)
1327 {
1328 rtx valreg = 0;
1329
1330 /* Locate the unique return register. It is not possible to
1331 express a call that sets more than one return register using
1332 call_value; use untyped_call for that. In fact, untyped_call
1333 only needs to save the return registers in the given block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_result_mode[regno]) != VOIDmode)
1336 {
1337 if (valreg)
1338 abort (); /* HAVE_untyped_call required. */
1339 valreg = gen_rtx_REG (mode, regno);
1340 }
1341
1342 emit_call_insn (GEN_CALL_VALUE (valreg,
1343 gen_rtx_MEM (FUNCTION_MODE, function),
1344 const0_rtx, NULL_RTX, const0_rtx));
1345
1346 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1347 }
1348 else
1349 #endif
1350 abort ();
1351
1352 /* Find the CALL insn we just emitted, and attach the register usage
1353 information. */
1354 call_insn = last_call_insn ();
1355 add_function_usage_to (call_insn, call_fusage);
1356
1357 /* Restore the stack. */
1358 #ifdef HAVE_save_stack_nonlocal
1359 if (HAVE_save_stack_nonlocal)
1360 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1361 else
1362 #endif
1363 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1364
1365 OK_DEFER_POP;
1366
1367 /* Return the address of the result block. */
1368 return copy_addr_to_reg (XEXP (result, 0));
1369 }
1370
1371 /* Perform an untyped return. */
1372
1373 static void
1374 expand_builtin_return (result)
1375 rtx result;
1376 {
1377 int size, align, regno;
1378 enum machine_mode mode;
1379 rtx reg;
1380 rtx call_fusage = 0;
1381
1382 #ifdef POINTERS_EXTEND_UNSIGNED
1383 if (GET_MODE (result) != Pmode)
1384 result = convert_memory_address (Pmode, result);
1385 #endif
1386
1387 apply_result_size ();
1388 result = gen_rtx_MEM (BLKmode, result);
1389
1390 #ifdef HAVE_untyped_return
1391 if (HAVE_untyped_return)
1392 {
1393 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1394 emit_barrier ();
1395 return;
1396 }
1397 #endif
1398
1399 /* Restore the return value and note that each value is used. */
1400 size = 0;
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_result_mode[regno]) != VOIDmode)
1403 {
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1407 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1408 emit_move_insn (reg, adjust_address (result, mode, size));
1409
1410 push_to_sequence (call_fusage);
1411 emit_insn (gen_rtx_USE (VOIDmode, reg));
1412 call_fusage = get_insns ();
1413 end_sequence ();
1414 size += GET_MODE_SIZE (mode);
1415 }
1416
1417 /* Put the USE insns before the return. */
1418 emit_insn (call_fusage);
1419
1420 /* Return whatever value was restored by jumping directly to the end
1421 of the function. */
1422 expand_null_return ();
1423 }
1424
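/* The usual source-level idiom served by __builtin_apply_args,
   __builtin_apply and __builtin_return, whose expanders appear above
   (a sketch; the size argument 64 is an assumed upper bound on the callee's
   stack arguments, and forward_fn is a placeholder):

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) forward_fn, args, 64);
     __builtin_return (res);  */
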
1425 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1426
1427 static enum type_class
1428 type_to_class (type)
1429 tree type;
1430 {
1431 switch (TREE_CODE (type))
1432 {
1433 case VOID_TYPE: return void_type_class;
1434 case INTEGER_TYPE: return integer_type_class;
1435 case CHAR_TYPE: return char_type_class;
1436 case ENUMERAL_TYPE: return enumeral_type_class;
1437 case BOOLEAN_TYPE: return boolean_type_class;
1438 case POINTER_TYPE: return pointer_type_class;
1439 case REFERENCE_TYPE: return reference_type_class;
1440 case OFFSET_TYPE: return offset_type_class;
1441 case REAL_TYPE: return real_type_class;
1442 case COMPLEX_TYPE: return complex_type_class;
1443 case FUNCTION_TYPE: return function_type_class;
1444 case METHOD_TYPE: return method_type_class;
1445 case RECORD_TYPE: return record_type_class;
1446 case UNION_TYPE:
1447 case QUAL_UNION_TYPE: return union_type_class;
1448 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1449 ? string_type_class : array_type_class);
1450 case SET_TYPE: return set_type_class;
1451 case FILE_TYPE: return file_type_class;
1452 case LANG_TYPE: return lang_type_class;
1453 default: return no_type_class;
1454 }
1455 }
1456
1457 /* Expand a call to __builtin_classify_type with arguments found in
1458 ARGLIST. */
1459
1460 static rtx
1461 expand_builtin_classify_type (arglist)
1462 tree arglist;
1463 {
1464 if (arglist != 0)
1465 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1466 return GEN_INT (no_type_class);
1467 }
1468
1469 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1470
1471 static rtx
1472 expand_builtin_constant_p (arglist, target_mode)
1473 tree arglist;
1474 enum machine_mode target_mode;
1475 {
1476 rtx tmp;
1477
1478 if (arglist == 0)
1479 return const0_rtx;
1480 arglist = TREE_VALUE (arglist);
1481
1482 /* We have taken care of the easy cases during constant folding. This
1483 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1484 get a chance to see if it can deduce whether ARGLIST is constant. */
1485
1486 current_function_calls_constant_p = 1;
1487
1488 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1489 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1490 return tmp;
1491 }
1492
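/* Illustrative input for the expander above: a use such as

     if (__builtin_constant_p (x) && x < 16)
       ...

   whose argument could not be folded earlier is emitted here as
   (constant_p_rtx (x)), giving CSE a later chance to prove constancy.  */
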
1493 /* Return the mathematical function equivalent to FN, but operating directly on TYPE,
1494 if available. */
1495 tree
1496 mathfn_built_in (type, fn)
1497 tree type;
1498 enum built_in_function fn;
1499 {
1500 enum built_in_function fcode = NOT_BUILT_IN;
1501 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1502 switch (fn)
1503 {
1504 case BUILT_IN_SQRT:
1505 case BUILT_IN_SQRTF:
1506 case BUILT_IN_SQRTL:
1507 fcode = BUILT_IN_SQRT;
1508 break;
1509 case BUILT_IN_SIN:
1510 case BUILT_IN_SINF:
1511 case BUILT_IN_SINL:
1512 fcode = BUILT_IN_SIN;
1513 break;
1514 case BUILT_IN_COS:
1515 case BUILT_IN_COSF:
1516 case BUILT_IN_COSL:
1517 fcode = BUILT_IN_COS;
1518 break;
1519 case BUILT_IN_EXP:
1520 case BUILT_IN_EXPF:
1521 case BUILT_IN_EXPL:
1522 fcode = BUILT_IN_EXP;
1523 break;
1524 case BUILT_IN_LOG:
1525 case BUILT_IN_LOGF:
1526 case BUILT_IN_LOGL:
1527 fcode = BUILT_IN_LOG;
1528 break;
1529 case BUILT_IN_TAN:
1530 case BUILT_IN_TANF:
1531 case BUILT_IN_TANL:
1532 fcode = BUILT_IN_TAN;
1533 break;
1534 case BUILT_IN_ATAN:
1535 case BUILT_IN_ATANF:
1536 case BUILT_IN_ATANL:
1537 fcode = BUILT_IN_ATAN;
1538 break;
1539 case BUILT_IN_FLOOR:
1540 case BUILT_IN_FLOORF:
1541 case BUILT_IN_FLOORL:
1542 fcode = BUILT_IN_FLOOR;
1543 break;
1544 case BUILT_IN_CEIL:
1545 case BUILT_IN_CEILF:
1546 case BUILT_IN_CEILL:
1547 fcode = BUILT_IN_CEIL;
1548 break;
1549 case BUILT_IN_TRUNC:
1550 case BUILT_IN_TRUNCF:
1551 case BUILT_IN_TRUNCL:
1552 fcode = BUILT_IN_TRUNC;
1553 break;
1554 case BUILT_IN_ROUND:
1555 case BUILT_IN_ROUNDF:
1556 case BUILT_IN_ROUNDL:
1557 fcode = BUILT_IN_ROUND;
1558 break;
1559 case BUILT_IN_NEARBYINT:
1560 case BUILT_IN_NEARBYINTF:
1561 case BUILT_IN_NEARBYINTL:
1562 fcode = BUILT_IN_NEARBYINT;
1563 break;
1564 default:
1565 abort ();
1566 }
1567 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1568 switch (fn)
1569 {
1570 case BUILT_IN_SQRT:
1571 case BUILT_IN_SQRTF:
1572 case BUILT_IN_SQRTL:
1573 fcode = BUILT_IN_SQRTF;
1574 break;
1575 case BUILT_IN_SIN:
1576 case BUILT_IN_SINF:
1577 case BUILT_IN_SINL:
1578 fcode = BUILT_IN_SINF;
1579 break;
1580 case BUILT_IN_COS:
1581 case BUILT_IN_COSF:
1582 case BUILT_IN_COSL:
1583 fcode = BUILT_IN_COSF;
1584 break;
1585 case BUILT_IN_EXP:
1586 case BUILT_IN_EXPF:
1587 case BUILT_IN_EXPL:
1588 fcode = BUILT_IN_EXPF;
1589 break;
1590 case BUILT_IN_LOG:
1591 case BUILT_IN_LOGF:
1592 case BUILT_IN_LOGL:
1593 fcode = BUILT_IN_LOGF;
1594 break;
1595 case BUILT_IN_TAN:
1596 case BUILT_IN_TANF:
1597 case BUILT_IN_TANL:
1598 fcode = BUILT_IN_TANF;
1599 break;
1600 case BUILT_IN_ATAN:
1601 case BUILT_IN_ATANF:
1602 case BUILT_IN_ATANL:
1603 fcode = BUILT_IN_ATANF;
1604 break;
1605 case BUILT_IN_FLOOR:
1606 case BUILT_IN_FLOORF:
1607 case BUILT_IN_FLOORL:
1608 fcode = BUILT_IN_FLOORF;
1609 break;
1610 case BUILT_IN_CEIL:
1611 case BUILT_IN_CEILF:
1612 case BUILT_IN_CEILL:
1613 fcode = BUILT_IN_CEILF;
1614 break;
1615 case BUILT_IN_TRUNC:
1616 case BUILT_IN_TRUNCF:
1617 case BUILT_IN_TRUNCL:
1618 fcode = BUILT_IN_TRUNCF;
1619 break;
1620 case BUILT_IN_ROUND:
1621 case BUILT_IN_ROUNDF:
1622 case BUILT_IN_ROUNDL:
1623 fcode = BUILT_IN_ROUNDF;
1624 break;
1625 case BUILT_IN_NEARBYINT:
1626 case BUILT_IN_NEARBYINTF:
1627 case BUILT_IN_NEARBYINTL:
1628 fcode = BUILT_IN_NEARBYINTF;
1629 break;
1630 default:
1631 abort ();
1632 }
1633 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1634 switch (fn)
1635 {
1636 case BUILT_IN_SQRT:
1637 case BUILT_IN_SQRTF:
1638 case BUILT_IN_SQRTL:
1639 fcode = BUILT_IN_SQRTL;
1640 break;
1641 case BUILT_IN_SIN:
1642 case BUILT_IN_SINF:
1643 case BUILT_IN_SINL:
1644 fcode = BUILT_IN_SINL;
1645 break;
1646 case BUILT_IN_COS:
1647 case BUILT_IN_COSF:
1648 case BUILT_IN_COSL:
1649 fcode = BUILT_IN_COSL;
1650 break;
1651 case BUILT_IN_EXP:
1652 case BUILT_IN_EXPF:
1653 case BUILT_IN_EXPL:
1654 fcode = BUILT_IN_EXPL;
1655 break;
1656 case BUILT_IN_LOG:
1657 case BUILT_IN_LOGF:
1658 case BUILT_IN_LOGL:
1659 fcode = BUILT_IN_LOGL;
1660 break;
1661 case BUILT_IN_TAN:
1662 case BUILT_IN_TANF:
1663 case BUILT_IN_TANL:
1664 fcode = BUILT_IN_TANL;
1665 break;
1666 case BUILT_IN_ATAN:
1667 case BUILT_IN_ATANF:
1668 case BUILT_IN_ATANL:
1669 fcode = BUILT_IN_ATANL;
1670 break;
1671 case BUILT_IN_FLOOR:
1672 case BUILT_IN_FLOORF:
1673 case BUILT_IN_FLOORL:
1674 fcode = BUILT_IN_FLOORL;
1675 break;
1676 case BUILT_IN_CEIL:
1677 case BUILT_IN_CEILF:
1678 case BUILT_IN_CEILL:
1679 fcode = BUILT_IN_CEILL;
1680 break;
1681 case BUILT_IN_TRUNC:
1682 case BUILT_IN_TRUNCF:
1683 case BUILT_IN_TRUNCL:
1684 fcode = BUILT_IN_TRUNCL;
1685 break;
1686 case BUILT_IN_ROUND:
1687 case BUILT_IN_ROUNDF:
1688 case BUILT_IN_ROUNDL:
1689 fcode = BUILT_IN_ROUNDL;
1690 break;
1691 case BUILT_IN_NEARBYINT:
1692 case BUILT_IN_NEARBYINTF:
1693 case BUILT_IN_NEARBYINTL:
1694 fcode = BUILT_IN_NEARBYINTL;
1695 break;
1696 default:
1697 abort ();
1698 }
1699 return implicit_built_in_decls[fcode];
1700 }
1701
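/* For instance (illustrative): mathfn_built_in (float_type_node,
   BUILT_IN_SQRT) yields the implicit decl for BUILT_IN_SQRTF, while the
   same request for long_double_type_node yields BUILT_IN_SQRTL.  */
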
1702 /* If errno must be maintained, expand the RTL to check if the result,
1703 TARGET, of a built-in function call, EXP, is NaN, and if so set
1704 errno to EDOM. */
1705
1706 static void
1707 expand_errno_check (exp, target)
1708 tree exp;
1709 rtx target;
1710 {
1711 rtx lab;
1712
1713 if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
1714 {
1715 lab = gen_label_rtx ();
1716
1717 /* Test the result; if it is NaN, set errno=EDOM because
1718 the argument was not in the domain. */
1719 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1720 0, lab);
1721
1722 #ifdef TARGET_EDOM
1723 {
1724 #ifdef GEN_ERRNO_RTX
1725 rtx errno_rtx = GEN_ERRNO_RTX;
1726 #else
1727 rtx errno_rtx
1728 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1729 #endif
1730
1731 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1732 }
1733 #else
1734 /* We can't set errno=EDOM directly; let the library call do it.
1735 Pop the arguments right away in case the call gets deleted. */
1736 NO_DEFER_POP;
1737 expand_call (exp, target, 0);
1738 OK_DEFER_POP;
1739 #endif
1740
1741 emit_label (lab);
1742 }
1743 }
1744
1745
1746 /* Expand a call to one of the builtin math functions (sin, cos, sqrt, exp, etc.).
1747 Return 0 if a normal call should be emitted rather than expanding the
1748 function in-line. EXP is the expression that is a call to the builtin
1749 function; if convenient, the result should be placed in TARGET.
1750 SUBTARGET may be used as the target for computing one of EXP's operands. */
1751
1752 static rtx
1753 expand_builtin_mathfn (exp, target, subtarget)
1754 tree exp;
1755 rtx target, subtarget;
1756 {
1757 optab builtin_optab;
1758 rtx op0, insns;
1759 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1760 tree arglist = TREE_OPERAND (exp, 1);
1761 enum machine_mode argmode;
1762 bool errno_set = true;
1763
1764 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1765 return 0;
1766
1767 /* Stabilize and compute the argument. */
1768 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1769 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1770 {
1771 exp = copy_node (exp);
1772 TREE_OPERAND (exp, 1) = arglist;
1773 /* Wrap the computation of the argument in a SAVE_EXPR. That
1774 way, if we need to expand the argument again (as in the
1775 flag_errno_math case below where we cannot directly set
1776 errno), we will not perform side-effects more than once.
1777 Note that here we're mutating the original EXP as well as the
1778 copy; that's the right thing to do in case the original EXP
1779 is expanded later. */
1780 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1781 arglist = copy_node (arglist);
1782 }
1783 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1784
1785 /* Make a suitable register to place result in. */
1786 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1787
1788 emit_queue ();
1789 start_sequence ();
1790
1791 switch (DECL_FUNCTION_CODE (fndecl))
1792 {
1793 case BUILT_IN_SIN:
1794 case BUILT_IN_SINF:
1795 case BUILT_IN_SINL:
1796 builtin_optab = sin_optab; break;
1797 case BUILT_IN_COS:
1798 case BUILT_IN_COSF:
1799 case BUILT_IN_COSL:
1800 builtin_optab = cos_optab; break;
1801 case BUILT_IN_SQRT:
1802 case BUILT_IN_SQRTF:
1803 case BUILT_IN_SQRTL:
1804 builtin_optab = sqrt_optab; break;
1805 case BUILT_IN_EXP:
1806 case BUILT_IN_EXPF:
1807 case BUILT_IN_EXPL:
1808 builtin_optab = exp_optab; break;
1809 case BUILT_IN_LOG:
1810 case BUILT_IN_LOGF:
1811 case BUILT_IN_LOGL:
1812 builtin_optab = log_optab; break;
1813 case BUILT_IN_FLOOR:
1814 case BUILT_IN_FLOORF:
1815 case BUILT_IN_FLOORL:
1816 errno_set = false; builtin_optab = floor_optab; break;
1817 case BUILT_IN_CEIL:
1818 case BUILT_IN_CEILF:
1819 case BUILT_IN_CEILL:
1820 errno_set = false; builtin_optab = ceil_optab; break;
1821 case BUILT_IN_TRUNC:
1822 case BUILT_IN_TRUNCF:
1823 case BUILT_IN_TRUNCL:
1824 errno_set = false; builtin_optab = trunc_optab; break;
1825 case BUILT_IN_ROUND:
1826 case BUILT_IN_ROUNDF:
1827 case BUILT_IN_ROUNDL:
1828 errno_set = false; builtin_optab = round_optab; break;
1829 case BUILT_IN_NEARBYINT:
1830 case BUILT_IN_NEARBYINTF:
1831 case BUILT_IN_NEARBYINTL:
1832 errno_set = false; builtin_optab = nearbyint_optab; break;
1833 default:
1834 abort ();
1835 }
1836
1837 /* Compute into TARGET.
1838 Set TARGET to wherever the result comes back. */
1839 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1840 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1841
1842 /* If we were unable to expand via the builtin, stop the
1843 sequence (without outputting the insns) and return 0, causing
1844 a call to the library function. */
1845 if (target == 0)
1846 {
1847 end_sequence ();
1848 return 0;
1849 }
1850
1851 if (errno_set)
1852 expand_errno_check (exp, target);
1853
1854 /* Output the entire sequence. */
1855 insns = get_insns ();
1856 end_sequence ();
1857 emit_insn (insns);
1858
1859 return target;
1860 }
1861
1862 /* Expand a call to the builtin binary math functions (pow and atan2).
1863 Return 0 if a normal call should be emitted rather than expanding the
1864 function in-line. EXP is the expression that is a call to the builtin
1865 function; if convenient, the result should be placed in TARGET.
1866 SUBTARGET may be used as the target for computing one of EXP's
1867 operands. */
1868
1869 static rtx
1870 expand_builtin_mathfn_2 (exp, target, subtarget)
1871 tree exp;
1872 rtx target, subtarget;
1873 {
1874 optab builtin_optab;
1875 rtx op0, op1, insns;
1876 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1877 tree arglist = TREE_OPERAND (exp, 1);
1878 tree arg0, arg1;
1879 enum machine_mode argmode;
1880 bool errno_set = true;
1881 bool stable = true;
1882
1883 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1884 return 0;
1885
1886 arg0 = TREE_VALUE (arglist);
1887 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1888
1889 /* Stabilize the arguments. */
1890 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
1891 {
1892 arg0 = save_expr (arg0);
1893 TREE_VALUE (arglist) = arg0;
1894 stable = false;
1895 }
1896 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
1897 {
1898 arg1 = save_expr (arg1);
1899 TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
1900 stable = false;
1901 }
1902
1903 if (! stable)
1904 {
1905 exp = copy_node (exp);
1906 arglist = tree_cons (NULL_TREE, arg0,
1907 build_tree_list (NULL_TREE, arg1));
1908 TREE_OPERAND (exp, 1) = arglist;
1909 }
1910
1911 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1912 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1913
1914 /* Make a suitable register to place result in. */
1915 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1916
1917 emit_queue ();
1918 start_sequence ();
1919
1920 switch (DECL_FUNCTION_CODE (fndecl))
1921 {
1922 case BUILT_IN_POW:
1923 case BUILT_IN_POWF:
1924 case BUILT_IN_POWL:
1925 builtin_optab = pow_optab; break;
1926 case BUILT_IN_ATAN2:
1927 case BUILT_IN_ATAN2F:
1928 case BUILT_IN_ATAN2L:
1929 builtin_optab = atan2_optab; break;
1930 default:
1931 abort ();
1932 }
1933
1934 /* Compute into TARGET.
1935 Set TARGET to wherever the result comes back. */
1936 argmode = TYPE_MODE (TREE_TYPE (arg0));
1937 target = expand_binop (argmode, builtin_optab, op0, op1,
1938 target, 0, OPTAB_DIRECT);
1939
1940 /* If we were unable to expand via the builtin, stop the
1941 sequence (without outputting the insns) and return 0, causing
1942 a call to the library function. */
1943 if (target == 0)
1944 {
1945 end_sequence ();
1946 return 0;
1947 }
1948
1949 if (errno_set)
1950 expand_errno_check (exp, target);
1951
1952 /* Output the entire sequence. */
1953 insns = get_insns ();
1954 end_sequence ();
1955 emit_insn (insns);
1956
1957 return target;
1958 }
1959
1960 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
1961    if we failed; the caller should emit a normal call. Otherwise
1962 try to get the result in TARGET, if convenient. */
1963
1964 static rtx
1965 expand_builtin_strlen (arglist, target, target_mode)
1966 tree arglist;
1967 rtx target;
1968 enum machine_mode target_mode;
1969 {
1970 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1971 return 0;
1972 else
1973 {
1974 rtx pat;
1975 tree len, src = TREE_VALUE (arglist);
1976 rtx result, src_reg, char_rtx, before_strlen;
1977 enum machine_mode insn_mode = target_mode, char_mode;
1978 enum insn_code icode = CODE_FOR_nothing;
1979 int align;
1980
1981 /* If the length can be computed at compile-time, return it. */
1982 len = c_strlen (src);
1983 if (len)
1984 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
1985
1986 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1987
1988 /* If SRC is not a pointer type, don't do this operation inline. */
1989 if (align == 0)
1990 return 0;
1991
1992 /* Bail out if we can't compute strlen in the right mode. */
1993 while (insn_mode != VOIDmode)
1994 {
1995 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1996 if (icode != CODE_FOR_nothing)
1997 break;
1998
1999 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2000 }
2001 if (insn_mode == VOIDmode)
2002 return 0;
2003
2004 /* Make a place to write the result of the instruction. */
2005 result = target;
2006 if (! (result != 0
2007 && GET_CODE (result) == REG
2008 && GET_MODE (result) == insn_mode
2009 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2010 result = gen_reg_rtx (insn_mode);
2011
2012 /* Make a place to hold the source address. We will not expand
2013 the actual source until we are sure that the expansion will
2014 not fail -- there are trees that cannot be expanded twice. */
2015 src_reg = gen_reg_rtx (Pmode);
2016
2017 /* Mark the beginning of the strlen sequence so we can emit the
2018 source operand later. */
2019 before_strlen = get_last_insn ();
2020
2021 char_rtx = const0_rtx;
2022 char_mode = insn_data[(int) icode].operand[2].mode;
2023 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2024 char_mode))
2025 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2026
2027 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2028 char_rtx, GEN_INT (align));
2029 if (! pat)
2030 return 0;
2031 emit_insn (pat);
2032
2033 /* Now that we are assured of success, expand the source. */
2034 start_sequence ();
2035 pat = memory_address (BLKmode,
2036 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2037 if (pat != src_reg)
2038 emit_move_insn (src_reg, pat);
2039 pat = get_insns ();
2040 end_sequence ();
2041
2042 if (before_strlen)
2043 emit_insn_after (pat, before_strlen);
2044 else
2045 emit_insn_before (pat, get_insns ());
2046
2047 /* Return the value in the proper mode for this function. */
2048 if (GET_MODE (result) == target_mode)
2049 target = result;
2050 else if (target != 0)
2051 convert_move (target, result, 0);
2052 else
2053 target = convert_to_mode (target_mode, result, 0);
2054
2055 return target;
2056 }
2057 }
2058
2059 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2060    caller should emit a normal call. Otherwise try to get the result
2061 in TARGET, if convenient (and in mode MODE if that's convenient). */
2062
2063 static rtx
2064 expand_builtin_strstr (arglist, target, mode)
2065 tree arglist;
2066 rtx target;
2067 enum machine_mode mode;
2068 {
2069 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2070 return 0;
2071 else
2072 {
2073 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2074 tree fn;
2075 const char *p1, *p2;
2076
2077 p2 = c_getstr (s2);
2078 if (p2 == NULL)
2079 return 0;
2080
2081 p1 = c_getstr (s1);
2082 if (p1 != NULL)
2083 {
2084 const char *r = strstr (p1, p2);
2085
2086 if (r == NULL)
2087 return const0_rtx;
2088
2089 /* Return an offset into the constant string argument. */
2090 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2091 s1, ssize_int (r - p1))),
2092 target, mode, EXPAND_NORMAL);
2093 }
2094
2095 if (p2[0] == '\0')
2096 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2097
2098 if (p2[1] != '\0')
2099 return 0;
2100
2101 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2102 if (!fn)
2103 return 0;
2104
2105 /* New argument list transforming strstr(s1, s2) to
2106 strchr(s1, s2[0]). */
2107 arglist =
2108 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2109 arglist = tree_cons (NULL_TREE, s1, arglist);
2110 return expand_expr (build_function_call_expr (fn, arglist),
2111 target, mode, EXPAND_NORMAL);
2112 }
2113 }
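
/* For example, with constant arguments the calls above fold as

       strstr ("hello", "ll")   =>   "hello" + 2
       strstr (s, "")           =>   s
       strstr (s, "q")          =>   strchr (s, 'q')

   and anything else falls through to a normal strstr call.  */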
2114
2115 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2116    caller should emit a normal call. Otherwise try to get the result
2117 in TARGET, if convenient (and in mode MODE if that's convenient). */
2118
2119 static rtx
2120 expand_builtin_strchr (arglist, target, mode)
2121 tree arglist;
2122 rtx target;
2123 enum machine_mode mode;
2124 {
2125 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2126 return 0;
2127 else
2128 {
2129 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2130 const char *p1;
2131
2132 if (TREE_CODE (s2) != INTEGER_CST)
2133 return 0;
2134
2135 p1 = c_getstr (s1);
2136 if (p1 != NULL)
2137 {
2138 char c;
2139 const char *r;
2140
2141 if (target_char_cast (s2, &c))
2142 return 0;
2143
2144 r = strchr (p1, c);
2145
2146 if (r == NULL)
2147 return const0_rtx;
2148
2149 /* Return an offset into the constant string argument. */
2150 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2151 s1, ssize_int (r - p1))),
2152 target, mode, EXPAND_NORMAL);
2153 }
2154
2155 /* FIXME: Should use here strchrM optab so that ports can optimize
2156 this. */
2157 return 0;
2158 }
2159 }
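
/* For example, with a constant string and character the call folds at
   compile time:

       strchr ("hello", 'l')   =>   "hello" + 2
       strchr ("hello", 'q')   =>   (char *) 0

   Non-constant strings are not handled here (see the FIXME above).  */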
2160
2161 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2162    caller should emit a normal call. Otherwise try to get the result
2163 in TARGET, if convenient (and in mode MODE if that's convenient). */
2164
2165 static rtx
2166 expand_builtin_strrchr (arglist, target, mode)
2167 tree arglist;
2168 rtx target;
2169 enum machine_mode mode;
2170 {
2171 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2172 return 0;
2173 else
2174 {
2175 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2176 tree fn;
2177 const char *p1;
2178
2179 if (TREE_CODE (s2) != INTEGER_CST)
2180 return 0;
2181
2182 p1 = c_getstr (s1);
2183 if (p1 != NULL)
2184 {
2185 char c;
2186 const char *r;
2187
2188 if (target_char_cast (s2, &c))
2189 return 0;
2190
2191 r = strrchr (p1, c);
2192
2193 if (r == NULL)
2194 return const0_rtx;
2195
2196 /* Return an offset into the constant string argument. */
2197 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2198 s1, ssize_int (r - p1))),
2199 target, mode, EXPAND_NORMAL);
2200 }
2201
2202 if (! integer_zerop (s2))
2203 return 0;
2204
2205 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2206 if (!fn)
2207 return 0;
2208
2209 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2210 return expand_expr (build_function_call_expr (fn, arglist),
2211 target, mode, EXPAND_NORMAL);
2212 }
2213 }
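
/* For example:

       strrchr ("hello", 'l')   =>   "hello" + 3
       strrchr (s, '\0')        =>   strchr (s, '\0')

   The second rewrite is valid because the terminating NUL occurs exactly
   once, so the first and last occurrence coincide.  */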
2214
2215 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2216    caller should emit a normal call. Otherwise try to get the result
2217 in TARGET, if convenient (and in mode MODE if that's convenient). */
2218
2219 static rtx
2220 expand_builtin_strpbrk (arglist, target, mode)
2221 tree arglist;
2222 rtx target;
2223 enum machine_mode mode;
2224 {
2225 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2226 return 0;
2227 else
2228 {
2229 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2230 tree fn;
2231 const char *p1, *p2;
2232
2233 p2 = c_getstr (s2);
2234 if (p2 == NULL)
2235 return 0;
2236
2237 p1 = c_getstr (s1);
2238 if (p1 != NULL)
2239 {
2240 const char *r = strpbrk (p1, p2);
2241
2242 if (r == NULL)
2243 return const0_rtx;
2244
2245 /* Return an offset into the constant string argument. */
2246 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2247 s1, ssize_int (r - p1))),
2248 target, mode, EXPAND_NORMAL);
2249 }
2250
2251 if (p2[0] == '\0')
2252 {
2253 /* strpbrk(x, "") == NULL.
2254 Evaluate and ignore the arguments in case they had
2255 side-effects. */
2256 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2257 return const0_rtx;
2258 }
2259
2260 if (p2[1] != '\0')
2261 return 0; /* Really call strpbrk. */
2262
2263 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2264 if (!fn)
2265 return 0;
2266
2267 /* New argument list transforming strpbrk(s1, s2) to
2268 strchr(s1, s2[0]). */
2269 arglist =
2270 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2271 arglist = tree_cons (NULL_TREE, s1, arglist);
2272 return expand_expr (build_function_call_expr (fn, arglist),
2273 target, mode, EXPAND_NORMAL);
2274 }
2275 }
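
/* For example:

       strpbrk ("hello", "lo")   =>   "hello" + 2
       strpbrk (s, "")           =>   (char *) 0
       strpbrk (s, "q")          =>   strchr (s, 'q')

   Longer non-constant accept sets fall through to a real strpbrk call.  */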
2276
2277 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2278 bytes from constant string DATA + OFFSET and return it as target
2279 constant. */
2280
2281 static rtx
2282 builtin_memcpy_read_str (data, offset, mode)
2283 PTR data;
2284 HOST_WIDE_INT offset;
2285 enum machine_mode mode;
2286 {
2287 const char *str = (const char *) data;
2288
2289 if (offset < 0
2290 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2291 > strlen (str) + 1))
2292 abort (); /* Attempt to read past the end of constant string. */
2293
2294 return c_readstr (str + offset, mode);
2295 }
2296
2297 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2298    Return 0 if we failed; the caller should emit a normal call.
2299    Otherwise try to get the result in TARGET, if convenient (and in
2300 mode MODE if that's convenient). */
2301 static rtx
2302 expand_builtin_memcpy (arglist, target, mode)
2303 tree arglist;
2304 rtx target;
2305 enum machine_mode mode;
2306 {
2307 if (!validate_arglist (arglist,
2308 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2309 return 0;
2310 else
2311 {
2312 tree dest = TREE_VALUE (arglist);
2313 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2314 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2315 const char *src_str;
2316 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2317 unsigned int dest_align
2318 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2319 rtx dest_mem, src_mem, dest_addr, len_rtx;
2320
2321 /* If DEST is not a pointer type, call the normal function. */
2322 if (dest_align == 0)
2323 return 0;
2324
2325 /* If the LEN parameter is zero, return DEST. */
2326 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2327 {
2328 /* Evaluate and ignore SRC in case it has side-effects. */
2329 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2330 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2331 }
2332
2333 /* If SRC is not a pointer type, don't do this
2334 operation in-line. */
2335 if (src_align == 0)
2336 return 0;
2337
2338 dest_mem = get_memory_rtx (dest);
2339 set_mem_align (dest_mem, dest_align);
2340 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2341 src_str = c_getstr (src);
2342
2343 /* If SRC is a string constant and block move would be done
2344 by pieces, we can avoid loading the string from memory
2345    and only store the computed constants. */
2346 if (src_str
2347 && GET_CODE (len_rtx) == CONST_INT
2348 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2349 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2350 (PTR) src_str, dest_align))
2351 {
2352 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2353 builtin_memcpy_read_str,
2354 (PTR) src_str, dest_align, 0);
2355 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2356 #ifdef POINTERS_EXTEND_UNSIGNED
2357 if (GET_MODE (dest_mem) != ptr_mode)
2358 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2359 #endif
2360 return dest_mem;
2361 }
2362
2363 src_mem = get_memory_rtx (src);
2364 set_mem_align (src_mem, src_align);
2365
2366 /* Copy word part most expediently. */
2367 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2368 BLOCK_OP_NORMAL);
2369
2370 if (dest_addr == 0)
2371 {
2372 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2373 #ifdef POINTERS_EXTEND_UNSIGNED
2374 if (GET_MODE (dest_addr) != ptr_mode)
2375 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2376 #endif
2377 }
2378 return dest_addr;
2379 }
2380 }
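
/* For example, with a small constant length and a constant source string,

       memcpy (buf, "abc", 4)

   is expanded by store_by_pieces into a few immediate stores of the
   string's bytes (no load from the string constant is emitted), and the
   value returned is the address of BUF.  */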
2381
2382 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2383    Return 0 if we failed; the caller should emit a normal call.
2384    Otherwise try to get the result in TARGET, if convenient (and in
2385    mode MODE if that's convenient). If ENDP is 0 return the
2386    destination pointer, if ENDP is 1 return the end pointer a la
2387    mempcpy, and if ENDP is 2 return the end pointer minus one a la
2388 stpcpy. */
2389
2390 static rtx
2391 expand_builtin_mempcpy (arglist, target, mode, endp)
2392 tree arglist;
2393 rtx target;
2394 enum machine_mode mode;
2395 int endp;
2396 {
2397 if (!validate_arglist (arglist,
2398 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2399 return 0;
2400 /* If return value is ignored, transform mempcpy into memcpy. */
2401 else if (target == const0_rtx)
2402 {
2403 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2404
2405 if (!fn)
2406 return 0;
2407
2408 return expand_expr (build_function_call_expr (fn, arglist),
2409 target, mode, EXPAND_NORMAL);
2410 }
2411 else
2412 {
2413 tree dest = TREE_VALUE (arglist);
2414 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2415 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2416 const char *src_str;
2417 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2418 unsigned int dest_align
2419 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2420 rtx dest_mem, src_mem, len_rtx;
2421
2422 /* If DEST is not a pointer type or LEN is not constant,
2423 call the normal function. */
2424 if (dest_align == 0 || !host_integerp (len, 1))
2425 return 0;
2426
2427 /* If the LEN parameter is zero, return DEST. */
2428 if (tree_low_cst (len, 1) == 0)
2429 {
2430 /* Evaluate and ignore SRC in case it has side-effects. */
2431 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2432 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2433 }
2434
2435 /* If SRC is not a pointer type, don't do this
2436 operation in-line. */
2437 if (src_align == 0)
2438 return 0;
2439
2440 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2441 src_str = c_getstr (src);
2442
2443 /* If SRC is a string constant and block move would be done
2444 by pieces, we can avoid loading the string from memory
2445    and only store the computed constants. */
2446 if (src_str
2447 && GET_CODE (len_rtx) == CONST_INT
2448 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2449 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2450 (PTR) src_str, dest_align))
2451 {
2452 dest_mem = get_memory_rtx (dest);
2453 set_mem_align (dest_mem, dest_align);
2454 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2455 builtin_memcpy_read_str,
2456 (PTR) src_str, dest_align, endp);
2457 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2458 #ifdef POINTERS_EXTEND_UNSIGNED
2459 if (GET_MODE (dest_mem) != ptr_mode)
2460 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2461 #endif
2462 return dest_mem;
2463 }
2464
2465 if (GET_CODE (len_rtx) == CONST_INT
2466 && can_move_by_pieces (INTVAL (len_rtx),
2467 MIN (dest_align, src_align)))
2468 {
2469 dest_mem = get_memory_rtx (dest);
2470 set_mem_align (dest_mem, dest_align);
2471 src_mem = get_memory_rtx (src);
2472 set_mem_align (src_mem, src_align);
2473 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2474 MIN (dest_align, src_align), endp);
2475 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2476 #ifdef POINTERS_EXTEND_UNSIGNED
2477 if (GET_MODE (dest_mem) != ptr_mode)
2478 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2479 #endif
2480 return dest_mem;
2481 }
2482
2483 return 0;
2484 }
2485 }
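
/* The ENDP convention used above, illustrated on a 4-byte copy:

       endp == 0   return DEST           (memcpy)
       endp == 1   return DEST + 4       (mempcpy)
       endp == 2   return DEST + 3       (stpcpy, points at the NUL)

   So when the length is a suitable constant, mempcpy (d, s, n) whose
   result is actually used is expanded by pieces and yields d + n.  */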
2486
2487 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2488    if we failed; the caller should emit a normal call. */
2489
2490 static rtx
2491 expand_builtin_memmove (arglist, target, mode)
2492 tree arglist;
2493 rtx target;
2494 enum machine_mode mode;
2495 {
2496 if (!validate_arglist (arglist,
2497 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2498 return 0;
2499 else
2500 {
2501 tree dest = TREE_VALUE (arglist);
2502 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2503 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2504
2505 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2506 unsigned int dest_align
2507 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2508
2509 /* If DEST is not a pointer type, call the normal function. */
2510 if (dest_align == 0)
2511 return 0;
2512
2513 /* If the LEN parameter is zero, return DEST. */
2514 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2515 {
2516 /* Evaluate and ignore SRC in case it has side-effects. */
2517 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2518 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2519 }
2520
2521 /* If SRC is not a pointer type, don't do this
2522 operation in-line. */
2523 if (src_align == 0)
2524 return 0;
2525
2526 /* If src is categorized for a readonly section we can use
2527 normal memcpy. */
2528 if (readonly_data_expr (src))
2529 {
2530 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2531 if (!fn)
2532 return 0;
2533 return expand_expr (build_function_call_expr (fn, arglist),
2534 target, mode, EXPAND_NORMAL);
2535 }
2536
2537 /* Otherwise, call the normal function. */
2538 return 0;
2539 }
2540 }
2541
2542 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2543    if we failed; the caller should emit a normal call. */
2544
2545 static rtx
2546 expand_builtin_bcopy (arglist)
2547 tree arglist;
2548 {
2549 tree src, dest, size, newarglist;
2550
2551 if (!validate_arglist (arglist,
2552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2553 return NULL_RTX;
2554
2555 src = TREE_VALUE (arglist);
2556 dest = TREE_VALUE (TREE_CHAIN (arglist));
2557 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2558
2559 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2560 memmove(ptr y, ptr x, size_t z). This is done this way
2561    so that if it isn't expanded inline, we fall back to
2562 calling bcopy instead of memmove. */
2563
2564 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2565 newarglist = tree_cons (NULL_TREE, src, newarglist);
2566 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2567
2568 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2569 }
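
/* Note the argument swap: bcopy takes (src, dest, len) while memmove takes
   (dest, src, len), so

       bcopy (s, d, n)   =>   memmove (d, s, n)

   and if the memmove expander punts, the original bcopy call is emitted
   rather than a call to memmove.  */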
2570
2571 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2572    if we failed; the caller should emit a normal call. Otherwise try to get
2573 the result in TARGET, if convenient (and in mode MODE if that's
2574 convenient). */
2575
2576 static rtx
2577 expand_builtin_strcpy (arglist, target, mode)
2578 tree arglist;
2579 rtx target;
2580 enum machine_mode mode;
2581 {
2582 tree fn, len;
2583
2584 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2585 return 0;
2586
2587 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2588 if (!fn)
2589 return 0;
2590
2591 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2592 if (len == 0)
2593 return 0;
2594
2595 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2596 chainon (arglist, build_tree_list (NULL_TREE, len));
2597 return expand_expr (build_function_call_expr (fn, arglist),
2598 target, mode, EXPAND_NORMAL);
2599 }
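
/* For example, when the source length is known at compile time,

       strcpy (d, "abc")   =>   memcpy (d, "abc", 4)

   where the length is strlen ("abc") + 1 so the terminating NUL is
   copied as well.  */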
2600
2601 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2602    Return 0 if we failed; the caller should emit a normal call.
2603    Otherwise try to get the result in TARGET, if convenient (and in
2604 mode MODE if that's convenient). */
2605
2606 static rtx
2607 expand_builtin_stpcpy (arglist, target, mode)
2608 tree arglist;
2609 rtx target;
2610 enum machine_mode mode;
2611 {
2612 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2613 return 0;
2614 else
2615 {
2616 tree newarglist;
2617 tree src, len;
2618
2619 /* If return value is ignored, transform stpcpy into strcpy. */
2620 if (target == const0_rtx)
2621 {
2622 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2623 if (!fn)
2624 return 0;
2625
2626 return expand_expr (build_function_call_expr (fn, arglist),
2627 target, mode, EXPAND_NORMAL);
2628 }
2629
2630 /* Ensure we get an actual string whose length can be evaluated at
2631 compile-time, not an expression containing a string. This is
2632 because the latter will potentially produce pessimized code
2633 when used to produce the return value. */
2634 src = TREE_VALUE (TREE_CHAIN (arglist));
2635 if (! c_getstr (src) || ! (len = c_strlen (src)))
2636 return 0;
2637
2638 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2639 newarglist = copy_list (arglist);
2640 chainon (newarglist, build_tree_list (NULL_TREE, len));
2641 return expand_builtin_mempcpy (newarglist, target, mode, /*endp=*/2);
2642 }
2643 }
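
/* For example:

       stpcpy (d, "abc")          copies 4 bytes and yields d + 3
       (void) stpcpy (d, s)  =>   strcpy (d, s)

   The first form goes through expand_builtin_mempcpy with ENDP == 2, so
   the returned pointer addresses the copied NUL terminator.  */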
2644
2645 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2646 bytes from constant string DATA + OFFSET and return it as target
2647 constant. */
2648
2649 static rtx
2650 builtin_strncpy_read_str (data, offset, mode)
2651 PTR data;
2652 HOST_WIDE_INT offset;
2653 enum machine_mode mode;
2654 {
2655 const char *str = (const char *) data;
2656
2657 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2658 return const0_rtx;
2659
2660 return c_readstr (str + offset, mode);
2661 }
2662
2663 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2664    if we failed; the caller should emit a normal call. */
2665
2666 static rtx
2667 expand_builtin_strncpy (arglist, target, mode)
2668 tree arglist;
2669 rtx target;
2670 enum machine_mode mode;
2671 {
2672 if (!validate_arglist (arglist,
2673 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2674 return 0;
2675 else
2676 {
2677 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2678 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2679 tree fn;
2680
2681 /* We must be passed a constant len parameter. */
2682 if (TREE_CODE (len) != INTEGER_CST)
2683 return 0;
2684
2685 /* If the len parameter is zero, return the dst parameter. */
2686 if (integer_zerop (len))
2687 {
2688 /* Evaluate and ignore the src argument in case it has
2689 side-effects. */
2690 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2691 VOIDmode, EXPAND_NORMAL);
2692 /* Return the dst parameter. */
2693 return expand_expr (TREE_VALUE (arglist), target, mode,
2694 EXPAND_NORMAL);
2695 }
2696
2697 /* Now, we must be passed a constant src ptr parameter. */
2698 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2699 return 0;
2700
2701 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2702
2703 /* We're required to pad with trailing zeros if the requested
2704    len is greater than strlen(s2)+1. In that case, try to
2705    use store_by_pieces; if that fails, punt. */
2706 if (tree_int_cst_lt (slen, len))
2707 {
2708 tree dest = TREE_VALUE (arglist);
2709 unsigned int dest_align
2710 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2711 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2712 rtx dest_mem;
2713
2714 if (!p || dest_align == 0 || !host_integerp (len, 1)
2715 || !can_store_by_pieces (tree_low_cst (len, 1),
2716 builtin_strncpy_read_str,
2717 (PTR) p, dest_align))
2718 return 0;
2719
2720 dest_mem = get_memory_rtx (dest);
2721 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2722 builtin_strncpy_read_str,
2723 (PTR) p, dest_align, 0);
2724 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2725 #ifdef POINTERS_EXTEND_UNSIGNED
2726 if (GET_MODE (dest_mem) != ptr_mode)
2727 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2728 #endif
2729 return dest_mem;
2730 }
2731
2732 /* OK, transform into a builtin memcpy. */
2733 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2734 if (!fn)
2735 return 0;
2736 return expand_expr (build_function_call_expr (fn, arglist),
2737 target, mode, EXPAND_NORMAL);
2738 }
2739 }
2740
2741 /* Callback routine for store_by_pieces. Return a target constant
2742    consisting of GET_MODE_SIZE (MODE) copies of the single byte
2743    pointed to by DATA; OFFSET is ignored. */
2744
2745 static rtx
2746 builtin_memset_read_str (data, offset, mode)
2747 PTR data;
2748 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2749 enum machine_mode mode;
2750 {
2751 const char *c = (const char *) data;
2752 char *p = alloca (GET_MODE_SIZE (mode));
2753
2754 memset (p, *c, GET_MODE_SIZE (mode));
2755
2756 return c_readstr (p, mode);
2757 }
2758
2759 /* Callback routine for store_by_pieces. Return the RTL of a register
2760 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2761 char value given in the RTL register data. For example, if mode is
2762 4 bytes wide, return the RTL for 0x01010101*data. */
2763
2764 static rtx
2765 builtin_memset_gen_str (data, offset, mode)
2766 PTR data;
2767 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2768 enum machine_mode mode;
2769 {
2770 rtx target, coeff;
2771 size_t size;
2772 char *p;
2773
2774 size = GET_MODE_SIZE (mode);
2775 if (size == 1)
2776 return (rtx) data;
2777
2778 p = alloca (size);
2779 memset (p, 1, size);
2780 coeff = c_readstr (p, mode);
2781
2782 target = convert_to_mode (mode, (rtx) data, 1);
2783 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2784 return force_reg (mode, target);
2785 }
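
/* For example, if MODE is 4 bytes wide and DATA holds the (non-constant)
   fill byte C, the coefficient read above is 0x01010101, so the register
   returned contains

       C * 0x01010101

   i.e. C replicated into every byte of the word.  */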
2786
2787 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2788    if we failed; the caller should emit a normal call. Otherwise try to get
2789 the result in TARGET, if convenient (and in mode MODE if that's
2790 convenient). */
2791
2792 static rtx
2793 expand_builtin_memset (arglist, target, mode)
2794 tree arglist;
2795 rtx target;
2796 enum machine_mode mode;
2797 {
2798 if (!validate_arglist (arglist,
2799 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2800 return 0;
2801 else
2802 {
2803 tree dest = TREE_VALUE (arglist);
2804 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2805 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2806 char c;
2807
2808 unsigned int dest_align
2809 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2810 rtx dest_mem, dest_addr, len_rtx;
2811
2812 /* If DEST is not a pointer type, don't do this
2813 operation in-line. */
2814 if (dest_align == 0)
2815 return 0;
2816
2817 /* If the LEN parameter is zero, return DEST. */
2818 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2819 {
2820 /* Evaluate and ignore VAL in case it has side-effects. */
2821 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2822 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2823 }
2824
2825 if (TREE_CODE (val) != INTEGER_CST)
2826 {
2827 rtx val_rtx;
2828
2829 if (!host_integerp (len, 1))
2830 return 0;
2831
2832 if (optimize_size && tree_low_cst (len, 1) > 1)
2833 return 0;
2834
2835 /* Assume that we can memset by pieces if we can store
2836 * the coefficients by pieces (in the required modes).
2837 * We can't pass builtin_memset_gen_str as that emits RTL. */
2838 c = 1;
2839 if (!can_store_by_pieces (tree_low_cst (len, 1),
2840 builtin_memset_read_str,
2841 (PTR) &c, dest_align))
2842 return 0;
2843
2844 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2845 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2846 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2847 val_rtx);
2848 dest_mem = get_memory_rtx (dest);
2849 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2850 builtin_memset_gen_str,
2851 (PTR) val_rtx, dest_align, 0);
2852 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2853 #ifdef POINTERS_EXTEND_UNSIGNED
2854 if (GET_MODE (dest_mem) != ptr_mode)
2855 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2856 #endif
2857 return dest_mem;
2858 }
2859
2860 if (target_char_cast (val, &c))
2861 return 0;
2862
2863 if (c)
2864 {
2865 if (!host_integerp (len, 1))
2866 return 0;
2867 if (!can_store_by_pieces (tree_low_cst (len, 1),
2868 builtin_memset_read_str, (PTR) &c,
2869 dest_align))
2870 return 0;
2871
2872 dest_mem = get_memory_rtx (dest);
2873 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2874 builtin_memset_read_str,
2875 (PTR) &c, dest_align, 0);
2876 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2877 #ifdef POINTERS_EXTEND_UNSIGNED
2878 if (GET_MODE (dest_mem) != ptr_mode)
2879 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2880 #endif
2881 return dest_mem;
2882 }
2883
2884 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2885
2886 dest_mem = get_memory_rtx (dest);
2887 set_mem_align (dest_mem, dest_align);
2888 dest_addr = clear_storage (dest_mem, len_rtx);
2889
2890 if (dest_addr == 0)
2891 {
2892 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2893 #ifdef POINTERS_EXTEND_UNSIGNED
2894 if (GET_MODE (dest_addr) != ptr_mode)
2895 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2896 #endif
2897 }
2898
2899 return dest_addr;
2900 }
2901 }
2902
2903 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2904    if we failed; the caller should emit a normal call. */
2905
2906 static rtx
2907 expand_builtin_bzero (arglist)
2908 tree arglist;
2909 {
2910 tree dest, size, newarglist;
2911
2912 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2913 return NULL_RTX;
2914
2915 dest = TREE_VALUE (arglist);
2916 size = TREE_VALUE (TREE_CHAIN (arglist));
2917
2918 /* New argument list transforming bzero(ptr x, int y) to
2919 memset(ptr x, int 0, size_t y). This is done this way
2920    so that if it isn't expanded inline, we fall back to
2921 calling bzero instead of memset. */
2922
2923 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2924 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2925 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2926
2927 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
2928 }
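
/* Here too the fallback preserves the original function:

       bzero (p, n)   =>   memset (p, 0, n)

   and if the memset expander returns 0, a normal bzero call is emitted
   instead of a call to memset.  */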
2929
2930 /* Expand expression EXP, which is a call to the memcmp built-in function.
2931 ARGLIST is the argument list for this call. Return 0 if we failed and the
2932 caller should emit a normal call, otherwise try to get the result in
2933 TARGET, if convenient (and in mode MODE, if that's convenient). */
2934
2935 static rtx
2936 expand_builtin_memcmp (exp, arglist, target, mode)
2937 tree exp ATTRIBUTE_UNUSED;
2938 tree arglist;
2939 rtx target;
2940 enum machine_mode mode;
2941 {
2942 tree arg1, arg2, len;
2943 const char *p1, *p2;
2944
2945 if (!validate_arglist (arglist,
2946 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2947 return 0;
2948
2949 arg1 = TREE_VALUE (arglist);
2950 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2951 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2952
2953 /* If the len parameter is zero, return zero. */
2954 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2955 {
2956 /* Evaluate and ignore arg1 and arg2 in case they have
2957 side-effects. */
2958 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2959 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2960 return const0_rtx;
2961 }
2962
2963 p1 = c_getstr (arg1);
2964 p2 = c_getstr (arg2);
2965
2966 /* If all arguments are constant, and the value of len is not greater
2967 than the lengths of arg1 and arg2, evaluate at compile-time. */
2968 if (host_integerp (len, 1) && p1 && p2
2969 && compare_tree_int (len, strlen (p1) + 1) <= 0
2970 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2971 {
2972 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2973
2974 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2975 }
2976
2977 /* If len parameter is one, return an expression corresponding to
2978    (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2979 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2980 {
2981 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2982 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2983 tree ind1 =
2984 fold (build1 (CONVERT_EXPR, integer_type_node,
2985 build1 (INDIRECT_REF, cst_uchar_node,
2986 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2987 tree ind2 =
2988 fold (build1 (CONVERT_EXPR, integer_type_node,
2989 build1 (INDIRECT_REF, cst_uchar_node,
2990 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2991 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2992 return expand_expr (result, target, mode, EXPAND_NORMAL);
2993 }
2994
2995 #ifdef HAVE_cmpstrsi
2996 {
2997 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2998 rtx result;
2999 rtx insn;
3000
3001 int arg1_align
3002 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3003 int arg2_align
3004 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3005 enum machine_mode insn_mode
3006 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3007
3008 /* If we don't have POINTER_TYPE, call the function. */
3009 if (arg1_align == 0 || arg2_align == 0)
3010 return 0;
3011
3012 /* Make a place to write the result of the instruction. */
3013 result = target;
3014 if (! (result != 0
3015 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3016 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3017 result = gen_reg_rtx (insn_mode);
3018
3019 arg1_rtx = get_memory_rtx (arg1);
3020 arg2_rtx = get_memory_rtx (arg2);
3021 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3022 if (!HAVE_cmpstrsi)
3023 insn = NULL_RTX;
3024 else
3025 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3026 GEN_INT (MIN (arg1_align, arg2_align)));
3027
3028 if (insn)
3029 emit_insn (insn);
3030 else
3031 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3032 TYPE_MODE (integer_type_node), 3,
3033 XEXP (arg1_rtx, 0), Pmode,
3034 XEXP (arg2_rtx, 0), Pmode,
3035 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3036 TREE_UNSIGNED (sizetype)),
3037 TYPE_MODE (sizetype));
3038
3039 /* Return the value in the proper mode for this function. */
3040 mode = TYPE_MODE (TREE_TYPE (exp));
3041 if (GET_MODE (result) == mode)
3042 return result;
3043 else if (target != 0)
3044 {
3045 convert_move (target, result, 0);
3046 return target;
3047 }
3048 else
3049 return convert_to_mode (mode, result, 0);
3050 }
3051 #endif
3052
3053 return 0;
3054 }
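
/* Compile-time examples of the folds above:

       memcmp (p, q, 0)          =>   0
       memcmp ("abc", "abd", 3)  =>   -1
       memcmp (p, q, 1)          =>   *(const unsigned char *) p
                                        - *(const unsigned char *) q

   When none of these apply, the cmpstrsi pattern is used if the target
   provides one; otherwise a normal memcmp call is emitted.  */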
3055
3056 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3057    if we failed; the caller should emit a normal call. Otherwise try to get
3058 the result in TARGET, if convenient. */
3059
3060 static rtx
3061 expand_builtin_strcmp (exp, target, mode)
3062 tree exp;
3063 rtx target;
3064 enum machine_mode mode;
3065 {
3066 tree arglist = TREE_OPERAND (exp, 1);
3067 tree arg1, arg2;
3068 const char *p1, *p2;
3069
3070 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3071 return 0;
3072
3073 arg1 = TREE_VALUE (arglist);
3074 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3075
3076 p1 = c_getstr (arg1);
3077 p2 = c_getstr (arg2);
3078
3079 if (p1 && p2)
3080 {
3081 const int i = strcmp (p1, p2);
3082 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3083 }
3084
3085 /* If either arg is "", return an expression corresponding to
3086    (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
3087 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3088 {
3089 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3090 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3091 tree ind1 =
3092 fold (build1 (CONVERT_EXPR, integer_type_node,
3093 build1 (INDIRECT_REF, cst_uchar_node,
3094 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3095 tree ind2 =
3096 fold (build1 (CONVERT_EXPR, integer_type_node,
3097 build1 (INDIRECT_REF, cst_uchar_node,
3098 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3099 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3100 return expand_expr (result, target, mode, EXPAND_NORMAL);
3101 }
3102
3103 #ifdef HAVE_cmpstrsi
3104 if (HAVE_cmpstrsi)
3105 {
3106 tree len, len1, len2;
3107 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3108 rtx result, insn;
3109
3110 int arg1_align
3111 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3112 int arg2_align
3113 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3114 enum machine_mode insn_mode
3115 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3116
3117 len1 = c_strlen (arg1);
3118 len2 = c_strlen (arg2);
3119
3120 if (len1)
3121 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3122 if (len2)
3123 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3124
3125 /* If we don't have a constant length for the first, use the length
3126 of the second, if we know it. We don't require a constant for
3127 this case; some cost analysis could be done if both are available
3128 but neither is constant. For now, assume they're equally cheap,
3129 unless one has side effects. If both strings have constant lengths,
3130 use the smaller. */
3131
3132 if (!len1)
3133 len = len2;
3134 else if (!len2)
3135 len = len1;
3136 else if (TREE_SIDE_EFFECTS (len1))
3137 len = len2;
3138 else if (TREE_SIDE_EFFECTS (len2))
3139 len = len1;
3140 else if (TREE_CODE (len1) != INTEGER_CST)
3141 len = len2;
3142 else if (TREE_CODE (len2) != INTEGER_CST)
3143 len = len1;
3144 else if (tree_int_cst_lt (len1, len2))
3145 len = len1;
3146 else
3147 len = len2;
3148
3149 /* If both arguments have side effects, we cannot optimize. */
3150 if (!len || TREE_SIDE_EFFECTS (len))
3151 return 0;
3152
3153 /* If we don't have POINTER_TYPE, call the function. */
3154 if (arg1_align == 0 || arg2_align == 0)
3155 return 0;
3156
3157 /* Make a place to write the result of the instruction. */
3158 result = target;
3159 if (! (result != 0
3160 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3161 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3162 result = gen_reg_rtx (insn_mode);
3163
3164 arg1_rtx = get_memory_rtx (arg1);
3165 arg2_rtx = get_memory_rtx (arg2);
3166 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3167 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3168 GEN_INT (MIN (arg1_align, arg2_align)));
3169 if (!insn)
3170 return 0;
3171
3172 emit_insn (insn);
3173
3174 /* Return the value in the proper mode for this function. */
3175 mode = TYPE_MODE (TREE_TYPE (exp));
3176 if (GET_MODE (result) == mode)
3177 return result;
3178 if (target == 0)
3179 return convert_to_mode (mode, result, 0);
3180 convert_move (target, result, 0);
3181 return target;
3182 }
3183 #endif
3184 return 0;
3185 }
3186
3187 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3188    if we failed; the caller should emit a normal call. Otherwise try to get
3189 the result in TARGET, if convenient. */
3190
3191 static rtx
3192 expand_builtin_strncmp (exp, target, mode)
3193 tree exp;
3194 rtx target;
3195 enum machine_mode mode;
3196 {
3197 tree arglist = TREE_OPERAND (exp, 1);
3198 tree arg1, arg2, arg3;
3199 const char *p1, *p2;
3200
3201 if (!validate_arglist (arglist,
3202 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3203 return 0;
3204
3205 arg1 = TREE_VALUE (arglist);
3206 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3207 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3208
3209 /* If the len parameter is zero, return zero. */
3210 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3211 {
3212 /* Evaluate and ignore arg1 and arg2 in case they have
3213 side-effects. */
3214 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3215 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3216 return const0_rtx;
3217 }
3218
3219 p1 = c_getstr (arg1);
3220 p2 = c_getstr (arg2);
3221
3222 /* If all arguments are constant, evaluate at compile-time. */
3223 if (host_integerp (arg3, 1) && p1 && p2)
3224 {
3225 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3226 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3227 }
3228
3229 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3230 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3231 if (host_integerp (arg3, 1)
3232 && (tree_low_cst (arg3, 1) == 1
3233 || (tree_low_cst (arg3, 1) > 1
3234 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3235 {
3236 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3237 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3238 tree ind1 =
3239 fold (build1 (CONVERT_EXPR, integer_type_node,
3240 build1 (INDIRECT_REF, cst_uchar_node,
3241 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3242 tree ind2 =
3243 fold (build1 (CONVERT_EXPR, integer_type_node,
3244 build1 (INDIRECT_REF, cst_uchar_node,
3245 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3246 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3247 return expand_expr (result, target, mode, EXPAND_NORMAL);
3248 }
3249
3250 /* If c_strlen can determine an expression for one of the string
3251 lengths, and it doesn't have side effects, then emit cmpstrsi
3252 using length MIN(strlen(string)+1, arg3). */
3253 #ifdef HAVE_cmpstrsi
3254 if (HAVE_cmpstrsi)
3255 {
3256 tree len, len1, len2;
3257 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3258 rtx result, insn;
3259
3260 int arg1_align
3261 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3262 int arg2_align
3263 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3264 enum machine_mode insn_mode
3265 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3266
3267 len1 = c_strlen (arg1);
3268 len2 = c_strlen (arg2);
3269
3270 if (len1)
3271 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3272 if (len2)
3273 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3274
3275 /* If we don't have a constant length for the first, use the length
3276 of the second, if we know it. We don't require a constant for
3277 this case; some cost analysis could be done if both are available
3278 but neither is constant. For now, assume they're equally cheap,
3279 unless one has side effects. If both strings have constant lengths,
3280 use the smaller. */
3281
3282 if (!len1)
3283 len = len2;
3284 else if (!len2)
3285 len = len1;
3286 else if (TREE_SIDE_EFFECTS (len1))
3287 len = len2;
3288 else if (TREE_SIDE_EFFECTS (len2))
3289 len = len1;
3290 else if (TREE_CODE (len1) != INTEGER_CST)
3291 len = len2;
3292 else if (TREE_CODE (len2) != INTEGER_CST)
3293 len = len1;
3294 else if (tree_int_cst_lt (len1, len2))
3295 len = len1;
3296 else
3297 len = len2;
3298
3299 /* If both arguments have side effects, we cannot optimize. */
3300 if (!len || TREE_SIDE_EFFECTS (len))
3301 return 0;
3302
3303 /* The actual new length parameter is MIN(len,arg3). */
3304 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3305
3306 /* If we don't have POINTER_TYPE, call the function. */
3307 if (arg1_align == 0 || arg2_align == 0)
3308 return 0;
3309
3310 /* Make a place to write the result of the instruction. */
3311 result = target;
3312 if (! (result != 0
3313 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3314 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3315 result = gen_reg_rtx (insn_mode);
3316
3317 arg1_rtx = get_memory_rtx (arg1);
3318 arg2_rtx = get_memory_rtx (arg2);
3319 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3320 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3321 GEN_INT (MIN (arg1_align, arg2_align)));
3322 if (!insn)
3323 return 0;
3324
3325 emit_insn (insn);
3326
3327 /* Return the value in the proper mode for this function. */
3328 mode = TYPE_MODE (TREE_TYPE (exp));
3329 if (GET_MODE (result) == mode)
3330 return result;
3331 if (target == 0)
3332 return convert_to_mode (mode, result, 0);
3333 convert_move (target, result, 0);
3334 return target;
3335 }
3336 #endif
3337 return 0;
3338 }
3339
3340 /* Expand expression EXP, which is a call to the strcat builtin.
3341    Return 0 if we failed; the caller should emit a normal call.
3342    Otherwise try to get the result in TARGET, if convenient. */
3343
3344 static rtx
3345 expand_builtin_strcat (arglist, target, mode)
3346 tree arglist;
3347 rtx target;
3348 enum machine_mode mode;
3349 {
3350 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3351 return 0;
3352 else
3353 {
3354 tree dst = TREE_VALUE (arglist),
3355 src = TREE_VALUE (TREE_CHAIN (arglist));
3356 const char *p = c_getstr (src);
3357
3358 /* If the string length is zero, return the dst parameter. */
3359 if (p && *p == '\0')
3360 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3361
3362 return 0;
3363 }
3364 }
3365
3366 /* Expand expression EXP, which is a call to the strncat builtin.
3367    Return 0 if we failed; the caller should emit a normal call.
3368    Otherwise try to get the result in TARGET, if convenient. */
3369
3370 static rtx
3371 expand_builtin_strncat (arglist, target, mode)
3372 tree arglist;
3373 rtx target;
3374 enum machine_mode mode;
3375 {
3376 if (!validate_arglist (arglist,
3377 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3378 return 0;
3379 else
3380 {
3381 tree dst = TREE_VALUE (arglist),
3382 src = TREE_VALUE (TREE_CHAIN (arglist)),
3383 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3384 const char *p = c_getstr (src);
3385
3386 /* If the requested length is zero, or the src parameter string
3387 length is zero, return the dst parameter. */
3388 if (integer_zerop (len) || (p && *p == '\0'))
3389 {
3390 /* Evaluate and ignore the src and len parameters in case
3391 they have side-effects. */
3392 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3393 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3394 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3395 }
3396
3397 /* If the requested len is greater than or equal to the string
3398 length, call strcat. */
3399 if (TREE_CODE (len) == INTEGER_CST && p
3400 && compare_tree_int (len, strlen (p)) >= 0)
3401 {
3402 tree newarglist
3403 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3404 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3405
3406 /* If the replacement _DECL isn't initialized, don't do the
3407 transformation. */
3408 if (!fn)
3409 return 0;
3410
3411 return expand_expr (build_function_call_expr (fn, newarglist),
3412 target, mode, EXPAND_NORMAL);
3413 }
3414 return 0;
3415 }
3416 }
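
/* For example:

       strncat (d, s, 0)        =>   d        (s still evaluated)
       strncat (d, "", n)       =>   d
       strncat (d, "abc", 5)    =>   strcat (d, "abc")

   The last rewrite is safe because the bound is at least strlen (src),
   so strncat would copy the whole string anyway.  */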
3417
3418 /* Expand expression EXP, which is a call to the strspn builtin.
3419    Return 0 if we failed; the caller should emit a normal call.
3420    Otherwise try to get the result in TARGET, if convenient. */
3421
3422 static rtx
3423 expand_builtin_strspn (arglist, target, mode)
3424 tree arglist;
3425 rtx target;
3426 enum machine_mode mode;
3427 {
3428 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3429 return 0;
3430 else
3431 {
3432 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3433 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3434
3435 /* If both arguments are constants, evaluate at compile-time. */
3436 if (p1 && p2)
3437 {
3438 const size_t r = strspn (p1, p2);
3439 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3440 }
3441
3442 /* If either argument is "", return 0. */
3443 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3444 {
3445 /* Evaluate and ignore both arguments in case either one has
3446 side-effects. */
3447 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3448 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3449 return const0_rtx;
3450 }
3451 return 0;
3452 }
3453 }
3454
3455 /* Expand expression EXP, which is a call to the strcspn builtin.
3456    Return 0 if we failed; the caller should emit a normal call.
3457    Otherwise try to get the result in TARGET, if convenient. */
3458
3459 static rtx
3460 expand_builtin_strcspn (arglist, target, mode)
3461 tree arglist;
3462 rtx target;
3463 enum machine_mode mode;
3464 {
3465 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3466 return 0;
3467 else
3468 {
3469 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3470 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3471
3472 /* If both arguments are constants, evaluate at compile-time. */
3473 if (p1 && p2)
3474 {
3475 const size_t r = strcspn (p1, p2);
3476 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3477 }
3478
3479 /* If the first argument is "", return 0. */
3480 if (p1 && *p1 == '\0')
3481 {
3482 /* Evaluate and ignore argument s2 in case it has
3483 side-effects. */
3484 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3485 return const0_rtx;
3486 }
3487
3488 /* If the second argument is "", return __builtin_strlen(s1). */
3489 if (p2 && *p2 == '\0')
3490 {
3491 tree newarglist = build_tree_list (NULL_TREE, s1),
3492 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3493
3494 /* If the replacement _DECL isn't initialized, don't do the
3495 transformation. */
3496 if (!fn)
3497 return 0;
3498
3499 return expand_expr (build_function_call_expr (fn, newarglist),
3500 target, mode, EXPAND_NORMAL);
3501 }
3502 return 0;
3503 }
3504 }
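
/* For example:

       strcspn ("hello", "lo")   =>   2
       strcspn ("", s)           =>   0        (s still evaluated)
       strcspn (s, "")           =>   strlen (s)

   Anything else is left to the library.  */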
3505
3506 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3507 if that's convenient. */
3508
3509 rtx
3510 expand_builtin_saveregs ()
3511 {
3512 rtx val, seq;
3513
3514 /* Don't do __builtin_saveregs more than once in a function.
3515 Save the result of the first call and reuse it. */
3516 if (saveregs_value != 0)
3517 return saveregs_value;
3518
3519 /* When this function is called, it means that registers must be
3520 saved on entry to this function. So we migrate the call to the
3521 first insn of this function. */
3522
3523 start_sequence ();
3524
3525 #ifdef EXPAND_BUILTIN_SAVEREGS
3526 /* Do whatever the machine needs done in this case. */
3527 val = EXPAND_BUILTIN_SAVEREGS ();
3528 #else
3529 /* ??? We used to try and build up a call to the out of line function,
3530 guessing about what registers needed saving etc. This became much
3531 harder with __builtin_va_start, since we don't have a tree for a
3532 call to __builtin_saveregs to fall back on. There was exactly one
3533 port (i860) that used this code, and I'm unconvinced it could actually
3534 handle the general case. So we no longer try to handle anything
3535 weird and make the backend absorb the evil. */
3536
3537 error ("__builtin_saveregs not supported by this target");
3538 val = const0_rtx;
3539 #endif
3540
3541 seq = get_insns ();
3542 end_sequence ();
3543
3544 saveregs_value = val;
3545
3546 /* Put the insns after the NOTE that starts the function. If this
3547 is inside a start_sequence, make the outer-level insn chain current, so
3548 the code is placed at the start of the function. */
3549 push_topmost_sequence ();
3550 emit_insn_after (seq, get_insns ());
3551 pop_topmost_sequence ();
3552
3553 return val;
3554 }
3555
3556 /* __builtin_args_info (N) returns word N of the arg space info
3557 for the current function. The number and meanings of words
3558 are controlled by the definition of CUMULATIVE_ARGS. */
3559
3560 static rtx
3561 expand_builtin_args_info (arglist)
3562 tree arglist;
3563 {
3564 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3565 int *word_ptr = (int *) &current_function_args_info;
3566
3567 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3568 abort ();
3569
3570 if (arglist != 0)
3571 {
3572 if (!host_integerp (TREE_VALUE (arglist), 0))
3573 error ("argument of `__builtin_args_info' must be constant");
3574 else
3575 {
3576 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3577
3578 if (wordnum < 0 || wordnum >= nwords)
3579 error ("argument of `__builtin_args_info' out of range");
3580 else
3581 return GEN_INT (word_ptr[wordnum]);
3582 }
3583 }
3584 else
3585 error ("missing argument in `__builtin_args_info'");
3586
3587 return const0_rtx;
3588 }
3589
3590 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3591
3592 static rtx
3593 expand_builtin_next_arg (arglist)
3594 tree arglist;
3595 {
3596 tree fntype = TREE_TYPE (current_function_decl);
3597
3598 if (TYPE_ARG_TYPES (fntype) == 0
3599 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3600 == void_type_node))
3601 {
3602 error ("`va_start' used in function with fixed args");
3603 return const0_rtx;
3604 }
3605
3606 if (arglist)
3607 {
3608 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3609 tree arg = TREE_VALUE (arglist);
3610
3611 /* Strip off all nops for the sake of the comparison. This
3612 is not quite the same as STRIP_NOPS. It does more.
3613 We must also strip off INDIRECT_EXPR for C++ reference
3614 parameters. */
3615 while (TREE_CODE (arg) == NOP_EXPR
3616 || TREE_CODE (arg) == CONVERT_EXPR
3617 || TREE_CODE (arg) == NON_LVALUE_EXPR
3618 || TREE_CODE (arg) == INDIRECT_REF)
3619 arg = TREE_OPERAND (arg, 0);
3620 if (arg != last_parm)
3621 warning ("second parameter of `va_start' not last named argument");
3622 }
3623 else
3624 /* Evidently an out-of-date version of <stdarg.h>; can't validate
3625 va_start's second argument, but can still work as intended. */
3626 warning ("`__builtin_next_arg' called without an argument");
3627
3628 return expand_binop (Pmode, add_optab,
3629 current_function_internal_arg_pointer,
3630 current_function_arg_offset_rtx,
3631 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3632 }
3633
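/* For illustration (an assumed example, not from the original sources),
   the checks above accept the canonical use

	void f (int last, ...)
	{
	  va_list ap;
	  va_start (ap, last);		-- LAST is the last named parameter
	  ...
	  va_end (ap);
	}

   error out when F takes no variable arguments at all, and warn when the
   second operand of va_start does not name the last declared parameter.  */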
3634 /* Make it easier for the backends by protecting the valist argument
3635 from multiple evaluations. */
3636
3637 static tree
3638 stabilize_va_list (valist, needs_lvalue)
3639 tree valist;
3640 int needs_lvalue;
3641 {
3642 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3643 {
3644 if (TREE_SIDE_EFFECTS (valist))
3645 valist = save_expr (valist);
3646
3647 /* For this case, the backends will be expecting a pointer to
3648 TREE_TYPE (va_list_type_node), but it's possible we've
3649 actually been given an array (an actual va_list_type_node).
3650 So fix it. */
3651 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3652 {
3653 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3654 tree p2 = build_pointer_type (va_list_type_node);
3655
3656 valist = build1 (ADDR_EXPR, p2, valist);
3657 valist = fold (build1 (NOP_EXPR, p1, valist));
3658 }
3659 }
3660 else
3661 {
3662 tree pt;
3663
3664 if (! needs_lvalue)
3665 {
3666 if (! TREE_SIDE_EFFECTS (valist))
3667 return valist;
3668
3669 pt = build_pointer_type (va_list_type_node);
3670 valist = fold (build1 (ADDR_EXPR, pt, valist));
3671 TREE_SIDE_EFFECTS (valist) = 1;
3672 }
3673
3674 if (TREE_SIDE_EFFECTS (valist))
3675 valist = save_expr (valist);
3676 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3677 valist));
3678 }
3679
3680 return valist;
3681 }
3682
3683 /* The "standard" implementation of va_start: just assign `nextarg' to
3684 the variable. */
3685
3686 void
3687 std_expand_builtin_va_start (valist, nextarg)
3688 tree valist;
3689 rtx nextarg;
3690 {
3691 tree t;
3692
3693 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3694 make_tree (ptr_type_node, nextarg));
3695 TREE_SIDE_EFFECTS (t) = 1;
3696
3697 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3698 }
3699
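/* A minimal sketch of the standard expansion (an assumption, for
   illustration only): `va_start (ap, last)' simply stores the address of
   the first anonymous stack argument into AP, roughly

	ap = (char *) __builtin_next_arg (last);

   leaving all per-argument bookkeeping to va_arg.  */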
3700 /* Expand ARGLIST, from a call to __builtin_va_start. */
3701
3702 static rtx
3703 expand_builtin_va_start (arglist)
3704 tree arglist;
3705 {
3706 rtx nextarg;
3707 tree chain, valist;
3708
3709 chain = TREE_CHAIN (arglist);
3710
3711 if (TREE_CHAIN (chain))
3712 error ("too many arguments to function `va_start'");
3713
3714 nextarg = expand_builtin_next_arg (chain);
3715 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3716
3717 #ifdef EXPAND_BUILTIN_VA_START
3718 EXPAND_BUILTIN_VA_START (valist, nextarg);
3719 #else
3720 std_expand_builtin_va_start (valist, nextarg);
3721 #endif
3722
3723 return const0_rtx;
3724 }
3725
3726 /* The "standard" implementation of va_arg: read the value from the
3727 current (padded) address and increment by the (padded) size. */
3728
3729 rtx
3730 std_expand_builtin_va_arg (valist, type)
3731 tree valist, type;
3732 {
3733 tree addr_tree, t, type_size = NULL;
3734 tree align, alignm1;
3735 tree rounded_size;
3736 rtx addr;
3737
3738 /* Compute the rounded size of the type. */
3739 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3740 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3741 if (type == error_mark_node
3742 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3743 || TREE_OVERFLOW (type_size))
3744 rounded_size = size_zero_node;
3745 else
3746 rounded_size = fold (build (MULT_EXPR, sizetype,
3747 fold (build (TRUNC_DIV_EXPR, sizetype,
3748 fold (build (PLUS_EXPR, sizetype,
3749 type_size, alignm1)),
3750 align)),
3751 align));
3752
3753 /* Get AP. */
3754 addr_tree = valist;
3755 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3756 {
3757 /* Small args are padded downward. */
3758 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3759 fold (build (COND_EXPR, sizetype,
3760 fold (build (GT_EXPR, sizetype,
3761 rounded_size,
3762 align)),
3763 size_zero_node,
3764 fold (build (MINUS_EXPR, sizetype,
3765 rounded_size,
3766 type_size))))));
3767 }
3768
3769 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3770 addr = copy_to_reg (addr);
3771
3772 /* Compute new value for AP. */
3773 if (! integer_zerop (rounded_size))
3774 {
3775 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3776 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3777 rounded_size));
3778 TREE_SIDE_EFFECTS (t) = 1;
3779 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3780 }
3781
3782 return addr;
3783 }
3784
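/* Rough source-level equivalent of the standard va_arg expansion above
   (a sketch under the stated assumptions, not the actual implementation):

	align   = PARM_BOUNDARY / BITS_PER_UNIT;
	rounded = ((sizeof (TYPE) + align - 1) / align) * align;
	addr    = ap;			   -- adjusted downward when the
					      target pads small args down
	ap      = (char *) ap + rounded;   -- step over the argument slot
	value   = *(TYPE *) addr;
*/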
3785 /* Expand __builtin_va_arg, which is not really a builtin function, but
3786 a very special sort of operator. */
3787
3788 rtx
3789 expand_builtin_va_arg (valist, type)
3790 tree valist, type;
3791 {
3792 rtx addr, result;
3793 tree promoted_type, want_va_type, have_va_type;
3794
3795 /* Verify that valist is of the proper type. */
3796
3797 want_va_type = va_list_type_node;
3798 have_va_type = TREE_TYPE (valist);
3799 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3800 {
3801 /* If va_list is an array type, the argument may have decayed
3802 to a pointer type, e.g. by being passed to another function.
3803 In that case, unwrap both types so that we can compare the
3804 underlying records. */
3805 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3806 || TREE_CODE (have_va_type) == POINTER_TYPE)
3807 {
3808 want_va_type = TREE_TYPE (want_va_type);
3809 have_va_type = TREE_TYPE (have_va_type);
3810 }
3811 }
3812 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3813 {
3814 error ("first argument to `va_arg' not of type `va_list'");
3815 addr = const0_rtx;
3816 }
3817
3818 /* Generate a diagnostic for requesting data of a type that cannot
3819 be passed through `...' due to type promotion at the call site. */
3820 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3821 != type)
3822 {
3823 const char *name = "<anonymous type>", *pname = 0;
3824 static bool gave_help;
3825
3826 if (TYPE_NAME (type))
3827 {
3828 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3829 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3830 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3831 && DECL_NAME (TYPE_NAME (type)))
3832 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3833 }
3834 if (TYPE_NAME (promoted_type))
3835 {
3836 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3837 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3838 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3839 && DECL_NAME (TYPE_NAME (promoted_type)))
3840 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3841 }
3842
3843 /* Unfortunately, this is merely undefined, rather than a constraint
3844 violation, so we cannot make this an error. If this call is never
3845 executed, the program is still strictly conforming. */
3846 warning ("`%s' is promoted to `%s' when passed through `...'",
3847 name, pname);
3848 if (! gave_help)
3849 {
3850 gave_help = true;
3851 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3852 pname, name);
3853 }
3854
3855 /* We can, however, treat "undefined" any way we please.
3856 Call abort to encourage the user to fix the program. */
3857 expand_builtin_trap ();
3858
3859 /* This is dead code, but go ahead and finish so that the
3860 mode of the result comes out right. */
3861 addr = const0_rtx;
3862 }
3863 else
3864 {
3865 /* Make it easier for the backends by protecting the valist argument
3866 from multiple evaluations. */
3867 valist = stabilize_va_list (valist, 0);
3868
3869 #ifdef EXPAND_BUILTIN_VA_ARG
3870 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3871 #else
3872 addr = std_expand_builtin_va_arg (valist, type);
3873 #endif
3874 }
3875
3876 #ifdef POINTERS_EXTEND_UNSIGNED
3877 if (GET_MODE (addr) != Pmode)
3878 addr = convert_memory_address (Pmode, addr);
3879 #endif
3880
3881 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3882 set_mem_alias_set (result, get_varargs_alias_set ());
3883
3884 return result;
3885 }
3886
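/* Illustration of the promotion diagnostic above (assumed code, not part
   of the original sources): because `char' is promoted to `int' and
   `float' to `double' when passed through `...',

	char c = va_arg (ap, char);	-- warned about; undefined
	char c = va_arg (ap, int);	-- what the user should write

   the first form receives the warning and expands to a trap.  */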
3887 /* Expand ARGLIST, from a call to __builtin_va_end. */
3888
3889 static rtx
3890 expand_builtin_va_end (arglist)
3891 tree arglist;
3892 {
3893 tree valist = TREE_VALUE (arglist);
3894
3895 #ifdef EXPAND_BUILTIN_VA_END
3896 valist = stabilize_va_list (valist, 0);
3897 EXPAND_BUILTIN_VA_END (arglist);
3898 #else
3899 /* Evaluate for side effects, if needed. I hate macros that don't
3900 do that. */
3901 if (TREE_SIDE_EFFECTS (valist))
3902 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3903 #endif
3904
3905 return const0_rtx;
3906 }
3907
3908 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3909 builtin rather than just as an assignment in stdarg.h because of the
3910 nastiness of array-type va_list types. */
3911
3912 static rtx
3913 expand_builtin_va_copy (arglist)
3914 tree arglist;
3915 {
3916 tree dst, src, t;
3917
3918 dst = TREE_VALUE (arglist);
3919 src = TREE_VALUE (TREE_CHAIN (arglist));
3920
3921 dst = stabilize_va_list (dst, 1);
3922 src = stabilize_va_list (src, 0);
3923
3924 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3925 {
3926 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3927 TREE_SIDE_EFFECTS (t) = 1;
3928 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3929 }
3930 else
3931 {
3932 rtx dstb, srcb, size;
3933
3934 /* Evaluate to pointers. */
3935 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3936 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3937 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3938 VOIDmode, EXPAND_NORMAL);
3939
3940 #ifdef POINTERS_EXTEND_UNSIGNED
3941 if (GET_MODE (dstb) != Pmode)
3942 dstb = convert_memory_address (Pmode, dstb);
3943
3944 if (GET_MODE (srcb) != Pmode)
3945 srcb = convert_memory_address (Pmode, srcb);
3946 #endif
3947
3948 /* "Dereference" to BLKmode memories. */
3949 dstb = gen_rtx_MEM (BLKmode, dstb);
3950 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3951 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3952 srcb = gen_rtx_MEM (BLKmode, srcb);
3953 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3954 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3955
3956 /* Copy. */
3957 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3958 }
3959
3960 return const0_rtx;
3961 }
3962
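/* Sketch of the two cases handled above (an illustrative assumption):
   when va_list is an ordinary record or pointer type, `va_copy (d, s)'
   becomes the plain assignment `d = s'; when it is an array type, it
   becomes a block copy, roughly

	memcpy (&d[0], &s[0], sizeof (va_list));
*/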
3963 /* Expand a call to one of the builtin functions __builtin_frame_address or
3964 __builtin_return_address. */
3965
3966 static rtx
3967 expand_builtin_frame_address (fndecl, arglist)
3968 tree fndecl, arglist;
3969 {
3970 /* The argument must be a nonnegative integer constant.
3971 It counts the number of frames to scan up the stack.
3972 The value is the return address saved in that frame, or that frame's address. */
3973 if (arglist == 0)
3974 /* Warning about missing arg was already issued. */
3975 return const0_rtx;
3976 else if (! host_integerp (TREE_VALUE (arglist), 1))
3977 {
3978 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3979 error ("invalid arg to `__builtin_frame_address'");
3980 else
3981 error ("invalid arg to `__builtin_return_address'");
3982 return const0_rtx;
3983 }
3984 else
3985 {
3986 rtx tem
3987 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3988 tree_low_cst (TREE_VALUE (arglist), 1),
3989 hard_frame_pointer_rtx);
3990
3991 /* Some ports cannot access arbitrary stack frames. */
3992 if (tem == NULL)
3993 {
3994 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3995 warning ("unsupported arg to `__builtin_frame_address'");
3996 else
3997 warning ("unsupported arg to `__builtin_return_address'");
3998 return const0_rtx;
3999 }
4000
4001 /* For __builtin_frame_address, return what we've got. */
4002 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4003 return tem;
4004
4005 if (GET_CODE (tem) != REG
4006 && ! CONSTANT_P (tem))
4007 tem = copy_to_mode_reg (Pmode, tem);
4008 return tem;
4009 }
4010 }
4011
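/* Usage sketch (an assumed example, not from the original sources):

	void *ret   = __builtin_return_address (0);
	void *frame = __builtin_frame_address (0);

   The argument must be a nonnegative integer constant naming how many
   frames to walk up; nonzero counts may be rejected by some targets.  */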
4012 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4013 we failed and the caller should emit a normal call, otherwise try to get
4014 the result in TARGET, if convenient. */
4015
4016 static rtx
4017 expand_builtin_alloca (arglist, target)
4018 tree arglist;
4019 rtx target;
4020 {
4021 rtx op0;
4022 rtx result;
4023
4024 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4025 return 0;
4026
4027 /* Compute the argument. */
4028 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4029
4030 /* Allocate the desired space. */
4031 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4032
4033 #ifdef POINTERS_EXTEND_UNSIGNED
4034 if (GET_MODE (result) != ptr_mode)
4035 result = convert_memory_address (ptr_mode, result);
4036 #endif
4037
4038 return result;
4039 }
4040
4041 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4042 Return 0 if a normal call should be emitted rather than expanding the
4043 function in-line. If convenient, the result should be placed in TARGET.
4044 SUBTARGET may be used as the target for computing one of EXP's operands. */
4045
4046 static rtx
4047 expand_builtin_unop (target_mode, arglist, target, subtarget, op_optab)
4048 enum machine_mode target_mode;
4049 tree arglist;
4050 rtx target, subtarget;
4051 optab op_optab;
4052 {
4053 rtx op0;
4054 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4055 return 0;
4056
4057 /* Compute the argument. */
4058 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4059 /* Compute op, into TARGET if possible.
4060 Set TARGET to wherever the result comes back. */
4061 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4062 op_optab, op0, target, 1);
4063 if (target == 0)
4064 abort ();
4065
4066 return convert_to_mode (target_mode, target, 0);
4067 }
4068
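/* The bit-oriented builtins later dispatched through this helper, for
   illustration (assumed examples):

	__builtin_ffs (0x18)      == 4	   -- lowest set bit, 1-based
	__builtin_popcount (0xf0) == 4	   -- number of set bits
	__builtin_parity (0x7)    == 1	   -- popcount modulo 2

   each maps onto a single optab operation when the target provides one.  */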
4069 /* If the string passed to fputs is a constant and is one character
4070 long, we attempt to transform this call into __builtin_fputc(). */
4071
4072 static rtx
4073 expand_builtin_fputs (arglist, ignore, unlocked)
4074 tree arglist;
4075 int ignore;
4076 int unlocked;
4077 {
4078 tree len, fn;
4079 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4080 : implicit_built_in_decls[BUILT_IN_FPUTC];
4081 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
4082 : implicit_built_in_decls[BUILT_IN_FWRITE];
4083
4084 /* If the return value is used, or the replacement _DECL isn't
4085 initialized, don't do the transformation. */
4086 if (!ignore || !fn_fputc || !fn_fwrite)
4087 return 0;
4088
4089 /* Verify the arguments in the original call. */
4090 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4091 return 0;
4092
4093 /* Get the length of the string passed to fputs. If the length
4094 can't be determined, punt. */
4095 if (!(len = c_strlen (TREE_VALUE (arglist)))
4096 || TREE_CODE (len) != INTEGER_CST)
4097 return 0;
4098
4099 switch (compare_tree_int (len, 1))
4100 {
4101 case -1: /* length is 0, delete the call entirely. */
4102 {
4103 /* Evaluate and ignore the argument in case it has
4104 side-effects. */
4105 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
4106 VOIDmode, EXPAND_NORMAL);
4107 return const0_rtx;
4108 }
4109 case 0: /* length is 1, call fputc. */
4110 {
4111 const char *p = c_getstr (TREE_VALUE (arglist));
4112
4113 if (p != NULL)
4114 {
4115 /* New argument list transforming fputs(string, stream) to
4116 fputc(string[0], stream). */
4117 arglist =
4118 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4119 arglist =
4120 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
4121 fn = fn_fputc;
4122 break;
4123 }
4124 }
4125 /* FALLTHROUGH */
4126 case 1: /* length is greater than 1, call fwrite. */
4127 {
4128 tree string_arg;
4129
4130 /* If optimizing for size keep fputs. */
4131 if (optimize_size)
4132 return 0;
4133 string_arg = TREE_VALUE (arglist);
4134 /* New argument list transforming fputs(string, stream) to
4135 fwrite(string, 1, len, stream). */
4136 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4137 arglist = tree_cons (NULL_TREE, len, arglist);
4138 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4139 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4140 fn = fn_fwrite;
4141 break;
4142 }
4143 default:
4144 abort ();
4145 }
4146
4147 return expand_expr (build_function_call_expr (fn, arglist),
4148 (ignore ? const0_rtx : NULL_RTX),
4149 VOIDmode, EXPAND_NORMAL);
4150 }
4151
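/* Source-level view of the transformation above (illustration only):

	fputs ("", f);      ->  (void) f;                  -- call removed
	fputs ("\n", f);    ->  fputc ('\n', f);
	fputs ("hi\n", f);  ->  fwrite ("hi\n", 1, 3, f);  -- skipped for -Os
*/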
4152 /* Expand a call to __builtin_expect. We return our argument and emit a
4153 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4154 a non-jump context. */
4155
4156 static rtx
4157 expand_builtin_expect (arglist, target)
4158 tree arglist;
4159 rtx target;
4160 {
4161 tree exp, c;
4162 rtx note, rtx_c;
4163
4164 if (arglist == NULL_TREE
4165 || TREE_CHAIN (arglist) == NULL_TREE)
4166 return const0_rtx;
4167 exp = TREE_VALUE (arglist);
4168 c = TREE_VALUE (TREE_CHAIN (arglist));
4169
4170 if (TREE_CODE (c) != INTEGER_CST)
4171 {
4172 error ("second arg to `__builtin_expect' must be a constant");
4173 c = integer_zero_node;
4174 }
4175
4176 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4177
4178 /* Don't bother with expected value notes for integral constants. */
4179 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4180 {
4181 /* We do need to force this into a register so that we can be
4182 moderately sure to be able to correctly interpret the branch
4183 condition later. */
4184 target = force_reg (GET_MODE (target), target);
4185
4186 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4187
4188 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
4189 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4190 }
4191
4192 return target;
4193 }
4194
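/* Typical use of the builtin expanded above (an assumed example):

	if (__builtin_expect (ptr == NULL, 0))	-- "unlikely"
	  handle_error ();

   In a non-jump context the value of PTR == NULL is returned unchanged
   and only a NOTE_INSN_EXPECTED_VALUE note is attached for the branch
   predictor.  */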
4195 /* Like expand_builtin_expect, except do this in a jump context. This is
4196 called from do_jump if the conditional is a __builtin_expect. Return either
4197 a list of insns to emit the jump or NULL if we cannot optimize
4198 __builtin_expect. We need to optimize this at jump time so that machines
4199 like the PowerPC don't turn the test into a SCC operation, and then jump
4200 based on the test being 0/1. */
4201
4202 rtx
4203 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
4204 tree exp;
4205 rtx if_false_label;
4206 rtx if_true_label;
4207 {
4208 tree arglist = TREE_OPERAND (exp, 1);
4209 tree arg0 = TREE_VALUE (arglist);
4210 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4211 rtx ret = NULL_RTX;
4212
4213 /* Only handle __builtin_expect (test, 0) and
4214 __builtin_expect (test, 1). */
4215 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4216 && (integer_zerop (arg1) || integer_onep (arg1)))
4217 {
4218 int num_jumps = 0;
4219 rtx insn;
4220
4221 /* If we fail to locate an appropriate conditional jump, we'll
4222 fall back to normal evaluation. Ensure that the expression
4223 can be re-evaluated. */
4224 switch (unsafe_for_reeval (arg0))
4225 {
4226 case 0: /* Safe. */
4227 break;
4228
4229 case 1: /* Mildly unsafe. */
4230 arg0 = unsave_expr (arg0);
4231 break;
4232
4233 case 2: /* Wildly unsafe. */
4234 return NULL_RTX;
4235 }
4236
4237 /* Expand the jump insns. */
4238 start_sequence ();
4239 do_jump (arg0, if_false_label, if_true_label);
4240 ret = get_insns ();
4241 end_sequence ();
4242
4243 /* Now that the __builtin_expect has been validated, go through and add
4244 the expectations to each of the conditional jumps. If we run into an
4245 error, just give up and generate the 'safe' code of doing a SCC
4246 operation and then doing a branch on that. */
4247 insn = ret;
4248 while (insn != NULL_RTX)
4249 {
4250 rtx next = NEXT_INSN (insn);
4251
4252 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
4253 {
4254 rtx ifelse = SET_SRC (pc_set (insn));
4255 rtx label;
4256 int taken;
4257
4258 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4259 {
4260 taken = 1;
4261 label = XEXP (XEXP (ifelse, 1), 0);
4262 }
4263 /* An inverted jump reverses the probabilities. */
4264 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4265 {
4266 taken = 0;
4267 label = XEXP (XEXP (ifelse, 2), 0);
4268 }
4269 /* We shouldn't have to worry about conditional returns during
4270 the expansion stage, but handle it gracefully anyway. */
4271 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4272 {
4273 taken = 1;
4274 label = NULL_RTX;
4275 }
4276 /* An inverted return reverses the probabilities. */
4277 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4278 {
4279 taken = 0;
4280 label = NULL_RTX;
4281 }
4282 else
4283 goto do_next_insn;
4284
4285 /* If the test is expected to fail, reverse the
4286 probabilities. */
4287 if (integer_zerop (arg1))
4288 taken = 1 - taken;
4289
4290 /* If we are jumping to the false label, reverse the
4291 probabilities. */
4292 if (label == NULL_RTX)
4293 ; /* conditional return */
4294 else if (label == if_false_label)
4295 taken = 1 - taken;
4296 else if (label != if_true_label)
4297 goto do_next_insn;
4298
4299 num_jumps++;
4300 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4301 }
4302
4303 do_next_insn:
4304 insn = next;
4305 }
4306
4307 /* If no jumps were modified, fail and do __builtin_expect the normal
4308 way. */
4309 if (num_jumps == 0)
4310 ret = NULL_RTX;
4311 }
4312
4313 return ret;
4314 }
4315
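/* Jump-context sketch (an illustrative assumption): for

	if (__builtin_expect (x > 0, 1))
	  likely_path ();

   the conditional jump emitted by do_jump above is given a
   PRED_BUILTIN_EXPECT "taken" prediction; a second argument of 0, or a
   branch that targets the false label, flips the predicted direction
   instead of forcing the comparison through an SCC sequence.  */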
4316 void
4317 expand_builtin_trap ()
4318 {
4319 #ifdef HAVE_trap
4320 if (HAVE_trap)
4321 emit_insn (gen_trap ());
4322 else
4323 #endif
4324 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4325 emit_barrier ();
4326 }
4327
4328 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4329 Return 0 if a normal call should be emitted rather than expanding
4330 the function inline. If convenient, the result should be placed
4331 in TARGET. SUBTARGET may be used as the target for computing
4332 the operand. */
4333
4334 static rtx
4335 expand_builtin_fabs (arglist, target, subtarget)
4336 tree arglist;
4337 rtx target, subtarget;
4338 {
4339 enum machine_mode mode;
4340 tree arg;
4341 rtx op0;
4342
4343 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4344 return 0;
4345
4346 arg = TREE_VALUE (arglist);
4347 mode = TYPE_MODE (TREE_TYPE (arg));
4348 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4349 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4350 }
4351
4352 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4353 Return 0 if a normal call should be emitted rather than expanding
4354 the function inline. If convenient, the result should be placed
4355 in target. */
4356
4357 static rtx
4358 expand_builtin_cabs (arglist, target)
4359 tree arglist;
4360 rtx target;
4361 {
4362 enum machine_mode mode;
4363 tree arg;
4364 rtx op0;
4365
4366 if (arglist == 0 || TREE_CHAIN (arglist))
4367 return 0;
4368 arg = TREE_VALUE (arglist);
4369 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4370 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4371 return 0;
4372
4373 mode = TYPE_MODE (TREE_TYPE (arg));
4374 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4375 return expand_complex_abs (mode, op0, target, 0);
4376 }
4377
4378 \f
4379 /* Expand an expression EXP that calls a built-in function,
4380 with result going to TARGET if that's convenient
4381 (and in mode MODE if that's convenient).
4382 SUBTARGET may be used as the target for computing one of EXP's operands.
4383 IGNORE is nonzero if the value is to be ignored. */
4384
4385 rtx
4386 expand_builtin (exp, target, subtarget, mode, ignore)
4387 tree exp;
4388 rtx target;
4389 rtx subtarget;
4390 enum machine_mode mode;
4391 int ignore;
4392 {
4393 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4394 tree arglist = TREE_OPERAND (exp, 1);
4395 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4396 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4397
4398 /* Perform postincrements before expanding builtin functions.  */
4399 emit_queue ();
4400
4401 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4402 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4403
4404 /* When not optimizing, generate calls to library functions for a certain
4405 set of builtins. */
4406 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4407 switch (fcode)
4408 {
4409 case BUILT_IN_SQRT:
4410 case BUILT_IN_SQRTF:
4411 case BUILT_IN_SQRTL:
4412 case BUILT_IN_SIN:
4413 case BUILT_IN_SINF:
4414 case BUILT_IN_SINL:
4415 case BUILT_IN_COS:
4416 case BUILT_IN_COSF:
4417 case BUILT_IN_COSL:
4418 case BUILT_IN_EXP:
4419 case BUILT_IN_EXPF:
4420 case BUILT_IN_EXPL:
4421 case BUILT_IN_LOG:
4422 case BUILT_IN_LOGF:
4423 case BUILT_IN_LOGL:
4424 case BUILT_IN_TAN:
4425 case BUILT_IN_TANF:
4426 case BUILT_IN_TANL:
4427 case BUILT_IN_ATAN:
4428 case BUILT_IN_ATANF:
4429 case BUILT_IN_ATANL:
4430 case BUILT_IN_POW:
4431 case BUILT_IN_POWF:
4432 case BUILT_IN_POWL:
4433 case BUILT_IN_ATAN2:
4434 case BUILT_IN_ATAN2F:
4435 case BUILT_IN_ATAN2L:
4436 case BUILT_IN_MEMSET:
4437 case BUILT_IN_MEMCPY:
4438 case BUILT_IN_MEMCMP:
4439 case BUILT_IN_MEMPCPY:
4440 case BUILT_IN_MEMMOVE:
4441 case BUILT_IN_BCMP:
4442 case BUILT_IN_BZERO:
4443 case BUILT_IN_BCOPY:
4444 case BUILT_IN_INDEX:
4445 case BUILT_IN_RINDEX:
4446 case BUILT_IN_STPCPY:
4447 case BUILT_IN_STRCHR:
4448 case BUILT_IN_STRRCHR:
4449 case BUILT_IN_STRLEN:
4450 case BUILT_IN_STRCPY:
4451 case BUILT_IN_STRNCPY:
4452 case BUILT_IN_STRNCMP:
4453 case BUILT_IN_STRSTR:
4454 case BUILT_IN_STRPBRK:
4455 case BUILT_IN_STRCAT:
4456 case BUILT_IN_STRNCAT:
4457 case BUILT_IN_STRSPN:
4458 case BUILT_IN_STRCSPN:
4459 case BUILT_IN_STRCMP:
4460 case BUILT_IN_FFS:
4461 case BUILT_IN_PUTCHAR:
4462 case BUILT_IN_PUTS:
4463 case BUILT_IN_PRINTF:
4464 case BUILT_IN_FPUTC:
4465 case BUILT_IN_FPUTS:
4466 case BUILT_IN_FWRITE:
4467 case BUILT_IN_PUTCHAR_UNLOCKED:
4468 case BUILT_IN_PUTS_UNLOCKED:
4469 case BUILT_IN_PRINTF_UNLOCKED:
4470 case BUILT_IN_FPUTC_UNLOCKED:
4471 case BUILT_IN_FPUTS_UNLOCKED:
4472 case BUILT_IN_FWRITE_UNLOCKED:
4473 case BUILT_IN_FLOOR:
4474 case BUILT_IN_FLOORF:
4475 case BUILT_IN_FLOORL:
4476 case BUILT_IN_CEIL:
4477 case BUILT_IN_CEILF:
4478 case BUILT_IN_CEILL:
4479 case BUILT_IN_TRUNC:
4480 case BUILT_IN_TRUNCF:
4481 case BUILT_IN_TRUNCL:
4482 case BUILT_IN_ROUND:
4483 case BUILT_IN_ROUNDF:
4484 case BUILT_IN_ROUNDL:
4485 case BUILT_IN_NEARBYINT:
4486 case BUILT_IN_NEARBYINTF:
4487 case BUILT_IN_NEARBYINTL:
4488 return expand_call (exp, target, ignore);
4489
4490 default:
4491 break;
4492 }
4493
4494 /* The built-in function expanders test for target == const0_rtx
4495 to determine whether the function's result will be ignored. */
4496 if (ignore)
4497 target = const0_rtx;
4498
4499 /* If the result of a pure or const built-in function is ignored, and
4500 none of its arguments are volatile, we can avoid expanding the
4501 built-in call and just evaluate the arguments for side-effects. */
4502 if (target == const0_rtx
4503 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4504 {
4505 bool volatilep = false;
4506 tree arg;
4507
4508 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4509 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4510 {
4511 volatilep = true;
4512 break;
4513 }
4514
4515 if (! volatilep)
4516 {
4517 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4518 expand_expr (TREE_VALUE (arg), const0_rtx,
4519 VOIDmode, EXPAND_NORMAL);
4520 return const0_rtx;
4521 }
4522 }
4523
4524 switch (fcode)
4525 {
4526 case BUILT_IN_ABS:
4527 case BUILT_IN_LABS:
4528 case BUILT_IN_LLABS:
4529 case BUILT_IN_IMAXABS:
4530 /* build_function_call changes these into ABS_EXPR. */
4531 abort ();
4532
4533 case BUILT_IN_FABS:
4534 case BUILT_IN_FABSF:
4535 case BUILT_IN_FABSL:
4536 target = expand_builtin_fabs (arglist, target, subtarget);
4537 if (target)
4538 return target;
4539 break;
4540
4541 case BUILT_IN_CABS:
4542 case BUILT_IN_CABSF:
4543 case BUILT_IN_CABSL:
4544 if (flag_unsafe_math_optimizations)
4545 {
4546 target = expand_builtin_cabs (arglist, target);
4547 if (target)
4548 return target;
4549 }
4550 break;
4551
4552 case BUILT_IN_CONJ:
4553 case BUILT_IN_CONJF:
4554 case BUILT_IN_CONJL:
4555 case BUILT_IN_CREAL:
4556 case BUILT_IN_CREALF:
4557 case BUILT_IN_CREALL:
4558 case BUILT_IN_CIMAG:
4559 case BUILT_IN_CIMAGF:
4560 case BUILT_IN_CIMAGL:
4561 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4562 and IMAGPART_EXPR. */
4563 abort ();
4564
4565 case BUILT_IN_SIN:
4566 case BUILT_IN_SINF:
4567 case BUILT_IN_SINL:
4568 case BUILT_IN_COS:
4569 case BUILT_IN_COSF:
4570 case BUILT_IN_COSL:
4571 case BUILT_IN_EXP:
4572 case BUILT_IN_EXPF:
4573 case BUILT_IN_EXPL:
4574 case BUILT_IN_LOG:
4575 case BUILT_IN_LOGF:
4576 case BUILT_IN_LOGL:
4577 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4578 because of possible accuracy problems. */
4579 if (! flag_unsafe_math_optimizations)
4580 break;
4581 case BUILT_IN_SQRT:
4582 case BUILT_IN_SQRTF:
4583 case BUILT_IN_SQRTL:
4584 case BUILT_IN_FLOOR:
4585 case BUILT_IN_FLOORF:
4586 case BUILT_IN_FLOORL:
4587 case BUILT_IN_CEIL:
4588 case BUILT_IN_CEILF:
4589 case BUILT_IN_CEILL:
4590 case BUILT_IN_TRUNC:
4591 case BUILT_IN_TRUNCF:
4592 case BUILT_IN_TRUNCL:
4593 case BUILT_IN_ROUND:
4594 case BUILT_IN_ROUNDF:
4595 case BUILT_IN_ROUNDL:
4596 case BUILT_IN_NEARBYINT:
4597 case BUILT_IN_NEARBYINTF:
4598 case BUILT_IN_NEARBYINTL:
4599 target = expand_builtin_mathfn (exp, target, subtarget);
4600 if (target)
4601 return target;
4602 break;
4603
4604 case BUILT_IN_POW:
4605 case BUILT_IN_POWF:
4606 case BUILT_IN_POWL:
4607 case BUILT_IN_ATAN2:
4608 case BUILT_IN_ATAN2F:
4609 case BUILT_IN_ATAN2L:
4610 if (! flag_unsafe_math_optimizations)
4611 break;
4612 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4613 if (target)
4614 return target;
4615 break;
4616
4617 case BUILT_IN_APPLY_ARGS:
4618 return expand_builtin_apply_args ();
4619
4620 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4621 FUNCTION with a copy of the parameters described by
4622 ARGUMENTS, and ARGSIZE. It returns a block of memory
4623 allocated on the stack into which is stored all the registers
4624 that might possibly be used for returning the result of a
4625 function. ARGUMENTS is the value returned by
4626 __builtin_apply_args. ARGSIZE is the number of bytes of
4627 arguments that must be copied. ??? How should this value be
4628 computed? We'll also need a safe worst case value for varargs
4629 functions. */
4630 case BUILT_IN_APPLY:
4631 if (!validate_arglist (arglist, POINTER_TYPE,
4632 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4633 && !validate_arglist (arglist, REFERENCE_TYPE,
4634 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4635 return const0_rtx;
4636 else
4637 {
4638 int i;
4639 tree t;
4640 rtx ops[3];
4641
4642 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4643 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4644
4645 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4646 }
4647
4648 /* __builtin_return (RESULT) causes the function to return the
4649 value described by RESULT. RESULT is address of the block of
4650 memory returned by __builtin_apply. */
4651 case BUILT_IN_RETURN:
4652 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4653 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4654 NULL_RTX, VOIDmode, 0));
4655 return const0_rtx;
4656
4657 case BUILT_IN_SAVEREGS:
4658 return expand_builtin_saveregs ();
4659
4660 case BUILT_IN_ARGS_INFO:
4661 return expand_builtin_args_info (arglist);
4662
4663 /* Return the address of the first anonymous stack arg. */
4664 case BUILT_IN_NEXT_ARG:
4665 return expand_builtin_next_arg (arglist);
4666
4667 case BUILT_IN_CLASSIFY_TYPE:
4668 return expand_builtin_classify_type (arglist);
4669
4670 case BUILT_IN_CONSTANT_P:
4671 return expand_builtin_constant_p (arglist, target_mode);
4672
4673 case BUILT_IN_FRAME_ADDRESS:
4674 case BUILT_IN_RETURN_ADDRESS:
4675 return expand_builtin_frame_address (fndecl, arglist);
4676
4677 /* Returns the address of the area where the structure is returned.
4678 0 otherwise. */
4679 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4680 if (arglist != 0
4681 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4682 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4683 return const0_rtx;
4684 else
4685 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4686
4687 case BUILT_IN_ALLOCA:
4688 target = expand_builtin_alloca (arglist, target);
4689 if (target)
4690 return target;
4691 break;
4692
4693 case BUILT_IN_FFS:
4694 case BUILT_IN_FFSL:
4695 case BUILT_IN_FFSLL:
4696 target = expand_builtin_unop (target_mode, arglist, target,
4697 subtarget, ffs_optab);
4698 if (target)
4699 return target;
4700 break;
4701
4702 case BUILT_IN_CLZ:
4703 case BUILT_IN_CLZL:
4704 case BUILT_IN_CLZLL:
4705 target = expand_builtin_unop (target_mode, arglist, target,
4706 subtarget, clz_optab);
4707 if (target)
4708 return target;
4709 break;
4710
4711 case BUILT_IN_CTZ:
4712 case BUILT_IN_CTZL:
4713 case BUILT_IN_CTZLL:
4714 target = expand_builtin_unop (target_mode, arglist, target,
4715 subtarget, ctz_optab);
4716 if (target)
4717 return target;
4718 break;
4719
4720 case BUILT_IN_POPCOUNT:
4721 case BUILT_IN_POPCOUNTL:
4722 case BUILT_IN_POPCOUNTLL:
4723 target = expand_builtin_unop (target_mode, arglist, target,
4724 subtarget, popcount_optab);
4725 if (target)
4726 return target;
4727 break;
4728
4729 case BUILT_IN_PARITY:
4730 case BUILT_IN_PARITYL:
4731 case BUILT_IN_PARITYLL:
4732 target = expand_builtin_unop (target_mode, arglist, target,
4733 subtarget, parity_optab);
4734 if (target)
4735 return target;
4736 break;
4737
4738 case BUILT_IN_STRLEN:
4739 target = expand_builtin_strlen (arglist, target, target_mode);
4740 if (target)
4741 return target;
4742 break;
4743
4744 case BUILT_IN_STRCPY:
4745 target = expand_builtin_strcpy (arglist, target, mode);
4746 if (target)
4747 return target;
4748 break;
4749
4750 case BUILT_IN_STRNCPY:
4751 target = expand_builtin_strncpy (arglist, target, mode);
4752 if (target)
4753 return target;
4754 break;
4755
4756 case BUILT_IN_STPCPY:
4757 target = expand_builtin_stpcpy (arglist, target, mode);
4758 if (target)
4759 return target;
4760 break;
4761
4762 case BUILT_IN_STRCAT:
4763 target = expand_builtin_strcat (arglist, target, mode);
4764 if (target)
4765 return target;
4766 break;
4767
4768 case BUILT_IN_STRNCAT:
4769 target = expand_builtin_strncat (arglist, target, mode);
4770 if (target)
4771 return target;
4772 break;
4773
4774 case BUILT_IN_STRSPN:
4775 target = expand_builtin_strspn (arglist, target, mode);
4776 if (target)
4777 return target;
4778 break;
4779
4780 case BUILT_IN_STRCSPN:
4781 target = expand_builtin_strcspn (arglist, target, mode);
4782 if (target)
4783 return target;
4784 break;
4785
4786 case BUILT_IN_STRSTR:
4787 target = expand_builtin_strstr (arglist, target, mode);
4788 if (target)
4789 return target;
4790 break;
4791
4792 case BUILT_IN_STRPBRK:
4793 target = expand_builtin_strpbrk (arglist, target, mode);
4794 if (target)
4795 return target;
4796 break;
4797
4798 case BUILT_IN_INDEX:
4799 case BUILT_IN_STRCHR:
4800 target = expand_builtin_strchr (arglist, target, mode);
4801 if (target)
4802 return target;
4803 break;
4804
4805 case BUILT_IN_RINDEX:
4806 case BUILT_IN_STRRCHR:
4807 target = expand_builtin_strrchr (arglist, target, mode);
4808 if (target)
4809 return target;
4810 break;
4811
4812 case BUILT_IN_MEMCPY:
4813 target = expand_builtin_memcpy (arglist, target, mode);
4814 if (target)
4815 return target;
4816 break;
4817
4818 case BUILT_IN_MEMPCPY:
4819 target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
4820 if (target)
4821 return target;
4822 break;
4823
4824 case BUILT_IN_MEMMOVE:
4825 target = expand_builtin_memmove (arglist, target, mode);
4826 if (target)
4827 return target;
4828 break;
4829
4830 case BUILT_IN_BCOPY:
4831 target = expand_builtin_bcopy (arglist);
4832 if (target)
4833 return target;
4834 break;
4835
4836 case BUILT_IN_MEMSET:
4837 target = expand_builtin_memset (arglist, target, mode);
4838 if (target)
4839 return target;
4840 break;
4841
4842 case BUILT_IN_BZERO:
4843 target = expand_builtin_bzero (arglist);
4844 if (target)
4845 return target;
4846 break;
4847
4848 case BUILT_IN_STRCMP:
4849 target = expand_builtin_strcmp (exp, target, mode);
4850 if (target)
4851 return target;
4852 break;
4853
4854 case BUILT_IN_STRNCMP:
4855 target = expand_builtin_strncmp (exp, target, mode);
4856 if (target)
4857 return target;
4858 break;
4859
4860 case BUILT_IN_BCMP:
4861 case BUILT_IN_MEMCMP:
4862 target = expand_builtin_memcmp (exp, arglist, target, mode);
4863 if (target)
4864 return target;
4865 break;
4866
4867 case BUILT_IN_SETJMP:
4868 target = expand_builtin_setjmp (arglist, target);
4869 if (target)
4870 return target;
4871 break;
4872
4873 /* __builtin_longjmp is passed a pointer to an array of five words.
4874 It's similar to the C library longjmp function but works with
4875 __builtin_setjmp above. */
4876 case BUILT_IN_LONGJMP:
4877 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4878 break;
4879 else
4880 {
4881 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4882 VOIDmode, 0);
4883 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4884 NULL_RTX, VOIDmode, 0);
4885
4886 if (value != const1_rtx)
4887 {
4888 error ("__builtin_longjmp second argument must be 1");
4889 return const0_rtx;
4890 }
4891
4892 expand_builtin_longjmp (buf_addr, value);
4893 return const0_rtx;
4894 }
4895
4896 case BUILT_IN_TRAP:
4897 expand_builtin_trap ();
4898 return const0_rtx;
4899
4900 case BUILT_IN_FPUTS:
4901 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
4902 if (target)
4903 return target;
4904 break;
4905 case BUILT_IN_FPUTS_UNLOCKED:
4906 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
4907 if (target)
4908 return target;
4909 break;
4910
4911 /* Various hooks for the DWARF 2 __throw routine. */
4912 case BUILT_IN_UNWIND_INIT:
4913 expand_builtin_unwind_init ();
4914 return const0_rtx;
4915 case BUILT_IN_DWARF_CFA:
4916 return virtual_cfa_rtx;
4917 #ifdef DWARF2_UNWIND_INFO
4918 case BUILT_IN_DWARF_SP_COLUMN:
4919 return expand_builtin_dwarf_sp_column ();
4920 case BUILT_IN_INIT_DWARF_REG_SIZES:
4921 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4922 return const0_rtx;
4923 #endif
4924 case BUILT_IN_FROB_RETURN_ADDR:
4925 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4926 case BUILT_IN_EXTRACT_RETURN_ADDR:
4927 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4928 case BUILT_IN_EH_RETURN:
4929 expand_builtin_eh_return (TREE_VALUE (arglist),
4930 TREE_VALUE (TREE_CHAIN (arglist)));
4931 return const0_rtx;
4932 #ifdef EH_RETURN_DATA_REGNO
4933 case BUILT_IN_EH_RETURN_DATA_REGNO:
4934 return expand_builtin_eh_return_data_regno (arglist);
4935 #endif
4936 case BUILT_IN_VA_START:
4937 case BUILT_IN_STDARG_START:
4938 return expand_builtin_va_start (arglist);
4939 case BUILT_IN_VA_END:
4940 return expand_builtin_va_end (arglist);
4941 case BUILT_IN_VA_COPY:
4942 return expand_builtin_va_copy (arglist);
4943 case BUILT_IN_EXPECT:
4944 return expand_builtin_expect (arglist, target);
4945 case BUILT_IN_PREFETCH:
4946 expand_builtin_prefetch (arglist);
4947 return const0_rtx;
4948
4949
4950 default: /* just do library call, if unknown builtin */
4951 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4952 error ("built-in function `%s' not currently supported",
4953 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4954 }
4955
4956 /* The switch statement above can drop through to cause the function
4957 to be called normally. */
4958 return expand_call (exp, target, ignore);
4959 }
4960
4961 /* Determine whether a tree node represents a call to a built-in
4962 math function. If the tree T is a call to a built-in function
4963 taking a single real argument, then the return value is the
4964 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4965 the return value is END_BUILTINS. */
4966
4967 enum built_in_function
4968 builtin_mathfn_code (t)
4969 tree t;
4970 {
4971 tree fndecl, arglist;
4972
4973 if (TREE_CODE (t) != CALL_EXPR
4974 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4975 return END_BUILTINS;
4976
4977 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4978 if (TREE_CODE (fndecl) != FUNCTION_DECL
4979 || ! DECL_BUILT_IN (fndecl)
4980 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4981 return END_BUILTINS;
4982
4983 arglist = TREE_OPERAND (t, 1);
4984 if (! arglist
4985 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4986 return END_BUILTINS;
4987
4988 arglist = TREE_CHAIN (arglist);
4989 switch (DECL_FUNCTION_CODE (fndecl))
4990 {
4991 case BUILT_IN_POW:
4992 case BUILT_IN_POWF:
4993 case BUILT_IN_POWL:
4994 case BUILT_IN_ATAN2:
4995 case BUILT_IN_ATAN2F:
4996 case BUILT_IN_ATAN2L:
4997 if (! arglist
4998 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4999 || TREE_CHAIN (arglist))
5000 return END_BUILTINS;
5001 break;
5002
5003 default:
5004 if (arglist)
5005 return END_BUILTINS;
5006 break;
5007 }
5008
5009 return DECL_FUNCTION_CODE (fndecl);
5010 }
5011
5012 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5013 constant. ARGLIST is the argument list of the call. */
5014
5015 static tree
5016 fold_builtin_constant_p (arglist)
5017 tree arglist;
5018 {
5019 if (arglist == 0)
5020 return 0;
5021
5022 arglist = TREE_VALUE (arglist);
5023
5024 /* We return 1 for a numeric type that's known to be a constant
5025 value at compile-time or for an aggregate type that's a
5026 literal constant. */
5027 STRIP_NOPS (arglist);
5028
5029 /* If we know this is a constant, emit the constant of one. */
5030 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
5031 || (TREE_CODE (arglist) == CONSTRUCTOR
5032 && TREE_CONSTANT (arglist))
5033 || (TREE_CODE (arglist) == ADDR_EXPR
5034 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5035 return integer_one_node;
5036
5037 /* If we aren't going to be running CSE or this expression
5038 has side effects, show we don't know it to be a constant.
5039 Likewise if it's a pointer or aggregate type since in those
5040 cases we only want literals, since those are only optimized
5041 when generating RTL, not later.
5042 And finally, if we are compiling an initializer, not code, we
5043 need to return a definite result now; there's not going to be any
5044 more optimization done. */
5045 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
5046 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5047 || POINTER_TYPE_P (TREE_TYPE (arglist))
5048 || cfun == 0)
5049 return integer_zero_node;
5050
5051 return 0;
5052 }
5053
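/* Behavior sketched above, for illustration (assumed examples):

	__builtin_constant_p (42)     ->  1	-- a literal constant
	__builtin_constant_p ("abc")  ->  1	-- address of a STRING_CST
	__builtin_constant_p (x++)    ->  0	-- side effects
	__builtin_constant_p (x)      ->  left for later passes to decide
					  (no folding happens here)
*/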
5054 /* Fold a call to __builtin_classify_type. */
5055
5056 static tree
5057 fold_builtin_classify_type (arglist)
5058 tree arglist;
5059 {
5060 if (arglist == 0)
5061 return build_int_2 (no_type_class, 0);
5062
5063 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
5064 }
5065
5066 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5067
5068 static tree
5069 fold_builtin_inf (type, warn)
5070 tree type;
5071 int warn;
5072 {
5073 REAL_VALUE_TYPE real;
5074
5075 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5076 warning ("target format does not support infinity");
5077
5078 real_inf (&real);
5079 return build_real (type, real);
5080 }
5081
5082 /* Fold a call to __builtin_nan or __builtin_nans. */
5083
5084 static tree
5085 fold_builtin_nan (arglist, type, quiet)
5086 tree arglist, type;
5087 int quiet;
5088 {
5089 REAL_VALUE_TYPE real;
5090 const char *str;
5091
5092 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5093 return 0;
5094 str = c_getstr (TREE_VALUE (arglist));
5095 if (!str)
5096 return 0;
5097
5098 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5099 return 0;
5100
5101 return build_real (type, real);
5102 }
5103
5104 /* EXP is assumed to be a builtin call where truncation can be propagated
5105 across (for instance floor((double)f) == (double)floorf (f)).
5106 Do the transformation. */
5107 static tree
5108 fold_trunc_transparent_mathfn (exp)
5109 tree exp;
5110 {
5111 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5112 tree arglist = TREE_OPERAND (exp, 1);
5113 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5114
5115 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5116 {
5117 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
5118 tree ftype = TREE_TYPE (exp);
5119 tree newtype = TREE_TYPE (arg0);
5120 tree decl;
5121
5122 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5123 && (decl = mathfn_built_in (newtype, fcode)))
5124 {
5125 arglist =
5126 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5127 return convert (ftype,
5128 build_function_call_expr (decl, arglist));
5129 }
5130 }
5131 return 0;
5132 }
5133
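/* Illustration of the truncation-transparent rewrite above (an assumed
   example):

	float f;
	double d = floor ((double) f);  ->  double d = (double) floorf (f);

   valid because extending F to double is exact, so floorf applied to F
   yields the same mathematical result.  */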
5134 /* Fold function call to builtin cabs, cabsf or cabsl. FNDECL is the
5135 function's DECL, ARGLIST is the argument list and TYPE is the return
5136 type. Return NULL_TREE if no simplification can be made. */
5137
5138 static tree
5139 fold_builtin_cabs (fndecl, arglist, type)
5140 tree fndecl, arglist, type;
5141 {
5142 tree arg;
5143
5144 if (!arglist || TREE_CHAIN (arglist))
5145 return NULL_TREE;
5146
5147 arg = TREE_VALUE (arglist);
5148 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
5149 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
5150 return NULL_TREE;
5151
5152 /* Evaluate cabs of a constant at compile-time. */
5153 if (flag_unsafe_math_optimizations
5154 && TREE_CODE (arg) == COMPLEX_CST
5155 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
5156 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
5157 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
5158 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
5159 {
5160 REAL_VALUE_TYPE r, i;
5161
5162 r = TREE_REAL_CST (TREE_REALPART (arg));
5163 i = TREE_REAL_CST (TREE_IMAGPART (arg));
5164
5165 real_arithmetic (&r, MULT_EXPR, &r, &r);
5166 real_arithmetic (&i, MULT_EXPR, &i, &i);
5167 real_arithmetic (&r, PLUS_EXPR, &r, &i);
5168 if (real_sqrt (&r, TYPE_MODE (type), &r)
5169 || ! flag_trapping_math)
5170 return build_real (type, r);
5171 }
5172
5173 /* If either part is zero, cabs is fabs of the other. */
5174 if (TREE_CODE (arg) == COMPLEX_EXPR
5175 && real_zerop (TREE_OPERAND (arg, 0)))
5176 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
5177 if (TREE_CODE (arg) == COMPLEX_EXPR
5178 && real_zerop (TREE_OPERAND (arg, 1)))
5179 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
5180
5181 if (flag_unsafe_math_optimizations)
5182 {
5183 enum built_in_function fcode;
5184 tree sqrtfn;
5185
5186 fcode = DECL_FUNCTION_CODE (fndecl);
5187 if (fcode == BUILT_IN_CABS)
5188 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5189 else if (fcode == BUILT_IN_CABSF)
5190 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5191 else if (fcode == BUILT_IN_CABSL)
5192 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5193 else
5194 sqrtfn = NULL_TREE;
5195
5196 if (sqrtfn != NULL_TREE)
5197 {
5198 tree rpart, ipart, result, arglist;
5199
5200 rpart = fold (build1 (REALPART_EXPR, type, arg));
5201 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
5202
5203 rpart = save_expr (rpart);
5204 ipart = save_expr (ipart);
5205
5206 result = fold (build (PLUS_EXPR, type,
5207 fold (build (MULT_EXPR, type,
5208 rpart, rpart)),
5209 fold (build (MULT_EXPR, type,
5210 ipart, ipart))));
5211
5212 arglist = build_tree_list (NULL_TREE, result);
5213 return build_function_call_expr (sqrtfn, arglist);
5214 }
5215 }
5216
5217 return NULL_TREE;
5218 }
5219
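/* Worked examples of the cabs folding above (illustrative assumptions;
   the first and last require -funsafe-math-optimizations):

	cabs (3.0 + 4.0 * I)  ->  5.0			-- constant operand
	cabs (0.0 + y * I)    ->  fabs (y)		-- one part is zero
	cabs (z)              ->  sqrt (creal (z) * creal (z)
					+ cimag (z) * cimag (z))
*/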
5220 /* Used by constant folding to eliminate some builtin calls early. EXP is
5221 the CALL_EXPR of a call to a builtin function. */
5222
5223 tree
5224 fold_builtin (exp)
5225 tree exp;
5226 {
5227 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5228 tree arglist = TREE_OPERAND (exp, 1);
5229 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5230
5231 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5232 return 0;
5233
5234 switch (DECL_FUNCTION_CODE (fndecl))
5235 {
5236 case BUILT_IN_CONSTANT_P:
5237 return fold_builtin_constant_p (arglist);
5238
5239 case BUILT_IN_CLASSIFY_TYPE:
5240 return fold_builtin_classify_type (arglist);
5241
5242 case BUILT_IN_STRLEN:
5243 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5244 {
5245 tree len = c_strlen (TREE_VALUE (arglist));
5246 if (len)
5247 {
5248 /* Convert from the internal "sizetype" type to "size_t". */
5249 if (size_type_node)
5250 len = convert (size_type_node, len);
5251 return len;
5252 }
5253 }
5254 break;
5255
5256 case BUILT_IN_FABS:
5257 case BUILT_IN_FABSF:
5258 case BUILT_IN_FABSL:
5259 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5260 return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
5261 break;
5262
5263 case BUILT_IN_CABS:
5264 case BUILT_IN_CABSF:
5265 case BUILT_IN_CABSL:
5266 return fold_builtin_cabs (fndecl, arglist, type);
5267
5268 case BUILT_IN_SQRT:
5269 case BUILT_IN_SQRTF:
5270 case BUILT_IN_SQRTL:
5271 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5272 {
5273 enum built_in_function fcode;
5274 tree arg = TREE_VALUE (arglist);
5275
5276 /* Optimize sqrt of constant value. */
5277 if (TREE_CODE (arg) == REAL_CST
5278 && ! TREE_CONSTANT_OVERFLOW (arg))
5279 {
5280 REAL_VALUE_TYPE r, x;
5281
5282 x = TREE_REAL_CST (arg);
5283 if (real_sqrt (&r, TYPE_MODE (type), &x)
5284 || (!flag_trapping_math && !flag_errno_math))
5285 return build_real (type, r);
5286 }
5287
5288 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5289 fcode = builtin_mathfn_code (arg);
5290 if (flag_unsafe_math_optimizations
5291 && (fcode == BUILT_IN_EXP
5292 || fcode == BUILT_IN_EXPF
5293 || fcode == BUILT_IN_EXPL))
5294 {
5295 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5296 arg = fold (build (MULT_EXPR, type,
5297 TREE_VALUE (TREE_OPERAND (arg, 1)),
5298 build_real (type, dconsthalf)));
5299 arglist = build_tree_list (NULL_TREE, arg);
5300 return build_function_call_expr (expfn, arglist);
5301 }
5302
5303 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5304 if (flag_unsafe_math_optimizations
5305 && (fcode == BUILT_IN_POW
5306 || fcode == BUILT_IN_POWF
5307 || fcode == BUILT_IN_POWL))
5308 {
5309 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5310 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5311 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5312 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5313 build_real (type, dconsthalf)));
5314 arglist = tree_cons (NULL_TREE, arg0,
5315 build_tree_list (NULL_TREE, narg1));
5316 return build_function_call_expr (powfn, arglist);
5317 }
5318 }
5319 break;
5320
5321 case BUILT_IN_SIN:
5322 case BUILT_IN_SINF:
5323 case BUILT_IN_SINL:
5324 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5325 {
5326 tree arg = TREE_VALUE (arglist);
5327
5328 /* Optimize sin(0.0) = 0.0. */
5329 if (real_zerop (arg))
5330 return arg;
5331 }
5332 break;
5333
5334 case BUILT_IN_COS:
5335 case BUILT_IN_COSF:
5336 case BUILT_IN_COSL:
5337 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5338 {
5339 tree arg = TREE_VALUE (arglist);
5340
5341 /* Optimize cos(0.0) = 1.0. */
5342 if (real_zerop (arg))
5343 return build_real (type, dconst1);
5344
5345 /* Optimize cos(-x) into cos(x). */
5346 if (TREE_CODE (arg) == NEGATE_EXPR)
5347 {
5348 tree arglist = build_tree_list (NULL_TREE,
5349 TREE_OPERAND (arg, 0));
5350 return build_function_call_expr (fndecl, arglist);
5351 }
5352 }
5353 break;
5354
5355 case BUILT_IN_EXP:
5356 case BUILT_IN_EXPF:
5357 case BUILT_IN_EXPL:
5358 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5359 {
5360 enum built_in_function fcode;
5361 tree arg = TREE_VALUE (arglist);
5362
5363 /* Optimize exp(0.0) = 1.0. */
5364 if (real_zerop (arg))
5365 return build_real (type, dconst1);
5366
5367 /* Optimize exp(1.0) = e. */
5368 if (real_onep (arg))
5369 {
5370 REAL_VALUE_TYPE cst;
5371
5372 if (! builtin_dconsts_init)
5373 init_builtin_dconsts ();
5374 real_convert (&cst, TYPE_MODE (type), &dconste);
5375 return build_real (type, cst);
5376 }
5377
5378 /* Attempt to evaluate exp at compile-time. */
5379 if (flag_unsafe_math_optimizations
5380 && TREE_CODE (arg) == REAL_CST
5381 && ! TREE_CONSTANT_OVERFLOW (arg))
5382 {
5383 REAL_VALUE_TYPE cint;
5384 REAL_VALUE_TYPE c;
5385 HOST_WIDE_INT n;
5386
5387 c = TREE_REAL_CST (arg);
5388 n = real_to_integer (&c);
5389 real_from_integer (&cint, VOIDmode, n,
5390 n < 0 ? -1 : 0, 0);
5391 if (real_identical (&c, &cint))
5392 {
5393 REAL_VALUE_TYPE x;
5394
5395 if (! builtin_dconsts_init)
5396 init_builtin_dconsts ();
5397 real_powi (&x, TYPE_MODE (type), &dconste, n);
5398 return build_real (type, x);
5399 }
5400 }
5401
5402 /* Optimize exp(log(x)) = x. */
5403 fcode = builtin_mathfn_code (arg);
5404 if (flag_unsafe_math_optimizations
5405 && (fcode == BUILT_IN_LOG
5406 || fcode == BUILT_IN_LOGF
5407 || fcode == BUILT_IN_LOGL))
5408 return TREE_VALUE (TREE_OPERAND (arg, 1));
5409 }
5410 break;
5411
5412 case BUILT_IN_LOG:
5413 case BUILT_IN_LOGF:
5414 case BUILT_IN_LOGL:
5415 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5416 {
5417 enum built_in_function fcode;
5418 tree arg = TREE_VALUE (arglist);
5419
5420 /* Optimize log(1.0) = 0.0. */
5421 if (real_onep (arg))
5422 return build_real (type, dconst0);
5423
5424 /* Optimize log(exp(x)) = x. */
5425 fcode = builtin_mathfn_code (arg);
5426 if (flag_unsafe_math_optimizations
5427 && (fcode == BUILT_IN_EXP
5428 || fcode == BUILT_IN_EXPF
5429 || fcode == BUILT_IN_EXPL))
5430 return TREE_VALUE (TREE_OPERAND (arg, 1));
5431
5432 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5433 if (flag_unsafe_math_optimizations
5434 && (fcode == BUILT_IN_SQRT
5435 || fcode == BUILT_IN_SQRTF
5436 || fcode == BUILT_IN_SQRTL))
5437 {
5438 tree logfn = build_function_call_expr (fndecl,
5439 TREE_OPERAND (arg, 1));
5440 return fold (build (MULT_EXPR, type, logfn,
5441 build_real (type, dconsthalf)));
5442 }
5443
5444 /* Optimize log(pow(x,y)) = y*log(x). */
5445 if (flag_unsafe_math_optimizations
5446 && (fcode == BUILT_IN_POW
5447 || fcode == BUILT_IN_POWF
5448 || fcode == BUILT_IN_POWL))
5449 {
5450 tree arg0, arg1, logfn;
5451
5452 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5453 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5454 arglist = build_tree_list (NULL_TREE, arg0);
5455 logfn = build_function_call_expr (fndecl, arglist);
5456 return fold (build (MULT_EXPR, type, arg1, logfn));
5457 }
5458 }
5459 break;
5460
5461 case BUILT_IN_TAN:
5462 case BUILT_IN_TANF:
5463 case BUILT_IN_TANL:
5464 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5465 {
5466 enum built_in_function fcode;
5467 tree arg = TREE_VALUE (arglist);
5468
5469 /* Optimize tan(0.0) = 0.0. */
5470 if (real_zerop (arg))
5471 return arg;
5472
5473 /* Optimize tan(atan(x)) = x. */
5474 fcode = builtin_mathfn_code (arg);
5475 if (flag_unsafe_math_optimizations
5476 && (fcode == BUILT_IN_ATAN
5477 || fcode == BUILT_IN_ATANF
5478 || fcode == BUILT_IN_ATANL))
5479 return TREE_VALUE (TREE_OPERAND (arg, 1));
5480 }
5481 break;
5482
5483 case BUILT_IN_ATAN:
5484 case BUILT_IN_ATANF:
5485 case BUILT_IN_ATANL:
5486 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5487 {
5488 tree arg = TREE_VALUE (arglist);
5489
5490 /* Optimize atan(0.0) = 0.0. */
5491 if (real_zerop (arg))
5492 return arg;
5493
5494 /* Optimize atan(1.0) = pi/4. */
5495 if (real_onep (arg))
5496 {
5497 REAL_VALUE_TYPE cst;
5498
5499 if (! builtin_dconsts_init)
5500 init_builtin_dconsts ();
5501 real_convert (&cst, TYPE_MODE (type), &dconstpi);
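/* REAL_VALUE_TYPE holds the value as significand * 2**exp, so
   lowering the binary exponent by two divides by four, giving pi/4.  */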
5502 cst.exp -= 2;
5503 return build_real (type, cst);
5504 }
5505 }
5506 break;
5507
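/* Summary, for illustration, of the pow folds handled below; entries
   marked "unsafe" require -funsafe-math-optimizations:

     pow (1.0, y)        -> 1.0
     pow (x, 0.0)        -> 1.0
     pow (x, 1.0)        -> x
     pow (x, -1.0)       -> 1.0 / x
     pow (x, 2.0)        -> x * x
     pow (x, -2.0)       -> 1.0 / (x * x)    (unsafe)
     pow (x, 0.5)        -> sqrt (x)         (unsafe)
     pow (exp (x), y)    -> exp (x * y)      (unsafe)
     pow (sqrt (x), y)   -> pow (x, y * 0.5) (unsafe)
     pow (pow (x, y), z) -> pow (x, y * z)   (unsafe)

   In addition, pow of two literals is evaluated at compile time via
   real_powi when the exponent is integral and the result is exact
   (or unsafe math optimizations are enabled).  */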
5508 case BUILT_IN_POW:
5509 case BUILT_IN_POWF:
5510 case BUILT_IN_POWL:
5511 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5512 {
5513 enum built_in_function fcode;
5514 tree arg0 = TREE_VALUE (arglist);
5515 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5516
5517 /* Optimize pow(1.0,y) = 1.0. */
5518 if (real_onep (arg0))
5519 return omit_one_operand (type, build_real (type, dconst1), arg1);
5520
5521 if (TREE_CODE (arg1) == REAL_CST
5522 && ! TREE_CONSTANT_OVERFLOW (arg1))
5523 {
5524 REAL_VALUE_TYPE c;
5525 c = TREE_REAL_CST (arg1);
5526
5527 /* Optimize pow(x,0.0) = 1.0. */
5528 if (REAL_VALUES_EQUAL (c, dconst0))
5529 return omit_one_operand (type, build_real (type, dconst1),
5530 arg0);
5531
5532 /* Optimize pow(x,1.0) = x. */
5533 if (REAL_VALUES_EQUAL (c, dconst1))
5534 return arg0;
5535
5536 /* Optimize pow(x,-1.0) = 1.0/x. */
5537 if (REAL_VALUES_EQUAL (c, dconstm1))
5538 return fold (build (RDIV_EXPR, type,
5539 build_real (type, dconst1),
5540 arg0));
5541
5542 /* Optimize pow(x,2.0) = x*x. */
5543 if (REAL_VALUES_EQUAL (c, dconst2)
5544 && (*lang_hooks.decls.global_bindings_p) () == 0
5545 && ! CONTAINS_PLACEHOLDER_P (arg0))
5546 {
5547 arg0 = save_expr (arg0);
5548 return fold (build (MULT_EXPR, type, arg0, arg0));
5549 }
5550
5551 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5552 if (flag_unsafe_math_optimizations
5553 && REAL_VALUES_EQUAL (c, dconstm2)
5554 && (*lang_hooks.decls.global_bindings_p) () == 0
5555 && ! CONTAINS_PLACEHOLDER_P (arg0))
5556 {
5557 arg0 = save_expr (arg0);
5558 return fold (build (RDIV_EXPR, type,
5559 build_real (type, dconst1),
5560 fold (build (MULT_EXPR, type,
5561 arg0, arg0))));
5562 }
5563
5564 /* Optimize pow(x,0.5) = sqrt(x). */
5565 if (flag_unsafe_math_optimizations
5566 && REAL_VALUES_EQUAL (c, dconsthalf))
5567 {
5568 tree sqrtfn;
5569
5570 fcode = DECL_FUNCTION_CODE (fndecl);
5571 if (fcode == BUILT_IN_POW)
5572 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5573 else if (fcode == BUILT_IN_POWF)
5574 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5575 else if (fcode == BUILT_IN_POWL)
5576 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5577 else
5578 sqrtfn = NULL_TREE;
5579
5580 if (sqrtfn != NULL_TREE)
5581 {
5582 tree arglist = build_tree_list (NULL_TREE, arg0);
5583 return build_function_call_expr (sqrtfn, arglist);
5584 }
5585 }
5586
5587 /* Attempt to evaluate pow at compile-time. */
5588 if (TREE_CODE (arg0) == REAL_CST
5589 && ! TREE_CONSTANT_OVERFLOW (arg0))
5590 {
5591 REAL_VALUE_TYPE cint;
5592 HOST_WIDE_INT n;
5593
5594 n = real_to_integer (&c);
5595 real_from_integer (&cint, VOIDmode, n,
5596 n < 0 ? -1 : 0, 0);
5597 if (real_identical (&c, &cint))
5598 {
5599 REAL_VALUE_TYPE x;
5600 bool inexact;
5601
5602 x = TREE_REAL_CST (arg0);
5603 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
5604 if (flag_unsafe_math_optimizations || !inexact)
5605 return build_real (type, x);
5606 }
5607 }
5608 }
5609
5610 /* Optimize pow(exp(x),y) = exp(x*y). */
5611 fcode = builtin_mathfn_code (arg0);
5612 if (flag_unsafe_math_optimizations
5613 && (fcode == BUILT_IN_EXP
5614 || fcode == BUILT_IN_EXPF
5615 || fcode == BUILT_IN_EXPL))
5616 {
5617 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5618 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5619 arg = fold (build (MULT_EXPR, type, arg, arg1));
5620 arglist = build_tree_list (NULL_TREE, arg);
5621 return build_function_call_expr (expfn, arglist);
5622 }
5623
5624 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5625 if (flag_unsafe_math_optimizations
5626 && (fcode == BUILT_IN_SQRT
5627 || fcode == BUILT_IN_SQRTF
5628 || fcode == BUILT_IN_SQRTL))
5629 {
5630 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5631 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5632 build_real (type, dconsthalf)));
5633
5634 arglist = tree_cons (NULL_TREE, narg0,
5635 build_tree_list (NULL_TREE, narg1));
5636 return build_function_call_expr (fndecl, arglist);
5637 }
5638
5639 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5640 if (flag_unsafe_math_optimizations
5641 && (fcode == BUILT_IN_POW
5642 || fcode == BUILT_IN_POWF
5643 || fcode == BUILT_IN_POWL))
5644 {
5645 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5646 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5647 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5648 arglist = tree_cons (NULL_TREE, arg00,
5649 build_tree_list (NULL_TREE, narg1));
5650 return build_function_call_expr (fndecl, arglist);
5651 }
5652 }
5653 break;
5654
5655 case BUILT_IN_INF:
5656 case BUILT_IN_INFF:
5657 case BUILT_IN_INFL:
5658 return fold_builtin_inf (type, true);
5659
5660 case BUILT_IN_HUGE_VAL:
5661 case BUILT_IN_HUGE_VALF:
5662 case BUILT_IN_HUGE_VALL:
5663 return fold_builtin_inf (type, false);
5664
5665 case BUILT_IN_NAN:
5666 case BUILT_IN_NANF:
5667 case BUILT_IN_NANL:
5668 return fold_builtin_nan (arglist, type, true);
5669
5670 case BUILT_IN_NANS:
5671 case BUILT_IN_NANSF:
5672 case BUILT_IN_NANSL:
5673 return fold_builtin_nan (arglist, type, false);
5674
5675 case BUILT_IN_FLOOR:
5676 case BUILT_IN_FLOORF:
5677 case BUILT_IN_FLOORL:
5678 case BUILT_IN_CEIL:
5679 case BUILT_IN_CEILF:
5680 case BUILT_IN_CEILL:
5681 case BUILT_IN_TRUNC:
5682 case BUILT_IN_TRUNCF:
5683 case BUILT_IN_TRUNCL:
5684 case BUILT_IN_ROUND:
5685 case BUILT_IN_ROUNDF:
5686 case BUILT_IN_ROUNDL:
5687 case BUILT_IN_NEARBYINT:
5688 case BUILT_IN_NEARBYINTF:
5689 case BUILT_IN_NEARBYINTL:
5690 return fold_trunc_transparent_mathfn (exp);
5691
5692 default:
5693 break;
5694 }
5695
5696 return 0;
5697 }
5698
5699 /* Conveniently construct a function call expression. */
5700
5701 tree
5702 build_function_call_expr (fn, arglist)
5703 tree fn, arglist;
5704 {
5705 tree call_expr;
5706
5707 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5708 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5709 call_expr, arglist);
5710 TREE_SIDE_EFFECTS (call_expr) = 1;
5711 return fold (call_expr);
5712 }
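/* A sketch of typical use, mirroring the pow (x, 0.5) -> sqrt (x) fold
   above; the name arg is only illustrative:

     tree sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
     if (sqrtfn != NULL_TREE)
       {
         tree arglist = build_tree_list (NULL_TREE, arg);
         return build_function_call_expr (sqrtfn, arglist);
       }
   */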
5713
5714 /* This function validates the types of a function call argument list
5715 represented as a tree chain of parameters against a specified list
5716 of tree_codes. If the last specifier is a 0, that represents an
5717 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
5718
5719 static int
5720 validate_arglist (tree arglist, ...)
5721 {
5722 enum tree_code code;
5723 int res = 0;
5724 va_list ap;
5725
5726 va_start (ap, arglist);
5727
5728 do
5729 {
5730 code = va_arg (ap, enum tree_code);
5731 switch (code)
5732 {
5733 case 0:
5734 /* This signifies an ellipsis; any further arguments are all ok. */
5735 res = 1;
5736 goto end;
5737 case VOID_TYPE:
5738 /* This signifies an endlink; if no arguments remain, return
5739 true, otherwise return false. */
5740 res = arglist == 0;
5741 goto end;
5742 default:
5743 /* If no parameters remain or the parameter's code does not
5744 match the specified code, return false. Otherwise continue
5745 checking any remaining arguments. */
5746 if (arglist == 0
5747 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5748 goto end;
5749 break;
5750 }
5751 arglist = TREE_CHAIN (arglist);
5752 }
5753 while (1);
5754
5755 /* The gotos funnel every exit through a single point so that va_end
5756 is always called before returning. */
5757 end: ;
5758 va_end (ap);
5759
5760 return res;
5761 }
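/* Usage sketches (illustrative): a unary float builtin is checked with

     validate_arglist (arglist, REAL_TYPE, VOID_TYPE)

   a binary one such as pow with

     validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE)

   and a builtin taking a pointer followed by arbitrary trailing
   arguments could be checked with the 0 (ellipsis) terminator:

     validate_arglist (arglist, POINTER_TYPE, 0)  */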
5762
5763 /* Default version of target-specific builtin setup that does nothing. */
5764
5765 void
5766 default_init_builtins ()
5767 {
5768 }
5769
5770 /* Default target-specific builtin expander that does nothing. */
5771
5772 rtx
5773 default_expand_builtin (exp, target, subtarget, mode, ignore)
5774 tree exp ATTRIBUTE_UNUSED;
5775 rtx target ATTRIBUTE_UNUSED;
5776 rtx subtarget ATTRIBUTE_UNUSED;
5777 enum machine_mode mode ATTRIBUTE_UNUSED;
5778 int ignore ATTRIBUTE_UNUSED;
5779 {
5780 return NULL_RTX;
5781 }
5782
5783 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
5784
5785 void
5786 purge_builtin_constant_p ()
5787 {
5788 rtx insn, set, arg, new, note;
5789
5790 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5791 if (INSN_P (insn)
5792 && (set = single_set (insn)) != NULL_RTX
5793 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
5794 || (GET_CODE (arg) == SUBREG
5795 && (GET_CODE (arg = SUBREG_REG (arg))
5796 == CONSTANT_P_RTX))))
5797 {
5798 arg = XEXP (arg, 0);
5799 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
5800 validate_change (insn, &SET_SRC (set), new, 0);
5801
5802 /* Remove the REG_EQUAL note from the insn. */
5803 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
5804 remove_note (insn, note);
5805 }
5806 }
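/* For illustration: in

     int f (int x) { return __builtin_constant_p (x); }

   a CONSTANT_P_RTX that survives to this point has a register operand,
   so it is replaced by const0_rtx; one whose operand turned out to
   satisfy CONSTANT_P is replaced by const1_rtx instead.  */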
5807
5808 /* Returns true if EXP represents data that would potentially reside
5809 in a readonly section. */
5810
5811 static bool
5812 readonly_data_expr (tree exp)
5813 {
5814 STRIP_NOPS (exp);
5815
5816 if (TREE_CODE (exp) == ADDR_EXPR)
5817 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
5818 else
5819 return false;
5820 }
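/* A typical candidate is the address of a `const'-qualified variable;
   decl_readonly_section makes the final determination about the
   section its data would be placed in.  */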