1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
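/* Editor's note (illustrative, not part of the original source): each entry
   in builtins.def supplies its enum code as the first DEF_BUILTIN argument,
   so the definition above stringifies it; e.g. the BUILT_IN_ALLOCA entry
   contributes the string "BUILT_IN_ALLOCA" to built_in_names.  */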
73
74 /* Set up an array of _DECL trees, making sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 An entry may be NULL_TREE when the implicit call is invalid (for instance,
79 when the runtime is not required to implement the function in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static int get_pointer_alignment PARAMS ((tree, unsigned int));
83 static tree c_strlen PARAMS ((tree));
84 static const char *c_getstr PARAMS ((tree));
85 static rtx c_readstr PARAMS ((const char *,
86 enum machine_mode));
87 static int target_char_cast PARAMS ((tree, char *));
88 static rtx get_memory_rtx PARAMS ((tree));
89 static int apply_args_size PARAMS ((void));
90 static int apply_result_size PARAMS ((void));
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector PARAMS ((int, rtx));
93 #endif
94 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
95 static void expand_builtin_prefetch PARAMS ((tree));
96 static rtx expand_builtin_apply_args PARAMS ((void));
97 static rtx expand_builtin_apply_args_1 PARAMS ((void));
98 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
99 static void expand_builtin_return PARAMS ((rtx));
100 static enum type_class type_to_class PARAMS ((tree));
101 static rtx expand_builtin_classify_type PARAMS ((tree));
102 static void expand_errno_check PARAMS ((tree, rtx));
103 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
104 static rtx expand_builtin_mathfn_2 PARAMS ((tree, rtx, rtx));
105 static rtx expand_builtin_constant_p PARAMS ((tree));
106 static rtx expand_builtin_args_info PARAMS ((tree));
107 static rtx expand_builtin_next_arg PARAMS ((tree));
108 static rtx expand_builtin_va_start PARAMS ((tree));
109 static rtx expand_builtin_va_end PARAMS ((tree));
110 static rtx expand_builtin_va_copy PARAMS ((tree));
111 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
118 enum machine_mode));
119 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
124 enum machine_mode));
125 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
128 enum machine_mode, int));
129 static rtx expand_builtin_memmove PARAMS ((tree, rtx,
130 enum machine_mode));
131 static rtx expand_builtin_bcopy PARAMS ((tree));
132 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
133 enum machine_mode));
134 static rtx expand_builtin_stpcpy PARAMS ((tree, rtx,
135 enum machine_mode));
136 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
137 enum machine_mode));
138 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
139 enum machine_mode));
140 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static rtx expand_builtin_memset PARAMS ((tree, rtx,
145 enum machine_mode));
146 static rtx expand_builtin_bzero PARAMS ((tree));
147 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
148 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
149 enum machine_mode));
150 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
151 enum machine_mode));
152 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
153 enum machine_mode));
154 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
155 enum machine_mode));
156 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
157 static rtx expand_builtin_unop PARAMS ((enum machine_mode,
158 tree, rtx, rtx, optab));
159 static rtx expand_builtin_frame_address PARAMS ((tree));
160 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
161 static tree stabilize_va_list PARAMS ((tree, int));
162 static rtx expand_builtin_expect PARAMS ((tree, rtx));
163 static tree fold_builtin_constant_p PARAMS ((tree));
164 static tree fold_builtin_classify_type PARAMS ((tree));
165 static tree fold_builtin_inf PARAMS ((tree, int));
166 static tree fold_builtin_nan PARAMS ((tree, tree, int));
167 static int validate_arglist PARAMS ((tree, ...));
168 static tree fold_trunc_transparent_mathfn PARAMS ((tree));
169
170 /* Return the alignment in bits of EXP, a pointer-valued expression,
171 but don't return more than MAX_ALIGN no matter what.
172 The alignment returned is, by default, the alignment of the thing that
173 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
174
175 Otherwise, look at the expression to see if we can do better, i.e., if the
176 expression is actually pointing at an object whose alignment is tighter. */
177
178 static int
179 get_pointer_alignment (exp, max_align)
180 tree exp;
181 unsigned int max_align;
182 {
183 unsigned int align, inner;
184
185 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
186 return 0;
187
188 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 align = MIN (align, max_align);
190
191 while (1)
192 {
193 switch (TREE_CODE (exp))
194 {
195 case NOP_EXPR:
196 case CONVERT_EXPR:
197 case NON_LVALUE_EXPR:
198 exp = TREE_OPERAND (exp, 0);
199 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
200 return align;
201
202 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
203 align = MIN (inner, max_align);
204 break;
205
206 case PLUS_EXPR:
207 /* If sum of pointer + int, restrict our maximum alignment to that
208 imposed by the integer. If not, we can't do any better than
209 ALIGN. */
210 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
211 return align;
212
213 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
214 & (max_align / BITS_PER_UNIT - 1))
215 != 0)
216 max_align >>= 1;
217
218 exp = TREE_OPERAND (exp, 0);
219 break;
220
221 case ADDR_EXPR:
222 /* See what we are pointing at and look at its alignment. */
223 exp = TREE_OPERAND (exp, 0);
224 if (TREE_CODE (exp) == FUNCTION_DECL)
225 align = FUNCTION_BOUNDARY;
226 else if (DECL_P (exp))
227 align = DECL_ALIGN (exp);
228 #ifdef CONSTANT_ALIGNMENT
229 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
230 align = CONSTANT_ALIGNMENT (exp, align);
231 #endif
232 return MIN (align, max_align);
233
234 default:
235 return align;
236 }
237 }
238 }
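/* Editor's illustration (a hedged sketch, not part of the original file;
   the example_* names are hypothetical) of the distinction the walk above
   recovers.  Exact alignments are target dependent.  */
#if 0
static double example_d;

static void
example_alignment (char *p)
{
  /* For &example_d the ADDR_EXPR case yields DECL_ALIGN of the double
     (typically 64 bits); for the bare pointer P only the alignment of the
     pointed-to char type (8 bits) can be assumed.  */
  __builtin_memcpy (p, &example_d, sizeof example_d);
}
#endif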
239
240 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
241 way to do it, because the string could contain a zero byte in the middle;
242 TREE_STRING_LENGTH is the size of the character array, not the string.
243
244 The value returned is of type `ssizetype'.
245
246 Unfortunately, string_constant can't access the values of const char
247 arrays with initializers, so neither can we here. */
248
249 static tree
250 c_strlen (src)
251 tree src;
252 {
253 tree offset_node;
254 HOST_WIDE_INT offset;
255 int max;
256 const char *ptr;
257
258 src = string_constant (src, &offset_node);
259 if (src == 0)
260 return 0;
261
262 max = TREE_STRING_LENGTH (src) - 1;
263 ptr = TREE_STRING_POINTER (src);
264
265 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
266 {
267 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
268 compute the offset to the following null if we don't know where to
269 start searching for it. */
270 int i;
271
272 for (i = 0; i < max; i++)
273 if (ptr[i] == 0)
274 return 0;
275
276 /* We don't know the starting offset, but we do know that the string
277 has no internal zero bytes. We can assume that the offset falls
278 within the bounds of the string; otherwise, the programmer deserves
279 what he gets. Subtract the offset from the length of the string,
280 and return that. This would perhaps not be valid if we were dealing
281 with named arrays in addition to literal string constants. */
282
283 return size_diffop (size_int (max), offset_node);
284 }
285
286 /* We have a known offset into the string. Start searching there for
287 a null character if we can represent it as a single HOST_WIDE_INT. */
288 if (offset_node == 0)
289 offset = 0;
290 else if (! host_integerp (offset_node, 0))
291 offset = -1;
292 else
293 offset = tree_low_cst (offset_node, 0);
294
295 /* If the offset is known to be out of bounds, warn, and call strlen at
296 runtime. */
297 if (offset < 0 || offset > max)
298 {
299 warning ("offset outside bounds of constant string");
300 return 0;
301 }
302
303 /* Use strlen to search for the first zero byte. Since any strings
304 constructed with build_string will have nulls appended, we win even
305 if we get handed something like (char[4])"abcd".
306
307 Since OFFSET is our starting index into the string, no further
308 calculation is needed. */
309 return ssize_int (strlen (ptr + offset));
310 }
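/* Editor's illustration (a hedged sketch, not part of the original file;
   the function name is hypothetical) of the folding c_strlen makes
   possible.  */
#if 0
#include <string.h>

static size_t
example_strlen_fold (void)
{
  /* The literal has TREE_STRING_LENGTH 8 (the array size, including the
     trailing null) but string length 3; with the known offset 4 the call
     can be folded to strlen ("bar"), i.e. the constant 3.  */
  return strlen ("foo\0bar" + 4);
}
#endif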
311
312 /* Return a char pointer for a C string if it is a string constant
313 or sum of string constant and integer constant. */
314
315 static const char *
316 c_getstr (src)
317 tree src;
318 {
319 tree offset_node;
320
321 src = string_constant (src, &offset_node);
322 if (src == 0)
323 return 0;
324
325 if (offset_node == 0)
326 return TREE_STRING_POINTER (src);
327 else if (!host_integerp (offset_node, 1)
328 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
329 return 0;
330
331 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
332 }
333
334 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
335 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
336
337 static rtx
338 c_readstr (str, mode)
339 const char *str;
340 enum machine_mode mode;
341 {
342 HOST_WIDE_INT c[2];
343 HOST_WIDE_INT ch;
344 unsigned int i, j;
345
346 if (GET_MODE_CLASS (mode) != MODE_INT)
347 abort ();
348 c[0] = 0;
349 c[1] = 0;
350 ch = 1;
351 for (i = 0; i < GET_MODE_SIZE (mode); i++)
352 {
353 j = i;
354 if (WORDS_BIG_ENDIAN)
355 j = GET_MODE_SIZE (mode) - i - 1;
356 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
357 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
358 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
359 j *= BITS_PER_UNIT;
360 if (j > 2 * HOST_BITS_PER_WIDE_INT)
361 abort ();
362 if (ch)
363 ch = (unsigned char) str[i];
364 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
365 }
366 return immed_double_const (c[0], c[1], mode);
367 }
368
369 /* Cast a target constant CST to target CHAR and, if that value fits into
370 the host char type, return zero and put that value into the variable
371 pointed to by P. */
372
373 static int
374 target_char_cast (cst, p)
375 tree cst;
376 char *p;
377 {
378 unsigned HOST_WIDE_INT val, hostval;
379
380 if (!host_integerp (cst, 1)
381 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
382 return 1;
383
384 val = tree_low_cst (cst, 1);
385 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
386 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
387
388 hostval = val;
389 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
390 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
391
392 if (val != hostval)
393 return 1;
394
395 *p = hostval;
396 return 0;
397 }
398
399 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
400 times to get the address of either a higher stack frame, or a return
401 address located within it (depending on FNDECL_CODE). */
402
403 rtx
404 expand_builtin_return_addr (fndecl_code, count, tem)
405 enum built_in_function fndecl_code;
406 int count;
407 rtx tem;
408 {
409 int i;
410
411 /* Some machines need special handling before we can access
412 arbitrary frames. For example, on the sparc, we must first flush
413 all register windows to the stack. */
414 #ifdef SETUP_FRAME_ADDRESSES
415 if (count > 0)
416 SETUP_FRAME_ADDRESSES ();
417 #endif
418
419 /* On the sparc, the return address is not in the frame, it is in a
420 register. There is no way to access it off of the current frame
421 pointer, but it can be accessed off the previous frame pointer by
422 reading the value from the register window save area. */
423 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
424 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
425 count--;
426 #endif
427
428 /* Scan back COUNT frames to the specified frame. */
429 for (i = 0; i < count; i++)
430 {
431 /* Assume the dynamic chain pointer is in the word that the
432 frame address points to, unless otherwise specified. */
433 #ifdef DYNAMIC_CHAIN_ADDRESS
434 tem = DYNAMIC_CHAIN_ADDRESS (tem);
435 #endif
436 tem = memory_address (Pmode, tem);
437 tem = gen_rtx_MEM (Pmode, tem);
438 set_mem_alias_set (tem, get_frame_alias_set ());
439 tem = copy_to_reg (tem);
440 }
441
442 /* For __builtin_frame_address, return what we've got. */
443 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
444 return tem;
445
446 /* For __builtin_return_address, get the return address from that
447 frame. */
448 #ifdef RETURN_ADDR_RTX
449 tem = RETURN_ADDR_RTX (count, tem);
450 #else
451 tem = memory_address (Pmode,
452 plus_constant (tem, GET_MODE_SIZE (Pmode)));
453 tem = gen_rtx_MEM (Pmode, tem);
454 set_mem_alias_set (tem, get_frame_alias_set ());
455 #endif
456 return tem;
457 }
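/* Editor's illustration (a hedged sketch, not part of the original file;
   the example_* names are hypothetical) of the user-level builtins expanded
   above.  The level argument must be a constant; nonzero levels walk the
   dynamic chain and are only reliable when the intervening frames keep a
   frame pointer.  */
#if 0
static void *
example_caller_pc (void)
{
  /* Address the current function will return to.  */
  return __builtin_return_address (0);
}

static void *
example_parent_frame (void)
{
  /* Frame address of this function's caller.  */
  return __builtin_frame_address (1);
}
#endif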
458
459 /* Alias set used for setjmp buffer. */
460 static HOST_WIDE_INT setjmp_alias_set = -1;
461
462 /* Construct the leading half of a __builtin_setjmp call. Control will
463 return to RECEIVER_LABEL. This is used directly by sjlj exception
464 handling code. */
465
466 void
467 expand_builtin_setjmp_setup (buf_addr, receiver_label)
468 rtx buf_addr;
469 rtx receiver_label;
470 {
471 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
472 rtx stack_save;
473 rtx mem;
474
475 if (setjmp_alias_set == -1)
476 setjmp_alias_set = new_alias_set ();
477
478 #ifdef POINTERS_EXTEND_UNSIGNED
479 if (GET_MODE (buf_addr) != Pmode)
480 buf_addr = convert_memory_address (Pmode, buf_addr);
481 #endif
482
483 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
484
485 emit_queue ();
486
487 /* We store the frame pointer and the address of receiver_label in
488 the buffer and use the rest of it for the stack save area, which
489 is machine-dependent. */
490
491 #ifndef BUILTIN_SETJMP_FRAME_VALUE
492 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
493 #endif
494
495 mem = gen_rtx_MEM (Pmode, buf_addr);
496 set_mem_alias_set (mem, setjmp_alias_set);
497 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
498
499 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
500 set_mem_alias_set (mem, setjmp_alias_set);
501
502 emit_move_insn (validize_mem (mem),
503 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
504
505 stack_save = gen_rtx_MEM (sa_mode,
506 plus_constant (buf_addr,
507 2 * GET_MODE_SIZE (Pmode)));
508 set_mem_alias_set (stack_save, setjmp_alias_set);
509 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
510
511 /* If there is further processing to do, do it. */
512 #ifdef HAVE_builtin_setjmp_setup
513 if (HAVE_builtin_setjmp_setup)
514 emit_insn (gen_builtin_setjmp_setup (buf_addr));
515 #endif
516
517 /* Tell optimize_save_area_alloca that extra work will need to
518 be done during alloca. */
519 current_function_calls_setjmp = 1;
520
521 /* Set this so all the registers get saved in our frame; we need to be
522 able to copy the saved values for any registers from frames we unwind. */
523 current_function_has_nonlocal_label = 1;
524 }
525
526 /* Construct the trailing part of a __builtin_setjmp call.
527 This is used directly by sjlj exception handling code. */
528
529 void
530 expand_builtin_setjmp_receiver (receiver_label)
531 rtx receiver_label ATTRIBUTE_UNUSED;
532 {
533 /* Clobber the FP when we get here, so we have to make sure it's
534 marked as used by this function. */
535 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
536
537 /* Mark the static chain as clobbered here so life information
538 doesn't get messed up for it. */
539 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
540
541 /* Now put in the code to restore the frame pointer, and argument
542 pointer, if needed. The code below is from expand_end_bindings
543 in stmt.c; see detailed documentation there. */
544 #ifdef HAVE_nonlocal_goto
545 if (! HAVE_nonlocal_goto)
546 #endif
547 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
548
549 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
550 if (fixed_regs[ARG_POINTER_REGNUM])
551 {
552 #ifdef ELIMINABLE_REGS
553 size_t i;
554 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
555
556 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
557 if (elim_regs[i].from == ARG_POINTER_REGNUM
558 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
559 break;
560
561 if (i == ARRAY_SIZE (elim_regs))
562 #endif
563 {
564 /* Now restore our arg pointer from the address at which it
565 was saved in our stack frame. */
566 emit_move_insn (virtual_incoming_args_rtx,
567 copy_to_reg (get_arg_pointer_save_area (cfun)));
568 }
569 }
570 #endif
571
572 #ifdef HAVE_builtin_setjmp_receiver
573 if (HAVE_builtin_setjmp_receiver)
574 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
575 else
576 #endif
577 #ifdef HAVE_nonlocal_goto_receiver
578 if (HAVE_nonlocal_goto_receiver)
579 emit_insn (gen_nonlocal_goto_receiver ());
580 else
581 #endif
582 { /* Nothing */ }
583
584 /* @@@ This is a kludge. Not all machine descriptions define a blockage
585 insn, but we must not allow the code we just generated to be reordered
586 by scheduling. Specifically, the update of the frame pointer must
587 happen immediately, not later. So emit an ASM_INPUT to act as blockage
588 insn. */
589 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
590 }
591
592 /* __builtin_setjmp is passed a pointer to an array of five words (not
593 all will be used on all machines). It operates similarly to the C
594 library function of the same name, but is more efficient. Much of
595 the code below (and for longjmp) is copied from the handling of
596 non-local gotos.
597
598 NOTE: This is intended for use by GNAT and the exception handling
599 scheme in the compiler, and will only work in the manner in which
600 they use it. */
601
602 static rtx
603 expand_builtin_setjmp (arglist, target)
604 tree arglist;
605 rtx target;
606 {
607 rtx buf_addr, next_lab, cont_lab;
608
609 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
610 return NULL_RTX;
611
612 if (target == 0 || GET_CODE (target) != REG
613 || REGNO (target) < FIRST_PSEUDO_REGISTER)
614 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
615
616 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
617
618 next_lab = gen_label_rtx ();
619 cont_lab = gen_label_rtx ();
620
621 expand_builtin_setjmp_setup (buf_addr, next_lab);
622
623 /* Set TARGET to zero and branch to the continue label. */
624 emit_move_insn (target, const0_rtx);
625 emit_jump_insn (gen_jump (cont_lab));
626 emit_barrier ();
627 emit_label (next_lab);
628
629 expand_builtin_setjmp_receiver (next_lab);
630
631 /* Set TARGET to one. */
632 emit_move_insn (target, const1_rtx);
633 emit_label (cont_lab);
634
635 /* Tell flow about the strange goings on. Putting `next_lab' on
636 `nonlocal_goto_handler_labels' indicates that function
637 calls may traverse the arc back to this label. */
638
639 current_function_has_nonlocal_label = 1;
640 nonlocal_goto_handler_labels
641 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
642
643 return target;
644 }
645
646 /* __builtin_longjmp is passed a pointer to an array of five words (not
647 all will be used on all machines). It operates similarly to the C
648 library function of the same name, but is more efficient. Much of
649 the code below is copied from the handling of non-local gotos.
650
651 NOTE: This is intended for use by GNAT and the exception handling
652 scheme in the compiler, and will only work in the manner in which
653 they use it. */
654
655 void
656 expand_builtin_longjmp (buf_addr, value)
657 rtx buf_addr, value;
658 {
659 rtx fp, lab, stack, insn, last;
660 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
661
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
664
665 #ifdef POINTERS_EXTEND_UNSIGNED
666 if (GET_MODE (buf_addr) != Pmode)
667 buf_addr = convert_memory_address (Pmode, buf_addr);
668 #endif
669
670 buf_addr = force_reg (Pmode, buf_addr);
671
672 /* We used to store value in static_chain_rtx, but that fails if pointers
673 are smaller than integers. We instead require that the user must pass
674 a second argument of 1, because that is what builtin_setjmp will
675 return. This also makes EH slightly more efficient, since we are no
676 longer copying around a value that we don't care about. */
677 if (value != const1_rtx)
678 abort ();
679
680 current_function_calls_longjmp = 1;
681
682 last = get_last_insn ();
683 #ifdef HAVE_builtin_longjmp
684 if (HAVE_builtin_longjmp)
685 emit_insn (gen_builtin_longjmp (buf_addr));
686 else
687 #endif
688 {
689 fp = gen_rtx_MEM (Pmode, buf_addr);
690 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
691 GET_MODE_SIZE (Pmode)));
692
693 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
694 2 * GET_MODE_SIZE (Pmode)));
695 set_mem_alias_set (fp, setjmp_alias_set);
696 set_mem_alias_set (lab, setjmp_alias_set);
697 set_mem_alias_set (stack, setjmp_alias_set);
698
699 /* Pick up FP, label, and SP from the block and jump. This code is
700 from expand_goto in stmt.c; see there for detailed comments. */
701 #if HAVE_nonlocal_goto
702 if (HAVE_nonlocal_goto)
703 /* We have to pass a value to the nonlocal_goto pattern that will
704 get copied into the static_chain pointer, but it does not matter
705 what that value is, because builtin_setjmp does not use it. */
706 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
707 else
708 #endif
709 {
710 lab = copy_to_reg (lab);
711
712 emit_move_insn (hard_frame_pointer_rtx, fp);
713 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
714
715 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
716 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
717 emit_indirect_jump (lab);
718 }
719 }
720
721 /* Search backwards and mark the jump insn as a non-local goto.
722 Note that this precludes the use of __builtin_longjmp to a
723 __builtin_setjmp target in the same function. However, we've
724 already cautioned the user that these functions are for
725 internal exception handling use only. */
726 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
727 {
728 if (insn == last)
729 abort ();
730 if (GET_CODE (insn) == JUMP_INSN)
731 {
732 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
733 REG_NOTES (insn));
734 break;
735 }
736 else if (GET_CODE (insn) == CALL_INSN)
737 break;
738 }
739 }
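/* Editor's illustration (a hedged sketch, not part of the original file;
   the example_* names are hypothetical) of the pairing described above,
   intended only for internal exception-handling use: the buffer is an array
   of five words, the second __builtin_longjmp argument must be the constant
   1, and the jump must not target a __builtin_setjmp in the same function.  */
#if 0
static void *example_buf[5];

static void
example_throw (void)
{
  __builtin_longjmp (example_buf, 1);  /* Does not return.  */
}

static int
example_catch (void)
{
  if (__builtin_setjmp (example_buf) == 0)
    {
      example_throw ();
      return 0;   /* Not reached.  */
    }
  return 1;       /* Reached via the longjmp.  */
}
#endif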
740
741 /* Expand a call to __builtin_prefetch. For a target that does not support
742 data prefetch, evaluate the memory address argument in case it has side
743 effects. */
744
745 static void
746 expand_builtin_prefetch (arglist)
747 tree arglist;
748 {
749 tree arg0, arg1, arg2;
750 rtx op0, op1, op2;
751
752 if (!validate_arglist (arglist, POINTER_TYPE, 0))
753 return;
754
755 arg0 = TREE_VALUE (arglist);
756 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
757 zero (read) and argument 2 (locality) defaults to 3 (high degree of
758 locality). */
759 if (TREE_CHAIN (arglist))
760 {
761 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
762 if (TREE_CHAIN (TREE_CHAIN (arglist)))
763 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
764 else
765 arg2 = build_int_2 (3, 0);
766 }
767 else
768 {
769 arg1 = integer_zero_node;
770 arg2 = build_int_2 (3, 0);
771 }
772
773 /* Argument 0 is an address. */
774 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
775
776 /* Argument 1 (read/write flag) must be a compile-time constant int. */
777 if (TREE_CODE (arg1) != INTEGER_CST)
778 {
779 error ("second arg to `__builtin_prefetch' must be a constant");
780 arg1 = integer_zero_node;
781 }
782 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
783 /* Argument 1 must be either zero or one. */
784 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
785 {
786 warning ("invalid second arg to __builtin_prefetch; using zero");
787 op1 = const0_rtx;
788 }
789
790 /* Argument 2 (locality) must be a compile-time constant int. */
791 if (TREE_CODE (arg2) != INTEGER_CST)
792 {
793 error ("third arg to `__builtin_prefetch' must be a constant");
794 arg2 = integer_zero_node;
795 }
796 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
797 /* Argument 2 must be 0, 1, 2, or 3. */
798 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
799 {
800 warning ("invalid third arg to __builtin_prefetch; using zero");
801 op2 = const0_rtx;
802 }
803
804 #ifdef HAVE_prefetch
805 if (HAVE_prefetch)
806 {
807 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
808 (op0,
809 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
810 || (GET_MODE(op0) != Pmode))
811 {
812 #ifdef POINTERS_EXTEND_UNSIGNED
813 if (GET_MODE(op0) != Pmode)
814 op0 = convert_memory_address (Pmode, op0);
815 #endif
816 op0 = force_reg (Pmode, op0);
817 }
818 emit_insn (gen_prefetch (op0, op1, op2));
819 }
820 else
821 #endif
822 op0 = protect_from_queue (op0, 0);
823 /* Don't do anything with direct references to volatile memory, but
824 generate code to handle other side effects. */
825 if (GET_CODE (op0) != MEM && side_effects_p (op0))
826 emit_insn (op0);
827 }
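/* Editor's illustration (a hedged sketch, not part of the original file;
   the function name is hypothetical) of the argument conventions checked
   above.  */
#if 0
static void
example_prefetch (const double *p, double *q)
{
  __builtin_prefetch (p);        /* Read/write defaults to 0 (read),
                                    locality defaults to 3.  */
  __builtin_prefetch (q, 1, 0);  /* Prefetch for write, no temporal
                                    locality.  */
}
#endif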
828
829 /* Get a MEM rtx for expression EXP which is the address of an operand
830 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
831
832 static rtx
833 get_memory_rtx (exp)
834 tree exp;
835 {
836 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
837 rtx mem;
838
839 #ifdef POINTERS_EXTEND_UNSIGNED
840 if (GET_MODE (addr) != Pmode)
841 addr = convert_memory_address (Pmode, addr);
842 #endif
843
844 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
845
846 /* Get an expression we can use to find the attributes to assign to MEM.
847 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
848 we can. First remove any nops. */
849 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
850 || TREE_CODE (exp) == NON_LVALUE_EXPR)
851 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
852 exp = TREE_OPERAND (exp, 0);
853
854 if (TREE_CODE (exp) == ADDR_EXPR)
855 {
856 exp = TREE_OPERAND (exp, 0);
857 set_mem_attributes (mem, exp, 0);
858 }
859 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
860 {
861 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
862 /* memcpy, memset and other builtin stringops can alias with anything. */
863 set_mem_alias_set (mem, 0);
864 }
865
866 return mem;
867 }
868 \f
869 /* Built-in functions to perform an untyped call and return. */
870
871 /* For each register that may be used for calling a function, this
872 gives a mode used to copy the register's value. VOIDmode indicates
873 the register is not used for calling a function. If the machine
874 has register windows, this gives only the outbound registers.
875 INCOMING_REGNO gives the corresponding inbound register. */
876 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
877
878 /* For each register that may be used for returning values, this gives
879 a mode used to copy the register's value. VOIDmode indicates the
880 register is not used for returning values. If the machine has
881 register windows, this gives only the outbound registers.
882 INCOMING_REGNO gives the corresponding inbound register. */
883 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
884
885 /* For each register that may be used for calling a function, this
886 gives the offset of that register into the block returned by
887 __builtin_apply_args. 0 indicates that the register is not
888 used for calling a function. */
889 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
890
891 /* Return the offset of register REGNO into the block returned by
892 __builtin_apply_args. This is not declared static, since it is
893 needed in objc-act.c. */
894
895 int
896 apply_args_register_offset (regno)
897 int regno;
898 {
899 apply_args_size ();
900
901 /* Arguments are always put in outgoing registers (in the argument
902 block) when that makes sense. */
903 #ifdef OUTGOING_REGNO
904 regno = OUTGOING_REGNO (regno);
905 #endif
906 return apply_args_reg_offset[regno];
907 }
908
909 /* Return the size required for the block returned by __builtin_apply_args,
910 and initialize apply_args_mode. */
911
912 static int
913 apply_args_size ()
914 {
915 static int size = -1;
916 int align;
917 unsigned int regno;
918 enum machine_mode mode;
919
920 /* The values computed by this function never change. */
921 if (size < 0)
922 {
923 /* The first value is the incoming arg-pointer. */
924 size = GET_MODE_SIZE (Pmode);
925
926 /* The second value is the structure value address unless this is
927 passed as an "invisible" first argument. */
928 if (struct_value_rtx)
929 size += GET_MODE_SIZE (Pmode);
930
931 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
932 if (FUNCTION_ARG_REGNO_P (regno))
933 {
934 /* Search for the proper mode for copying this register's
935 value. I'm not sure this is right, but it works so far. */
936 enum machine_mode best_mode = VOIDmode;
937
938 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
939 mode != VOIDmode;
940 mode = GET_MODE_WIDER_MODE (mode))
941 if (HARD_REGNO_MODE_OK (regno, mode)
942 && HARD_REGNO_NREGS (regno, mode) == 1)
943 best_mode = mode;
944
945 if (best_mode == VOIDmode)
946 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
947 mode != VOIDmode;
948 mode = GET_MODE_WIDER_MODE (mode))
949 if (HARD_REGNO_MODE_OK (regno, mode)
950 && have_insn_for (SET, mode))
951 best_mode = mode;
952
953 if (best_mode == VOIDmode)
954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
955 mode != VOIDmode;
956 mode = GET_MODE_WIDER_MODE (mode))
957 if (HARD_REGNO_MODE_OK (regno, mode)
958 && have_insn_for (SET, mode))
959 best_mode = mode;
960
961 if (best_mode == VOIDmode)
962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
963 mode != VOIDmode;
964 mode = GET_MODE_WIDER_MODE (mode))
965 if (HARD_REGNO_MODE_OK (regno, mode)
966 && have_insn_for (SET, mode))
967 best_mode = mode;
968
969 mode = best_mode;
970 if (mode == VOIDmode)
971 abort ();
972
973 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
974 if (size % align != 0)
975 size = CEIL (size, align) * align;
976 apply_args_reg_offset[regno] = size;
977 size += GET_MODE_SIZE (mode);
978 apply_args_mode[regno] = mode;
979 }
980 else
981 {
982 apply_args_mode[regno] = VOIDmode;
983 apply_args_reg_offset[regno] = 0;
984 }
985 }
986 return size;
987 }
988
989 /* Return the size required for the block returned by __builtin_apply,
990 and initialize apply_result_mode. */
991
992 static int
993 apply_result_size ()
994 {
995 static int size = -1;
996 int align, regno;
997 enum machine_mode mode;
998
999 /* The values computed by this function never change. */
1000 if (size < 0)
1001 {
1002 size = 0;
1003
1004 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1005 if (FUNCTION_VALUE_REGNO_P (regno))
1006 {
1007 /* Search for the proper mode for copying this register's
1008 value. I'm not sure this is right, but it works so far. */
1009 enum machine_mode best_mode = VOIDmode;
1010
1011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1012 mode != TImode;
1013 mode = GET_MODE_WIDER_MODE (mode))
1014 if (HARD_REGNO_MODE_OK (regno, mode))
1015 best_mode = mode;
1016
1017 if (best_mode == VOIDmode)
1018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1019 mode != VOIDmode;
1020 mode = GET_MODE_WIDER_MODE (mode))
1021 if (HARD_REGNO_MODE_OK (regno, mode)
1022 && have_insn_for (SET, mode))
1023 best_mode = mode;
1024
1025 if (best_mode == VOIDmode)
1026 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1027 mode != VOIDmode;
1028 mode = GET_MODE_WIDER_MODE (mode))
1029 if (HARD_REGNO_MODE_OK (regno, mode)
1030 && have_insn_for (SET, mode))
1031 best_mode = mode;
1032
1033 if (best_mode == VOIDmode)
1034 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1035 mode != VOIDmode;
1036 mode = GET_MODE_WIDER_MODE (mode))
1037 if (HARD_REGNO_MODE_OK (regno, mode)
1038 && have_insn_for (SET, mode))
1039 best_mode = mode;
1040
1041 mode = best_mode;
1042 if (mode == VOIDmode)
1043 abort ();
1044
1045 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1046 if (size % align != 0)
1047 size = CEIL (size, align) * align;
1048 size += GET_MODE_SIZE (mode);
1049 apply_result_mode[regno] = mode;
1050 }
1051 else
1052 apply_result_mode[regno] = VOIDmode;
1053
1054 /* Allow targets that use untyped_call and untyped_return to override
1055 the size so that machine-specific information can be stored here. */
1056 #ifdef APPLY_RESULT_SIZE
1057 size = APPLY_RESULT_SIZE;
1058 #endif
1059 }
1060 return size;
1061 }
1062
1063 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1064 /* Create a vector describing the result block RESULT. If SAVEP is true,
1065 the result block is used to save the values; otherwise it is used to
1066 restore the values. */
1067
1068 static rtx
1069 result_vector (savep, result)
1070 int savep;
1071 rtx result;
1072 {
1073 int regno, size, align, nelts;
1074 enum machine_mode mode;
1075 rtx reg, mem;
1076 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1077
1078 size = nelts = 0;
1079 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1080 if ((mode = apply_result_mode[regno]) != VOIDmode)
1081 {
1082 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1083 if (size % align != 0)
1084 size = CEIL (size, align) * align;
1085 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1086 mem = adjust_address (result, mode, size);
1087 savevec[nelts++] = (savep
1088 ? gen_rtx_SET (VOIDmode, mem, reg)
1089 : gen_rtx_SET (VOIDmode, reg, mem));
1090 size += GET_MODE_SIZE (mode);
1091 }
1092 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1093 }
1094 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1095
1096 /* Save the state required to perform an untyped call with the same
1097 arguments as were passed to the current function. */
1098
1099 static rtx
1100 expand_builtin_apply_args_1 ()
1101 {
1102 rtx registers;
1103 int size, align, regno;
1104 enum machine_mode mode;
1105
1106 /* Create a block where the arg-pointer, structure value address,
1107 and argument registers can be saved. */
1108 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1109
1110 /* Walk past the arg-pointer and structure value address. */
1111 size = GET_MODE_SIZE (Pmode);
1112 if (struct_value_rtx)
1113 size += GET_MODE_SIZE (Pmode);
1114
1115 /* Save each register used in calling a function to the block. */
1116 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1117 if ((mode = apply_args_mode[regno]) != VOIDmode)
1118 {
1119 rtx tem;
1120
1121 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1122 if (size % align != 0)
1123 size = CEIL (size, align) * align;
1124
1125 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1126
1127 emit_move_insn (adjust_address (registers, mode, size), tem);
1128 size += GET_MODE_SIZE (mode);
1129 }
1130
1131 /* Save the arg pointer to the block. */
1132 emit_move_insn (adjust_address (registers, Pmode, 0),
1133 copy_to_reg (virtual_incoming_args_rtx));
1134 size = GET_MODE_SIZE (Pmode);
1135
1136 /* Save the structure value address unless this is passed as an
1137 "invisible" first argument. */
1138 if (struct_value_incoming_rtx)
1139 {
1140 emit_move_insn (adjust_address (registers, Pmode, size),
1141 copy_to_reg (struct_value_incoming_rtx));
1142 size += GET_MODE_SIZE (Pmode);
1143 }
1144
1145 /* Return the address of the block. */
1146 return copy_addr_to_reg (XEXP (registers, 0));
1147 }
1148
1149 /* __builtin_apply_args returns block of memory allocated on
1150 the stack into which is stored the arg pointer, structure
1151 value address, static chain, and all the registers that might
1152 possibly be used in performing a function call. The code is
1153 moved to the start of the function so the incoming values are
1154 saved. */
1155
1156 static rtx
1157 expand_builtin_apply_args ()
1158 {
1159 /* Don't do __builtin_apply_args more than once in a function.
1160 Save the result of the first call and reuse it. */
1161 if (apply_args_value != 0)
1162 return apply_args_value;
1163 {
1164 /* When this function is called, it means that registers must be
1165 saved on entry to this function. So we migrate the
1166 call to the first insn of this function. */
1167 rtx temp;
1168 rtx seq;
1169
1170 start_sequence ();
1171 temp = expand_builtin_apply_args_1 ();
1172 seq = get_insns ();
1173 end_sequence ();
1174
1175 apply_args_value = temp;
1176
1177 /* Put the insns after the NOTE that starts the function.
1178 If this is inside a start_sequence, make the outer-level insn
1179 chain current, so the code is placed at the start of the
1180 function. */
1181 push_topmost_sequence ();
1182 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1183 pop_topmost_sequence ();
1184 return temp;
1185 }
1186 }
1187
1188 /* Perform an untyped call and save the state required to perform an
1189 untyped return of whatever value was returned by the given function. */
1190
1191 static rtx
1192 expand_builtin_apply (function, arguments, argsize)
1193 rtx function, arguments, argsize;
1194 {
1195 int size, align, regno;
1196 enum machine_mode mode;
1197 rtx incoming_args, result, reg, dest, src, call_insn;
1198 rtx old_stack_level = 0;
1199 rtx call_fusage = 0;
1200
1201 #ifdef POINTERS_EXTEND_UNSIGNED
1202 if (GET_MODE (arguments) != Pmode)
1203 arguments = convert_memory_address (Pmode, arguments);
1204 #endif
1205
1206 /* Create a block where the return registers can be saved. */
1207 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1208
1209 /* Fetch the arg pointer from the ARGUMENTS block. */
1210 incoming_args = gen_reg_rtx (Pmode);
1211 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1212 #ifndef STACK_GROWS_DOWNWARD
1213 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1214 incoming_args, 0, OPTAB_LIB_WIDEN);
1215 #endif
1216
1217 /* Perform postincrements before actually calling the function. */
1218 emit_queue ();
1219
1220 /* Push a new argument block and copy the arguments. Do not allow
1221 the (potential) memcpy call below to interfere with our stack
1222 manipulations. */
1223 do_pending_stack_adjust ();
1224 NO_DEFER_POP;
1225
1226 /* Save the stack with nonlocal if available */
1227 #ifdef HAVE_save_stack_nonlocal
1228 if (HAVE_save_stack_nonlocal)
1229 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1230 else
1231 #endif
1232 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1233
1234 /* Push a block of memory onto the stack to store the memory arguments.
1235 Save the address in a register, and copy the memory arguments. ??? I
1236 haven't figured out how the calling convention macros affect this,
1237 but it's likely that the source and/or destination addresses in
1238 the block copy will need updating in machine-specific ways. */
1239 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1240 dest = gen_rtx_MEM (BLKmode, dest);
1241 set_mem_align (dest, PARM_BOUNDARY);
1242 src = gen_rtx_MEM (BLKmode, incoming_args);
1243 set_mem_align (src, PARM_BOUNDARY);
1244 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1245
1246 /* Refer to the argument block. */
1247 apply_args_size ();
1248 arguments = gen_rtx_MEM (BLKmode, arguments);
1249 set_mem_align (arguments, PARM_BOUNDARY);
1250
1251 /* Walk past the arg-pointer and structure value address. */
1252 size = GET_MODE_SIZE (Pmode);
1253 if (struct_value_rtx)
1254 size += GET_MODE_SIZE (Pmode);
1255
1256 /* Restore each of the registers previously saved. Make USE insns
1257 for each of these registers for use in making the call. */
1258 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1259 if ((mode = apply_args_mode[regno]) != VOIDmode)
1260 {
1261 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1262 if (size % align != 0)
1263 size = CEIL (size, align) * align;
1264 reg = gen_rtx_REG (mode, regno);
1265 emit_move_insn (reg, adjust_address (arguments, mode, size));
1266 use_reg (&call_fusage, reg);
1267 size += GET_MODE_SIZE (mode);
1268 }
1269
1270 /* Restore the structure value address unless this is passed as an
1271 "invisible" first argument. */
1272 size = GET_MODE_SIZE (Pmode);
1273 if (struct_value_rtx)
1274 {
1275 rtx value = gen_reg_rtx (Pmode);
1276 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1277 emit_move_insn (struct_value_rtx, value);
1278 if (GET_CODE (struct_value_rtx) == REG)
1279 use_reg (&call_fusage, struct_value_rtx);
1280 size += GET_MODE_SIZE (Pmode);
1281 }
1282
1283 /* All arguments and registers used for the call are set up by now! */
1284 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1285
1286 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
1287 no need, and we don't want to load it into a register as an optimization,
1288 because prepare_call_address already did that if it should be done. */
1289 if (GET_CODE (function) != SYMBOL_REF)
1290 function = memory_address (FUNCTION_MODE, function);
1291
1292 /* Generate the actual call instruction and save the return value. */
1293 #ifdef HAVE_untyped_call
1294 if (HAVE_untyped_call)
1295 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1296 result, result_vector (1, result)));
1297 else
1298 #endif
1299 #ifdef HAVE_call_value
1300 if (HAVE_call_value)
1301 {
1302 rtx valreg = 0;
1303
1304 /* Locate the unique return register. It is not possible to
1305 express a call that sets more than one return register using
1306 call_value; use untyped_call for that. In fact, untyped_call
1307 only needs to save the return registers in the given block. */
1308 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1309 if ((mode = apply_result_mode[regno]) != VOIDmode)
1310 {
1311 if (valreg)
1312 abort (); /* HAVE_untyped_call required. */
1313 valreg = gen_rtx_REG (mode, regno);
1314 }
1315
1316 emit_call_insn (GEN_CALL_VALUE (valreg,
1317 gen_rtx_MEM (FUNCTION_MODE, function),
1318 const0_rtx, NULL_RTX, const0_rtx));
1319
1320 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1321 }
1322 else
1323 #endif
1324 abort ();
1325
1326 /* Find the CALL insn we just emitted, and attach the register usage
1327 information. */
1328 call_insn = last_call_insn ();
1329 add_function_usage_to (call_insn, call_fusage);
1330
1331 /* Restore the stack. */
1332 #ifdef HAVE_save_stack_nonlocal
1333 if (HAVE_save_stack_nonlocal)
1334 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1335 else
1336 #endif
1337 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1338
1339 OK_DEFER_POP;
1340
1341 /* Return the address of the result block. */
1342 return copy_addr_to_reg (XEXP (result, 0));
1343 }
1344
1345 /* Perform an untyped return. */
1346
1347 static void
1348 expand_builtin_return (result)
1349 rtx result;
1350 {
1351 int size, align, regno;
1352 enum machine_mode mode;
1353 rtx reg;
1354 rtx call_fusage = 0;
1355
1356 #ifdef POINTERS_EXTEND_UNSIGNED
1357 if (GET_MODE (result) != Pmode)
1358 result = convert_memory_address (Pmode, result);
1359 #endif
1360
1361 apply_result_size ();
1362 result = gen_rtx_MEM (BLKmode, result);
1363
1364 #ifdef HAVE_untyped_return
1365 if (HAVE_untyped_return)
1366 {
1367 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1368 emit_barrier ();
1369 return;
1370 }
1371 #endif
1372
1373 /* Restore the return value and note that each value is used. */
1374 size = 0;
1375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1376 if ((mode = apply_result_mode[regno]) != VOIDmode)
1377 {
1378 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1379 if (size % align != 0)
1380 size = CEIL (size, align) * align;
1381 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1382 emit_move_insn (reg, adjust_address (result, mode, size));
1383
1384 push_to_sequence (call_fusage);
1385 emit_insn (gen_rtx_USE (VOIDmode, reg));
1386 call_fusage = get_insns ();
1387 end_sequence ();
1388 size += GET_MODE_SIZE (mode);
1389 }
1390
1391 /* Put the USE insns before the return. */
1392 emit_insn (call_fusage);
1393
1394 /* Return whatever value was restored by jumping directly to the end
1395 of the function. */
1396 expand_null_return ();
1397 }
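/* Editor's illustration (a hedged sketch, not part of the original file;
   the example_* names are hypothetical) of the untyped call/return builtins
   expanded above, here forwarding the incoming arguments to another
   function.  The size argument bounds the stack arguments copied by
   __builtin_apply.  */
#if 0
void example_target ();

static void
example_forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) example_target, args, 64);
  __builtin_return (result);
}
#endif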
1398
1399 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1400
1401 static enum type_class
1402 type_to_class (type)
1403 tree type;
1404 {
1405 switch (TREE_CODE (type))
1406 {
1407 case VOID_TYPE: return void_type_class;
1408 case INTEGER_TYPE: return integer_type_class;
1409 case CHAR_TYPE: return char_type_class;
1410 case ENUMERAL_TYPE: return enumeral_type_class;
1411 case BOOLEAN_TYPE: return boolean_type_class;
1412 case POINTER_TYPE: return pointer_type_class;
1413 case REFERENCE_TYPE: return reference_type_class;
1414 case OFFSET_TYPE: return offset_type_class;
1415 case REAL_TYPE: return real_type_class;
1416 case COMPLEX_TYPE: return complex_type_class;
1417 case FUNCTION_TYPE: return function_type_class;
1418 case METHOD_TYPE: return method_type_class;
1419 case RECORD_TYPE: return record_type_class;
1420 case UNION_TYPE:
1421 case QUAL_UNION_TYPE: return union_type_class;
1422 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1423 ? string_type_class : array_type_class);
1424 case SET_TYPE: return set_type_class;
1425 case FILE_TYPE: return file_type_class;
1426 case LANG_TYPE: return lang_type_class;
1427 default: return no_type_class;
1428 }
1429 }
1430
1431 /* Expand a call to __builtin_classify_type with arguments found in
1432 ARGLIST. */
1433
1434 static rtx
1435 expand_builtin_classify_type (arglist)
1436 tree arglist;
1437 {
1438 if (arglist != 0)
1439 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1440 return GEN_INT (no_type_class);
1441 }
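/* Editor's illustration (a hedged sketch, not part of the original file;
   the function name is hypothetical): __builtin_classify_type reports the
   type class of its argument as one of the enum type_class values from
   typeclass.h.  */
#if 0
static int
example_classify (double d, char *p)
{
  /* Yields integer_type_class, real_type_class and pointer_type_class
     respectively.  */
  return __builtin_classify_type (0)
         + __builtin_classify_type (d)
         + __builtin_classify_type (p);
}
#endif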
1442
1443 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1444
1445 static rtx
1446 expand_builtin_constant_p (exp)
1447 tree exp;
1448 {
1449 tree arglist = TREE_OPERAND (exp, 1);
1450 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1451 rtx tmp;
1452
1453 if (arglist == 0)
1454 return const0_rtx;
1455 arglist = TREE_VALUE (arglist);
1456
1457 /* We have taken care of the easy cases during constant folding. This
1458 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1459 get a chance to see if it can deduce whether ARGLIST is constant. */
1460
1461 current_function_calls_constant_p = 1;
1462
1463 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1464 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1465 return tmp;
1466 }
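/* Editor's illustration (a hedged sketch, not part of the original file;
   the function name is hypothetical) of the cases left for CONSTANT_P_RTX.
   A literal folds to 1 up front; for a variable operand the answer may only
   become known after inlining, unrolling or CSE, which is what the deferred
   rtx allows.  */
#if 0
static int
example_constant_p (int x)
{
  return __builtin_constant_p (42) + __builtin_constant_p (x);
}
#endif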
1467
1468 /* Return the mathematical function equivalent to FN but operating directly
1469 on TYPE, if available. */
1470 tree
1471 mathfn_built_in (type, fn)
1472 tree type;
1473 enum built_in_function fn;
1474 {
1475 enum built_in_function fcode = NOT_BUILT_IN;
1476 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1477 switch (fn)
1478 {
1479 case BUILT_IN_SQRT:
1480 case BUILT_IN_SQRTF:
1481 case BUILT_IN_SQRTL:
1482 fcode = BUILT_IN_SQRT;
1483 break;
1484 case BUILT_IN_SIN:
1485 case BUILT_IN_SINF:
1486 case BUILT_IN_SINL:
1487 fcode = BUILT_IN_SIN;
1488 break;
1489 case BUILT_IN_COS:
1490 case BUILT_IN_COSF:
1491 case BUILT_IN_COSL:
1492 fcode = BUILT_IN_COS;
1493 break;
1494 case BUILT_IN_EXP:
1495 case BUILT_IN_EXPF:
1496 case BUILT_IN_EXPL:
1497 fcode = BUILT_IN_EXP;
1498 break;
1499 case BUILT_IN_LOG:
1500 case BUILT_IN_LOGF:
1501 case BUILT_IN_LOGL:
1502 fcode = BUILT_IN_LOG;
1503 break;
1504 case BUILT_IN_FLOOR:
1505 case BUILT_IN_FLOORF:
1506 case BUILT_IN_FLOORL:
1507 fcode = BUILT_IN_FLOOR;
1508 break;
1509 case BUILT_IN_CEIL:
1510 case BUILT_IN_CEILF:
1511 case BUILT_IN_CEILL:
1512 fcode = BUILT_IN_CEIL;
1513 break;
1514 case BUILT_IN_TRUNC:
1515 case BUILT_IN_TRUNCF:
1516 case BUILT_IN_TRUNCL:
1517 fcode = BUILT_IN_TRUNC;
1518 break;
1519 case BUILT_IN_ROUND:
1520 case BUILT_IN_ROUNDF:
1521 case BUILT_IN_ROUNDL:
1522 fcode = BUILT_IN_ROUND;
1523 break;
1524 case BUILT_IN_NEARBYINT:
1525 case BUILT_IN_NEARBYINTF:
1526 case BUILT_IN_NEARBYINTL:
1527 fcode = BUILT_IN_NEARBYINT;
1528 break;
1529 default:
1530 abort ();
1531 }
1532 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1533 switch (fn)
1534 {
1535 case BUILT_IN_SQRT:
1536 case BUILT_IN_SQRTF:
1537 case BUILT_IN_SQRTL:
1538 fcode = BUILT_IN_SQRTF;
1539 break;
1540 case BUILT_IN_SIN:
1541 case BUILT_IN_SINF:
1542 case BUILT_IN_SINL:
1543 fcode = BUILT_IN_SINF;
1544 break;
1545 case BUILT_IN_COS:
1546 case BUILT_IN_COSF:
1547 case BUILT_IN_COSL:
1548 fcode = BUILT_IN_COSF;
1549 break;
1550 case BUILT_IN_EXP:
1551 case BUILT_IN_EXPF:
1552 case BUILT_IN_EXPL:
1553 fcode = BUILT_IN_EXPF;
1554 break;
1555 case BUILT_IN_LOG:
1556 case BUILT_IN_LOGF:
1557 case BUILT_IN_LOGL:
1558 fcode = BUILT_IN_LOGF;
1559 break;
1560 case BUILT_IN_FLOOR:
1561 case BUILT_IN_FLOORF:
1562 case BUILT_IN_FLOORL:
1563 fcode = BUILT_IN_FLOORF;
1564 break;
1565 case BUILT_IN_CEIL:
1566 case BUILT_IN_CEILF:
1567 case BUILT_IN_CEILL:
1568 fcode = BUILT_IN_CEILF;
1569 break;
1570 case BUILT_IN_TRUNC:
1571 case BUILT_IN_TRUNCF:
1572 case BUILT_IN_TRUNCL:
1573 fcode = BUILT_IN_TRUNCF;
1574 break;
1575 case BUILT_IN_ROUND:
1576 case BUILT_IN_ROUNDF:
1577 case BUILT_IN_ROUNDL:
1578 fcode = BUILT_IN_ROUNDF;
1579 break;
1580 case BUILT_IN_NEARBYINT:
1581 case BUILT_IN_NEARBYINTF:
1582 case BUILT_IN_NEARBYINTL:
1583 fcode = BUILT_IN_NEARBYINTF;
1584 break;
1585 default:
1586 abort ();
1587 }
1588 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1589 switch (fn)
1590 {
1591 case BUILT_IN_SQRT:
1592 case BUILT_IN_SQRTF:
1593 case BUILT_IN_SQRTL:
1594 fcode = BUILT_IN_SQRTL;
1595 break;
1596 case BUILT_IN_SIN:
1597 case BUILT_IN_SINF:
1598 case BUILT_IN_SINL:
1599 fcode = BUILT_IN_SINL;
1600 break;
1601 case BUILT_IN_COS:
1602 case BUILT_IN_COSF:
1603 case BUILT_IN_COSL:
1604 fcode = BUILT_IN_COSL;
1605 break;
1606 case BUILT_IN_EXP:
1607 case BUILT_IN_EXPF:
1608 case BUILT_IN_EXPL:
1609 fcode = BUILT_IN_EXPL;
1610 break;
1611 case BUILT_IN_LOG:
1612 case BUILT_IN_LOGF:
1613 case BUILT_IN_LOGL:
1614 fcode = BUILT_IN_LOGL;
1615 break;
1616 case BUILT_IN_FLOOR:
1617 case BUILT_IN_FLOORF:
1618 case BUILT_IN_FLOORL:
1619 fcode = BUILT_IN_FLOORL;
1620 break;
1621 case BUILT_IN_CEIL:
1622 case BUILT_IN_CEILF:
1623 case BUILT_IN_CEILL:
1624 fcode = BUILT_IN_CEILL;
1625 break;
1626 case BUILT_IN_TRUNC:
1627 case BUILT_IN_TRUNCF:
1628 case BUILT_IN_TRUNCL:
1629 fcode = BUILT_IN_TRUNCL;
1630 break;
1631 case BUILT_IN_ROUND:
1632 case BUILT_IN_ROUNDF:
1633 case BUILT_IN_ROUNDL:
1634 fcode = BUILT_IN_ROUNDL;
1635 break;
1636 case BUILT_IN_NEARBYINT:
1637 case BUILT_IN_NEARBYINTF:
1638 case BUILT_IN_NEARBYINTL:
1639 fcode = BUILT_IN_NEARBYINTL;
1640 break;
1641 default:
1642 abort ();
1643 }
1644 return implicit_built_in_decls[fcode];
1645 }
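/* Editor's note (illustrative, not part of the original source): for
   example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) yields
   implicit_built_in_decls[BUILT_IN_SQRTF], the float variant of the
   requested function.  */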
1646
1647 /* If errno must be maintained, expand the RTL to check if the result,
1648 TARGET, of a built-in function call, EXP, is NaN, and if so set
1649 errno to EDOM. */
1650
1651 static void
1652 expand_errno_check (exp, target)
1653 tree exp;
1654 rtx target;
1655 {
1656 rtx lab;
1657
1658 if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
1659 {
1660 lab = gen_label_rtx ();
1661
1662 /* Test the result; if it is NaN, set errno=EDOM because
1663 the argument was not in the domain. */
1664 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1665 0, lab);
1666
1667 #ifdef TARGET_EDOM
1668 {
1669 #ifdef GEN_ERRNO_RTX
1670 rtx errno_rtx = GEN_ERRNO_RTX;
1671 #else
1672 rtx errno_rtx
1673 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1674 #endif
1675
1676 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1677 }
1678 #else
1679 /* We can't set errno=EDOM directly; let the library call do it.
1680 Pop the arguments right away in case the call gets deleted. */
1681 NO_DEFER_POP;
1682 expand_call (exp, target, 0);
1683 OK_DEFER_POP;
1684 #endif
1685
1686 emit_label (lab);
1687 }
1688 }
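/* Editor's illustration (a hedged sketch, not part of the original file;
   the function name is hypothetical) of the behavior the check preserves
   when the call is expanded inline; with -fno-math-errno the comparison
   above is omitted entirely.  */
#if 0
#include <errno.h>
#include <math.h>

static int
example_errno_check (double x)
{
  double r;

  errno = 0;
  r = sqrt (x);                    /* May be expanded via sqrt_optab...  */
  return r != r && errno == EDOM;  /* ...yet a NaN result must still set
                                      errno to EDOM.  */
}
#endif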
1689
1690
1691 /* Expand a call to one of the builtin unary math functions (sin, cos, sqrt, etc.).
1692 Return 0 if a normal call should be emitted rather than expanding the
1693 function in-line. EXP is the expression that is a call to the builtin
1694 function; if convenient, the result should be placed in TARGET.
1695 SUBTARGET may be used as the target for computing one of EXP's operands. */
1696
1697 static rtx
1698 expand_builtin_mathfn (exp, target, subtarget)
1699 tree exp;
1700 rtx target, subtarget;
1701 {
1702 optab builtin_optab;
1703 rtx op0, insns;
1704 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1705 tree arglist = TREE_OPERAND (exp, 1);
1706 enum machine_mode argmode;
1707 bool errno_set = true;
1708
1709 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1710 return 0;
1711
1712 /* Stabilize and compute the argument. */
1713 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1714 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1715 {
1716 exp = copy_node (exp);
1717 TREE_OPERAND (exp, 1) = arglist;
1718 /* Wrap the computation of the argument in a SAVE_EXPR. That
1719 way, if we need to expand the argument again (as in the
1720 flag_errno_math case below where we cannot directly set
1721 errno), we will not perform side-effects more than once.
1722 Note that here we're mutating the original EXP as well as the
1723 copy; that's the right thing to do in case the original EXP
1724 is expanded later. */
1725 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1726 arglist = copy_node (arglist);
1727 }
1728 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1729
1730 /* Make a suitable register to place result in. */
1731 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1732
1733 emit_queue ();
1734 start_sequence ();
1735
1736 switch (DECL_FUNCTION_CODE (fndecl))
1737 {
1738 case BUILT_IN_SIN:
1739 case BUILT_IN_SINF:
1740 case BUILT_IN_SINL:
1741 builtin_optab = sin_optab; break;
1742 case BUILT_IN_COS:
1743 case BUILT_IN_COSF:
1744 case BUILT_IN_COSL:
1745 builtin_optab = cos_optab; break;
1746 case BUILT_IN_SQRT:
1747 case BUILT_IN_SQRTF:
1748 case BUILT_IN_SQRTL:
1749 builtin_optab = sqrt_optab; break;
1750 case BUILT_IN_EXP:
1751 case BUILT_IN_EXPF:
1752 case BUILT_IN_EXPL:
1753 builtin_optab = exp_optab; break;
1754 case BUILT_IN_LOG:
1755 case BUILT_IN_LOGF:
1756 case BUILT_IN_LOGL:
1757 builtin_optab = log_optab; break;
1758 case BUILT_IN_FLOOR:
1759 case BUILT_IN_FLOORF:
1760 case BUILT_IN_FLOORL:
1761 errno_set = false; builtin_optab = floor_optab; break;
1762 case BUILT_IN_CEIL:
1763 case BUILT_IN_CEILF:
1764 case BUILT_IN_CEILL:
1765 errno_set = false; builtin_optab = ceil_optab; break;
1766 case BUILT_IN_TRUNC:
1767 case BUILT_IN_TRUNCF:
1768 case BUILT_IN_TRUNCL:
1769 errno_set = false; builtin_optab = trunc_optab; break;
1770 case BUILT_IN_ROUND:
1771 case BUILT_IN_ROUNDF:
1772 case BUILT_IN_ROUNDL:
1773 errno_set = false; builtin_optab = round_optab; break;
1774 case BUILT_IN_NEARBYINT:
1775 case BUILT_IN_NEARBYINTF:
1776 case BUILT_IN_NEARBYINTL:
1777 errno_set = false; builtin_optab = nearbyint_optab; break;
1778 default:
1779 abort ();
1780 }
1781
1782 /* Compute into TARGET.
1783 Set TARGET to wherever the result comes back. */
1784 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1785 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1786
1787 /* If we were unable to expand via the builtin, stop the
1788 sequence (without outputting the insns) and return 0, causing
1789 a call to the library function. */
1790 if (target == 0)
1791 {
1792 end_sequence ();
1793 return 0;
1794 }
1795
1796 if (errno_set)
1797 expand_errno_check (exp, target);
1798
1799 /* Output the entire sequence. */
1800 insns = get_insns ();
1801 end_sequence ();
1802 emit_insn (insns);
1803
1804 return target;
1805 }
1806
1807 /* Expand a call to the builtin binary math functions (pow and atan2).
1808 Return 0 if a normal call should be emitted rather than expanding the
1809 function in-line. EXP is the expression that is a call to the builtin
1810 function; if convenient, the result should be placed in TARGET.
1811 SUBTARGET may be used as the target for computing one of EXP's
1812 operands. */
1813
1814 static rtx
1815 expand_builtin_mathfn_2 (exp, target, subtarget)
1816 tree exp;
1817 rtx target, subtarget;
1818 {
1819 optab builtin_optab;
1820 rtx op0, op1, insns;
1821 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1822 tree arglist = TREE_OPERAND (exp, 1);
1823 tree arg0, arg1;
1824 enum machine_mode argmode;
1825 bool errno_set = true;
1826 bool stable = true;
1827
1828 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1829 return 0;
1830
1831 arg0 = TREE_VALUE (arglist);
1832 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1833
1834 /* Stabilize the arguments. */
1835 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
1836 {
1837 arg0 = save_expr (arg0);
1838 TREE_VALUE (arglist) = arg0;
1839 stable = false;
1840 }
1841 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
1842 {
1843 arg1 = save_expr (arg1);
1844 TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
1845 stable = false;
1846 }
1847
1848 if (! stable)
1849 {
1850 exp = copy_node (exp);
1851 arglist = tree_cons (NULL_TREE, arg0,
1852 build_tree_list (NULL_TREE, arg1));
1853 TREE_OPERAND (exp, 1) = arglist;
1854 }
1855
1856 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1857 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1858
1859 /* Make a suitable register to place result in. */
1860 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1861
1862 emit_queue ();
1863 start_sequence ();
1864
1865 switch (DECL_FUNCTION_CODE (fndecl))
1866 {
1867 case BUILT_IN_POW:
1868 case BUILT_IN_POWF:
1869 case BUILT_IN_POWL:
1870 builtin_optab = pow_optab; break;
1871 case BUILT_IN_ATAN2:
1872 case BUILT_IN_ATAN2F:
1873 case BUILT_IN_ATAN2L:
1874 builtin_optab = atan2_optab; break;
1875 default:
1876 abort ();
1877 }
1878
1879 /* Compute into TARGET.
1880 Set TARGET to wherever the result comes back. */
1881 argmode = TYPE_MODE (TREE_TYPE (arg0));
1882 target = expand_binop (argmode, builtin_optab, op0, op1,
1883 target, 0, OPTAB_DIRECT);
1884
1885 /* If we were unable to expand via the builtin, stop the
1886 sequence (without outputting the insns) and return 0, causing
1887 a call to the library function. */
1888 if (target == 0)
1889 {
1890 end_sequence ();
1891 return 0;
1892 }
1893
1894 if (errno_set)
1895 expand_errno_check (exp, target);
1896
1897 /* Output the entire sequence. */
1898 insns = get_insns ();
1899 end_sequence ();
1900 emit_insn (insns);
1901
1902 return target;
1903 }
1904
1905 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
1906 if we failed and the caller should emit a normal call; otherwise
1907 try to get the result in TARGET, if convenient. */
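/* In rough terms: a constant argument such as strlen ("abc") is folded
   to 3 via c_strlen; otherwise, if the target provides a strlenM-style
   insn pattern (reached through strlen_optab), that pattern is emitted,
   and failing both we return 0 so that a normal library call is made.  */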
1908
1909 static rtx
1910 expand_builtin_strlen (exp, target)
1911 tree exp;
1912 rtx target;
1913 {
1914 tree arglist = TREE_OPERAND (exp, 1);
1915 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1916
1917 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1918 return 0;
1919 else
1920 {
1921 rtx pat;
1922 tree len, src = TREE_VALUE (arglist);
1923 rtx result, src_reg, char_rtx, before_strlen;
1924 enum machine_mode insn_mode = value_mode, char_mode;
1925 enum insn_code icode = CODE_FOR_nothing;
1926 int align;
1927
1928 /* If the length can be computed at compile-time, return it. */
1929 len = c_strlen (src);
1930 if (len)
1931 return expand_expr (len, target, value_mode, EXPAND_NORMAL);
1932
1933 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1934
1935 /* If SRC is not a pointer type, don't do this operation inline. */
1936 if (align == 0)
1937 return 0;
1938
1939 /* Bail out if we can't compute strlen in the right mode. */
1940 while (insn_mode != VOIDmode)
1941 {
1942 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1943 if (icode != CODE_FOR_nothing)
1944 break;
1945
1946 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1947 }
1948 if (insn_mode == VOIDmode)
1949 return 0;
1950
1951 /* Make a place to write the result of the instruction. */
1952 result = target;
1953 if (! (result != 0
1954 && GET_CODE (result) == REG
1955 && GET_MODE (result) == insn_mode
1956 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1957 result = gen_reg_rtx (insn_mode);
1958
1959 /* Make a place to hold the source address. We will not expand
1960 the actual source until we are sure that the expansion will
1961 not fail -- there are trees that cannot be expanded twice. */
1962 src_reg = gen_reg_rtx (Pmode);
1963
1964 /* Mark the beginning of the strlen sequence so we can emit the
1965 source operand later. */
1966 before_strlen = get_last_insn ();
1967
1968 char_rtx = const0_rtx;
1969 char_mode = insn_data[(int) icode].operand[2].mode;
1970 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1971 char_mode))
1972 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1973
1974 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1975 char_rtx, GEN_INT (align));
1976 if (! pat)
1977 return 0;
1978 emit_insn (pat);
1979
1980 /* Now that we are assured of success, expand the source. */
1981 start_sequence ();
1982 pat = memory_address (BLKmode,
1983 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1984 if (pat != src_reg)
1985 emit_move_insn (src_reg, pat);
1986 pat = get_insns ();
1987 end_sequence ();
1988
1989 if (before_strlen)
1990 emit_insn_after (pat, before_strlen);
1991 else
1992 emit_insn_before (pat, get_insns ());
1993
1994 /* Return the value in the proper mode for this function. */
1995 if (GET_MODE (result) == value_mode)
1996 target = result;
1997 else if (target != 0)
1998 convert_move (target, result, 0);
1999 else
2000 target = convert_to_mode (value_mode, result, 0);
2001
2002 return target;
2003 }
2004 }
2005
2006 /* Expand a call to the strstr builtin. Return 0 if we failed and the
2007 caller should emit a normal call; otherwise try to get the result
2008 in TARGET, if convenient (and in mode MODE if that's convenient). */
2009
2010 static rtx
2011 expand_builtin_strstr (arglist, target, mode)
2012 tree arglist;
2013 rtx target;
2014 enum machine_mode mode;
2015 {
2016 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2017 return 0;
2018 else
2019 {
2020 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2021 tree fn;
2022 const char *p1, *p2;
2023
2024 p2 = c_getstr (s2);
2025 if (p2 == NULL)
2026 return 0;
2027
2028 p1 = c_getstr (s1);
2029 if (p1 != NULL)
2030 {
2031 const char *r = strstr (p1, p2);
2032
2033 if (r == NULL)
2034 return const0_rtx;
2035
2036 /* Return an offset into the constant string argument. */
2037 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2038 s1, ssize_int (r - p1))),
2039 target, mode, EXPAND_NORMAL);
2040 }
2041
2042 if (p2[0] == '\0')
2043 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2044
2045 if (p2[1] != '\0')
2046 return 0;
2047
2048 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2049 if (!fn)
2050 return 0;
2051
2052 /* New argument list transforming strstr(s1, s2) to
2053 strchr(s1, s2[0]). */
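/* E.g. strstr (s, "/") becomes strchr (s, '/').  Only a single-character
   constant needle is handled here; longer needles were rejected above
   and go through the normal strstr call.  */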
2054 arglist =
2055 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2056 arglist = tree_cons (NULL_TREE, s1, arglist);
2057 return expand_expr (build_function_call_expr (fn, arglist),
2058 target, mode, EXPAND_NORMAL);
2059 }
2060 }
2061
2062 /* Expand a call to the strchr builtin. Return 0 if we failed and the
2063 caller should emit a normal call; otherwise try to get the result
2064 in TARGET, if convenient (and in mode MODE if that's convenient). */
2065
2066 static rtx
2067 expand_builtin_strchr (arglist, target, mode)
2068 tree arglist;
2069 rtx target;
2070 enum machine_mode mode;
2071 {
2072 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2073 return 0;
2074 else
2075 {
2076 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2077 const char *p1;
2078
2079 if (TREE_CODE (s2) != INTEGER_CST)
2080 return 0;
2081
2082 p1 = c_getstr (s1);
2083 if (p1 != NULL)
2084 {
2085 char c;
2086 const char *r;
2087
2088 if (target_char_cast (s2, &c))
2089 return 0;
2090
2091 r = strchr (p1, c);
2092
2093 if (r == NULL)
2094 return const0_rtx;
2095
2096 /* Return an offset into the constant string argument. */
2097 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2098 s1, ssize_int (r - p1))),
2099 target, mode, EXPAND_NORMAL);
2100 }
2101
2102 /* FIXME: Should use a strchrM optab here so that ports can optimize
2103 this. */
2104 return 0;
2105 }
2106 }
2107
2108 /* Expand a call to the strrchr builtin. Return 0 if we failed and the
2109 caller should emit a normal call; otherwise try to get the result
2110 in TARGET, if convenient (and in mode MODE if that's convenient). */
2111
2112 static rtx
2113 expand_builtin_strrchr (arglist, target, mode)
2114 tree arglist;
2115 rtx target;
2116 enum machine_mode mode;
2117 {
2118 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2119 return 0;
2120 else
2121 {
2122 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2123 tree fn;
2124 const char *p1;
2125
2126 if (TREE_CODE (s2) != INTEGER_CST)
2127 return 0;
2128
2129 p1 = c_getstr (s1);
2130 if (p1 != NULL)
2131 {
2132 char c;
2133 const char *r;
2134
2135 if (target_char_cast (s2, &c))
2136 return 0;
2137
2138 r = strrchr (p1, c);
2139
2140 if (r == NULL)
2141 return const0_rtx;
2142
2143 /* Return an offset into the constant string argument. */
2144 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2145 s1, ssize_int (r - p1))),
2146 target, mode, EXPAND_NORMAL);
2147 }
2148
2149 if (! integer_zerop (s2))
2150 return 0;
2151
2152 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2153 if (!fn)
2154 return 0;
2155
2156 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2157 return expand_expr (build_function_call_expr (fn, arglist),
2158 target, mode, EXPAND_NORMAL);
2159 }
2160 }
2161
2162 /* Expand a call to the strpbrk builtin. Return 0 if we failed and the
2163 caller should emit a normal call; otherwise try to get the result
2164 in TARGET, if convenient (and in mode MODE if that's convenient). */
2165
2166 static rtx
2167 expand_builtin_strpbrk (arglist, target, mode)
2168 tree arglist;
2169 rtx target;
2170 enum machine_mode mode;
2171 {
2172 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2173 return 0;
2174 else
2175 {
2176 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2177 tree fn;
2178 const char *p1, *p2;
2179
2180 p2 = c_getstr (s2);
2181 if (p2 == NULL)
2182 return 0;
2183
2184 p1 = c_getstr (s1);
2185 if (p1 != NULL)
2186 {
2187 const char *r = strpbrk (p1, p2);
2188
2189 if (r == NULL)
2190 return const0_rtx;
2191
2192 /* Return an offset into the constant string argument. */
2193 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2194 s1, ssize_int (r - p1))),
2195 target, mode, EXPAND_NORMAL);
2196 }
2197
2198 if (p2[0] == '\0')
2199 {
2200 /* strpbrk(x, "") == NULL.
2201 Evaluate and ignore the arguments in case they have
2202 side-effects. */
2203 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2204 return const0_rtx;
2205 }
2206
2207 if (p2[1] != '\0')
2208 return 0; /* Really call strpbrk. */
2209
2210 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2211 if (!fn)
2212 return 0;
2213
2214 /* New argument list transforming strpbrk(s1, s2) to
2215 strchr(s1, s2[0]). */
2216 arglist =
2217 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2218 arglist = tree_cons (NULL_TREE, s1, arglist);
2219 return expand_expr (build_function_call_expr (fn, arglist),
2220 target, mode, EXPAND_NORMAL);
2221 }
2222 }
2223
2224 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2225 bytes from constant string DATA + OFFSET and return it as target
2226 constant. */
2227
2228 static rtx
2229 builtin_memcpy_read_str (data, offset, mode)
2230 PTR data;
2231 HOST_WIDE_INT offset;
2232 enum machine_mode mode;
2233 {
2234 const char *str = (const char *) data;
2235
2236 if (offset < 0
2237 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2238 > strlen (str) + 1))
2239 abort (); /* Attempt to read past the end of constant string. */
2240
2241 return c_readstr (str + offset, mode);
2242 }
2243
2244 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2245 Return 0 if we failed and the caller should emit a normal call;
2246 otherwise try to get the result in TARGET, if convenient (and in
2247 mode MODE if that's convenient). If ENDP is 0 return the
2248 destination pointer, if ENDP is 1 return the end pointer ala
2249 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2250 stpcpy. */
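/* Concretely, for memcpy (d, s, n) the three cases return d (ENDP == 0),
   d + n (ENDP == 1, mempcpy) and d + n - 1 (ENDP == 2, stpcpy).  */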
2251 static rtx
2252 expand_builtin_memcpy (arglist, target, mode, endp)
2253 tree arglist;
2254 rtx target;
2255 enum machine_mode mode;
2256 int endp;
2257 {
2258 if (!validate_arglist (arglist,
2259 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2260 return 0;
2261 else
2262 {
2263 tree dest = TREE_VALUE (arglist);
2264 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2265 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2266 const char *src_str;
2267
2268 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2269 unsigned int dest_align
2270 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2271 rtx dest_mem, src_mem, dest_addr, len_rtx;
2272
2273 /* If DEST is not a pointer type, call the normal function. */
2274 if (dest_align == 0)
2275 return 0;
2276
2277 /* If the LEN parameter is zero, return DEST. */
2278 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2279 {
2280 /* Evaluate and ignore SRC in case it has side-effects. */
2281 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2282 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2283 }
2284
2285 /* If SRC is not a pointer type, don't do this
2286 operation in-line. */
2287 if (src_align == 0)
2288 return 0;
2289
2290 dest_mem = get_memory_rtx (dest);
2291 set_mem_align (dest_mem, dest_align);
2292 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2293 src_str = c_getstr (src);
2294
2295 /* If SRC is a string constant and block move would be done
2296 by pieces, we can avoid loading the string from memory
2297 and only store the computed constants. */
2298 if (src_str
2299 && GET_CODE (len_rtx) == CONST_INT
2300 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2301 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2302 (PTR) src_str, dest_align))
2303 {
2304 store_by_pieces (dest_mem, INTVAL (len_rtx),
2305 builtin_memcpy_read_str,
2306 (PTR) src_str, dest_align);
2307 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2308 #ifdef POINTERS_EXTEND_UNSIGNED
2309 if (GET_MODE (dest_mem) != ptr_mode)
2310 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2311 #endif
2312 if (endp)
2313 {
2314 rtx result = gen_rtx_PLUS (GET_MODE (dest_mem), dest_mem, len_rtx);
2315 if (endp == 2)
2316 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2317 return result;
2318 }
2319 else
2320 return dest_mem;
2321 }
2322
2323 src_mem = get_memory_rtx (src);
2324 set_mem_align (src_mem, src_align);
2325
2326 /* Copy word part most expediently. */
2327 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2328 BLOCK_OP_NORMAL);
2329
2330 if (dest_addr == 0)
2331 {
2332 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2333 #ifdef POINTERS_EXTEND_UNSIGNED
2334 if (GET_MODE (dest_addr) != ptr_mode)
2335 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2336 #endif
2337 }
2338
2339 if (endp)
2340 {
2341 rtx result = gen_rtx_PLUS (GET_MODE (dest_addr), dest_addr, len_rtx);
2342 if (endp == 2)
2343 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2344 return result;
2345 }
2346 else
2347 return dest_addr;
2348 }
2349 }
2350
2351 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2352 if we failed and the caller should emit a normal call. */
2353
2354 static rtx
2355 expand_builtin_memmove (arglist, target, mode)
2356 tree arglist;
2357 rtx target;
2358 enum machine_mode mode;
2359 {
2360 if (!validate_arglist (arglist,
2361 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2362 return 0;
2363 else
2364 {
2365 tree dest = TREE_VALUE (arglist);
2366 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2367 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2368
2369 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2370 unsigned int dest_align
2371 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2372
2373 /* If DEST is not a pointer type, call the normal function. */
2374 if (dest_align == 0)
2375 return 0;
2376
2377 /* If the LEN parameter is zero, return DEST. */
2378 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2379 {
2380 /* Evaluate and ignore SRC in case it has side-effects. */
2381 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2382 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2383 }
2384
2385 /* If SRC is not a pointer type, don't do this
2386 operation in-line. */
2387 if (src_align == 0)
2388 return 0;
2389
2390 /* If src is a string constant and strings are not writable,
2391 we can use normal memcpy. */
2392 if (!flag_writable_strings && c_getstr (src))
2393 return expand_builtin_memcpy (arglist, target, mode, 0);
2394
2395 /* Otherwise, call the normal function. */
2396 return 0;
2397 }
2398 }
2399
2400 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2401 if we failed and the caller should emit a normal call. */
2402
2403 static rtx
2404 expand_builtin_bcopy (arglist)
2405 tree arglist;
2406 {
2407 tree src, dest, size, newarglist;
2408
2409 if (!validate_arglist (arglist,
2410 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2411 return NULL_RTX;
2412
2413 src = TREE_VALUE (arglist);
2414 dest = TREE_VALUE (TREE_CHAIN (arglist));
2415 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2416
2417 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2418 memmove(ptr y, ptr x, size_t z). This is done this way
2419 so that if it isn't expanded inline, we fall back to
2420 calling bcopy instead of memmove. */
2421
2422 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2423 newarglist = tree_cons (NULL_TREE, src, newarglist);
2424 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2425
2426 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2427 }
2428
2429 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2430 if we failed and the caller should emit a normal call; otherwise try to get
2431 the result in TARGET, if convenient (and in mode MODE if that's
2432 convenient). */
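/* A sketch of the transformation: when c_strlen can compute the length
   of the source, strcpy (d, "hi") becomes memcpy (d, "hi", 3), i.e. the
   string length plus one so the terminating NUL is copied as well.  */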
2433
2434 static rtx
2435 expand_builtin_strcpy (exp, target, mode)
2436 tree exp;
2437 rtx target;
2438 enum machine_mode mode;
2439 {
2440 tree arglist = TREE_OPERAND (exp, 1);
2441 tree fn, len;
2442
2443 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2444 return 0;
2445
2446 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2447 if (!fn)
2448 return 0;
2449
2450 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2451 if (len == 0)
2452 return 0;
2453
2454 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2455 chainon (arglist, build_tree_list (NULL_TREE, len));
2456 return expand_expr (build_function_call_expr (fn, arglist),
2457 target, mode, EXPAND_NORMAL);
2458 }
2459
2460 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2461 Return 0 if we failed and the caller should emit a normal call;
2462 otherwise try to get the result in TARGET, if convenient (and in
2463 mode MODE if that's convenient). */
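/* For example, stpcpy (d, "hi") is handled by the memcpy expansion above
   with ENDP == 2 and length 3; its return value d + 2 points at the NUL
   just stored in the destination, as stpcpy requires.  */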
2464
2465 static rtx
2466 expand_builtin_stpcpy (arglist, target, mode)
2467 tree arglist;
2468 rtx target;
2469 enum machine_mode mode;
2470 {
2471 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2472 return 0;
2473 else
2474 {
2475 tree newarglist;
2476 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2477 if (len == 0)
2478 return 0;
2479
2480 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2481 newarglist = copy_list (arglist);
2482 chainon (newarglist, build_tree_list (NULL_TREE, len));
2483 return expand_builtin_memcpy (newarglist, target, mode, /*endp=*/2);
2484 }
2485 }
2486
2487 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2488 bytes from constant string DATA + OFFSET and return it as target
2489 constant. */
2490
2491 static rtx
2492 builtin_strncpy_read_str (data, offset, mode)
2493 PTR data;
2494 HOST_WIDE_INT offset;
2495 enum machine_mode mode;
2496 {
2497 const char *str = (const char *) data;
2498
2499 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2500 return const0_rtx;
2501
2502 return c_readstr (str + offset, mode);
2503 }
2504
2505 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2506 if we failed and the caller should emit a normal call. */
2507
2508 static rtx
2509 expand_builtin_strncpy (arglist, target, mode)
2510 tree arglist;
2511 rtx target;
2512 enum machine_mode mode;
2513 {
2514 if (!validate_arglist (arglist,
2515 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2516 return 0;
2517 else
2518 {
2519 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2520 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2521 tree fn;
2522
2523 /* We must be passed a constant len parameter. */
2524 if (TREE_CODE (len) != INTEGER_CST)
2525 return 0;
2526
2527 /* If the len parameter is zero, return the dst parameter. */
2528 if (integer_zerop (len))
2529 {
2530 /* Evaluate and ignore the src argument in case it has
2531 side-effects. */
2532 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2533 VOIDmode, EXPAND_NORMAL);
2534 /* Return the dst parameter. */
2535 return expand_expr (TREE_VALUE (arglist), target, mode,
2536 EXPAND_NORMAL);
2537 }
2538
2539 /* Now, we must be passed a constant src ptr parameter. */
2540 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2541 return 0;
2542
2543 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2544
2545 /* We're required to pad with trailing zeros if the requested
2546 len is greater than strlen(s2)+1. In that case try to
2547 use store_by_pieces; if it fails, punt. */
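/* For instance, strncpy (buf, "ab", 8) stores 'a', 'b' and six NUL
   bytes; builtin_strncpy_read_str (together with c_readstr) supplies
   zero bytes beyond the end of the source string, which provides the
   required padding.  */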
2548 if (tree_int_cst_lt (slen, len))
2549 {
2550 tree dest = TREE_VALUE (arglist);
2551 unsigned int dest_align
2552 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2553 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2554 rtx dest_mem;
2555
2556 if (!p || dest_align == 0 || !host_integerp (len, 1)
2557 || !can_store_by_pieces (tree_low_cst (len, 1),
2558 builtin_strncpy_read_str,
2559 (PTR) p, dest_align))
2560 return 0;
2561
2562 dest_mem = get_memory_rtx (dest);
2563 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2564 builtin_strncpy_read_str,
2565 (PTR) p, dest_align);
2566 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2567 #ifdef POINTERS_EXTEND_UNSIGNED
2568 if (GET_MODE (dest_mem) != ptr_mode)
2569 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2570 #endif
2571 return dest_mem;
2572 }
2573
2574 /* OK, transform into builtin memcpy. */
2575 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2576 if (!fn)
2577 return 0;
2578 return expand_expr (build_function_call_expr (fn, arglist),
2579 target, mode, EXPAND_NORMAL);
2580 }
2581 }
2582
2583 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2584 bytes from constant string DATA + OFFSET and return it as target
2585 constant. */
2586
2587 static rtx
2588 builtin_memset_read_str (data, offset, mode)
2589 PTR data;
2590 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2591 enum machine_mode mode;
2592 {
2593 const char *c = (const char *) data;
2594 char *p = alloca (GET_MODE_SIZE (mode));
2595
2596 memset (p, *c, GET_MODE_SIZE (mode));
2597
2598 return c_readstr (p, mode);
2599 }
2600
2601 /* Callback routine for store_by_pieces. Return the RTL of a register
2602 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2603 char value given in the RTL register data. For example, if mode is
2604 4 bytes wide, return the RTL for 0x01010101*data. */
2605
2606 static rtx
2607 builtin_memset_gen_str (data, offset, mode)
2608 PTR data;
2609 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2610 enum machine_mode mode;
2611 {
2612 rtx target, coeff;
2613 size_t size;
2614 char *p;
2615
2616 size = GET_MODE_SIZE (mode);
2617 if (size == 1)
2618 return (rtx) data;
2619
2620 p = alloca (size);
2621 memset (p, 1, size);
2622 coeff = c_readstr (p, mode);
2623
2624 target = convert_to_mode (mode, (rtx) data, 1);
2625 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2626 return force_reg (mode, target);
2627 }
2628
2629 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2630 if we failed and the caller should emit a normal call; otherwise try to get
2631 the result in TARGET, if convenient (and in mode MODE if that's
2632 convenient). */
2633
2634 static rtx
2635 expand_builtin_memset (exp, target, mode)
2636 tree exp;
2637 rtx target;
2638 enum machine_mode mode;
2639 {
2640 tree arglist = TREE_OPERAND (exp, 1);
2641
2642 if (!validate_arglist (arglist,
2643 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2644 return 0;
2645 else
2646 {
2647 tree dest = TREE_VALUE (arglist);
2648 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2649 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2650 char c;
2651
2652 unsigned int dest_align
2653 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2654 rtx dest_mem, dest_addr, len_rtx;
2655
2656 /* If DEST is not a pointer type, don't do this
2657 operation in-line. */
2658 if (dest_align == 0)
2659 return 0;
2660
2661 /* If the LEN parameter is zero, return DEST. */
2662 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2663 {
2664 /* Evaluate and ignore VAL in case it has side-effects. */
2665 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2666 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2667 }
2668
2669 if (TREE_CODE (val) != INTEGER_CST)
2670 {
2671 rtx val_rtx;
2672
2673 if (!host_integerp (len, 1))
2674 return 0;
2675
2676 if (optimize_size && tree_low_cst (len, 1) > 1)
2677 return 0;
2678
2679 /* Assume that we can memset by pieces if we can store the
2680 coefficients by pieces (in the required modes).
2681 We can't pass builtin_memset_gen_str as that emits RTL. */
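/* Sketch: memset (p, c, 16) with a variable C is then expanded as word
   stores of C replicated into every byte (e.g. C * 0x01010101 for a
   4-byte word; see builtin_memset_gen_str).  The dummy byte value 1
   below is used only to ask can_store_by_pieces whether such stores
   are possible.  */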
2682 c = 1;
2683 if (!can_store_by_pieces (tree_low_cst (len, 1),
2684 builtin_memset_read_str,
2685 (PTR) &c, dest_align))
2686 return 0;
2687
2688 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2689 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2690 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2691 val_rtx);
2692 dest_mem = get_memory_rtx (dest);
2693 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2694 builtin_memset_gen_str,
2695 (PTR) val_rtx, dest_align);
2696 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2697 #ifdef POINTERS_EXTEND_UNSIGNED
2698 if (GET_MODE (dest_mem) != ptr_mode)
2699 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2700 #endif
2701 return dest_mem;
2702 }
2703
2704 if (target_char_cast (val, &c))
2705 return 0;
2706
2707 if (c)
2708 {
2709 if (!host_integerp (len, 1))
2710 return 0;
2711 if (!can_store_by_pieces (tree_low_cst (len, 1),
2712 builtin_memset_read_str, (PTR) &c,
2713 dest_align))
2714 return 0;
2715
2716 dest_mem = get_memory_rtx (dest);
2717 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2718 builtin_memset_read_str,
2719 (PTR) &c, dest_align);
2720 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2721 #ifdef POINTERS_EXTEND_UNSIGNED
2722 if (GET_MODE (dest_mem) != ptr_mode)
2723 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2724 #endif
2725 return dest_mem;
2726 }
2727
2728 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2729
2730 dest_mem = get_memory_rtx (dest);
2731 set_mem_align (dest_mem, dest_align);
2732 dest_addr = clear_storage (dest_mem, len_rtx);
2733
2734 if (dest_addr == 0)
2735 {
2736 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2737 #ifdef POINTERS_EXTEND_UNSIGNED
2738 if (GET_MODE (dest_addr) != ptr_mode)
2739 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2740 #endif
2741 }
2742
2743 return dest_addr;
2744 }
2745 }
2746
2747 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2748 if we failed and the caller should emit a normal call. */
2749
2750 static rtx
2751 expand_builtin_bzero (exp)
2752 tree exp;
2753 {
2754 tree arglist = TREE_OPERAND (exp, 1);
2755 tree dest, size, newarglist;
2756 rtx result;
2757
2758 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2759 return NULL_RTX;
2760
2761 dest = TREE_VALUE (arglist);
2762 size = TREE_VALUE (TREE_CHAIN (arglist));
2763
2764 /* New argument list transforming bzero(ptr x, int y) to
2765 memset(ptr x, int 0, size_t y). This is done this way
2766 so that if it isn't expanded inline, we fall back to
2767 calling bzero instead of memset. */
2768
2769 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2770 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2771 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2772
2773 TREE_OPERAND (exp, 1) = newarglist;
2774 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2775
2776 /* Always restore the original arguments. */
2777 TREE_OPERAND (exp, 1) = arglist;
2778
2779 return result;
2780 }
2781
2782 /* Expand expression EXP, which is a call to the memcmp built-in function.
2783 ARGLIST is the argument list for this call. Return 0 if we failed and the
2784 caller should emit a normal call; otherwise try to get the result in
2785 TARGET, if convenient (and in mode MODE, if that's convenient). */
2786
2787 static rtx
2788 expand_builtin_memcmp (exp, arglist, target, mode)
2789 tree exp ATTRIBUTE_UNUSED;
2790 tree arglist;
2791 rtx target;
2792 enum machine_mode mode;
2793 {
2794 tree arg1, arg2, len;
2795 const char *p1, *p2;
2796
2797 if (!validate_arglist (arglist,
2798 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2799 return 0;
2800
2801 arg1 = TREE_VALUE (arglist);
2802 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2803 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2804
2805 /* If the len parameter is zero, return zero. */
2806 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2807 {
2808 /* Evaluate and ignore arg1 and arg2 in case they have
2809 side-effects. */
2810 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2811 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2812 return const0_rtx;
2813 }
2814
2815 p1 = c_getstr (arg1);
2816 p2 = c_getstr (arg2);
2817
2818 /* If all arguments are constant, and the value of len is not greater
2819 than the lengths of arg1 and arg2, evaluate at compile-time. */
2820 if (host_integerp (len, 1) && p1 && p2
2821 && compare_tree_int (len, strlen (p1) + 1) <= 0
2822 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2823 {
2824 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2825
2826 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2827 }
2828
2829 /* If the len parameter is one, return an expression corresponding to
2830 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
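/* E.g. memcmp (p, q, 1) becomes the expression
   (int) *(const unsigned char *) p - (int) *(const unsigned char *) q,
   with no library call.  */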
2831 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2832 {
2833 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2834 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2835 tree ind1 =
2836 fold (build1 (CONVERT_EXPR, integer_type_node,
2837 build1 (INDIRECT_REF, cst_uchar_node,
2838 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2839 tree ind2 =
2840 fold (build1 (CONVERT_EXPR, integer_type_node,
2841 build1 (INDIRECT_REF, cst_uchar_node,
2842 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2843 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2844 return expand_expr (result, target, mode, EXPAND_NORMAL);
2845 }
2846
2847 #ifdef HAVE_cmpstrsi
2848 {
2849 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2850 rtx result;
2851 rtx insn;
2852
2853 int arg1_align
2854 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2855 int arg2_align
2856 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2857 enum machine_mode insn_mode
2858 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2859
2860 /* If we don't have POINTER_TYPE, call the function. */
2861 if (arg1_align == 0 || arg2_align == 0)
2862 return 0;
2863
2864 /* Make a place to write the result of the instruction. */
2865 result = target;
2866 if (! (result != 0
2867 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2868 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2869 result = gen_reg_rtx (insn_mode);
2870
2871 arg1_rtx = get_memory_rtx (arg1);
2872 arg2_rtx = get_memory_rtx (arg2);
2873 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2874 if (!HAVE_cmpstrsi)
2875 insn = NULL_RTX;
2876 else
2877 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2878 GEN_INT (MIN (arg1_align, arg2_align)));
2879
2880 if (insn)
2881 emit_insn (insn);
2882 else
2883 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2884 TYPE_MODE (integer_type_node), 3,
2885 XEXP (arg1_rtx, 0), Pmode,
2886 XEXP (arg2_rtx, 0), Pmode,
2887 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2888 TREE_UNSIGNED (sizetype)),
2889 TYPE_MODE (sizetype));
2890
2891 /* Return the value in the proper mode for this function. */
2892 mode = TYPE_MODE (TREE_TYPE (exp));
2893 if (GET_MODE (result) == mode)
2894 return result;
2895 else if (target != 0)
2896 {
2897 convert_move (target, result, 0);
2898 return target;
2899 }
2900 else
2901 return convert_to_mode (mode, result, 0);
2902 }
2903 #endif
2904
2905 return 0;
2906 }
2907
2908 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2909 if we failed and the caller should emit a normal call; otherwise try to get
2910 the result in TARGET, if convenient. */
2911
2912 static rtx
2913 expand_builtin_strcmp (exp, target, mode)
2914 tree exp;
2915 rtx target;
2916 enum machine_mode mode;
2917 {
2918 tree arglist = TREE_OPERAND (exp, 1);
2919 tree arg1, arg2;
2920 const char *p1, *p2;
2921
2922 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2923 return 0;
2924
2925 arg1 = TREE_VALUE (arglist);
2926 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2927
2928 p1 = c_getstr (arg1);
2929 p2 = c_getstr (arg2);
2930
2931 if (p1 && p2)
2932 {
2933 const int i = strcmp (p1, p2);
2934 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2935 }
2936
2937 /* If either arg is "", return an expression corresponding to
2938 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2939 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2940 {
2941 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2942 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2943 tree ind1 =
2944 fold (build1 (CONVERT_EXPR, integer_type_node,
2945 build1 (INDIRECT_REF, cst_uchar_node,
2946 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2947 tree ind2 =
2948 fold (build1 (CONVERT_EXPR, integer_type_node,
2949 build1 (INDIRECT_REF, cst_uchar_node,
2950 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2951 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2952 return expand_expr (result, target, mode, EXPAND_NORMAL);
2953 }
2954
2955 #ifdef HAVE_cmpstrsi
2956 if (HAVE_cmpstrsi)
2957 {
2958 tree len, len1, len2;
2959 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2960 rtx result, insn;
2961
2962 int arg1_align
2963 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2964 int arg2_align
2965 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2966 enum machine_mode insn_mode
2967 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2968
2969 len1 = c_strlen (arg1);
2970 len2 = c_strlen (arg2);
2971
2972 if (len1)
2973 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
2974 if (len2)
2975 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2976
2977 /* If we don't have a constant length for the first, use the length
2978 of the second, if we know it. We don't require a constant for
2979 this case; some cost analysis could be done if both are available
2980 but neither is constant. For now, assume they're equally cheap,
2981 unless one has side effects. If both strings have constant lengths,
2982 use the smaller. */
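/* As a sketch, for strcmp (s, "abc") only len2 is known; the compare is
   emitted with length 4 (strlen + 1, so the constant's NUL is included),
   which is enough to decide the ordering.  */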
2983
2984 if (!len1)
2985 len = len2;
2986 else if (!len2)
2987 len = len1;
2988 else if (TREE_SIDE_EFFECTS (len1))
2989 len = len2;
2990 else if (TREE_SIDE_EFFECTS (len2))
2991 len = len1;
2992 else if (TREE_CODE (len1) != INTEGER_CST)
2993 len = len2;
2994 else if (TREE_CODE (len2) != INTEGER_CST)
2995 len = len1;
2996 else if (tree_int_cst_lt (len1, len2))
2997 len = len1;
2998 else
2999 len = len2;
3000
3001 /* If both arguments have side effects, we cannot optimize. */
3002 if (!len || TREE_SIDE_EFFECTS (len))
3003 return 0;
3004
3005 /* If we don't have POINTER_TYPE, call the function. */
3006 if (arg1_align == 0 || arg2_align == 0)
3007 return 0;
3008
3009 /* Make a place to write the result of the instruction. */
3010 result = target;
3011 if (! (result != 0
3012 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3013 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3014 result = gen_reg_rtx (insn_mode);
3015
3016 arg1_rtx = get_memory_rtx (arg1);
3017 arg2_rtx = get_memory_rtx (arg2);
3018 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3019 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3020 GEN_INT (MIN (arg1_align, arg2_align)));
3021 if (!insn)
3022 return 0;
3023
3024 emit_insn (insn);
3025
3026 /* Return the value in the proper mode for this function. */
3027 mode = TYPE_MODE (TREE_TYPE (exp));
3028 if (GET_MODE (result) == mode)
3029 return result;
3030 if (target == 0)
3031 return convert_to_mode (mode, result, 0);
3032 convert_move (target, result, 0);
3033 return target;
3034 }
3035 #endif
3036 return 0;
3037 }
3038
3039 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3040 if we failed and the caller should emit a normal call; otherwise try to get
3041 the result in TARGET, if convenient. */
3042
3043 static rtx
3044 expand_builtin_strncmp (exp, target, mode)
3045 tree exp;
3046 rtx target;
3047 enum machine_mode mode;
3048 {
3049 tree arglist = TREE_OPERAND (exp, 1);
3050 tree arg1, arg2, arg3;
3051 const char *p1, *p2;
3052
3053 if (!validate_arglist (arglist,
3054 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3055 return 0;
3056
3057 arg1 = TREE_VALUE (arglist);
3058 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3059 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3060
3061 /* If the len parameter is zero, return zero. */
3062 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3063 {
3064 /* Evaluate and ignore arg1 and arg2 in case they have
3065 side-effects. */
3066 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3067 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3068 return const0_rtx;
3069 }
3070
3071 p1 = c_getstr (arg1);
3072 p2 = c_getstr (arg2);
3073
3074 /* If all arguments are constant, evaluate at compile-time. */
3075 if (host_integerp (arg3, 1) && p1 && p2)
3076 {
3077 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3078 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3079 }
3080
3081 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3082 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3083 if (host_integerp (arg3, 1)
3084 && (tree_low_cst (arg3, 1) == 1
3085 || (tree_low_cst (arg3, 1) > 1
3086 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3087 {
3088 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3089 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3090 tree ind1 =
3091 fold (build1 (CONVERT_EXPR, integer_type_node,
3092 build1 (INDIRECT_REF, cst_uchar_node,
3093 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3094 tree ind2 =
3095 fold (build1 (CONVERT_EXPR, integer_type_node,
3096 build1 (INDIRECT_REF, cst_uchar_node,
3097 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3098 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3099 return expand_expr (result, target, mode, EXPAND_NORMAL);
3100 }
3101
3102 /* If c_strlen can determine an expression for one of the string
3103 lengths, and it doesn't have side effects, then emit cmpstrsi
3104 using length MIN(strlen(string)+1, arg3). */
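/* For example, strncmp (s, "abc", 100) is compared using length
   MIN (3 + 1, 100) = 4.  */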
3105 #ifdef HAVE_cmpstrsi
3106 if (HAVE_cmpstrsi)
3107 {
3108 tree len, len1, len2;
3109 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3110 rtx result, insn;
3111
3112 int arg1_align
3113 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3114 int arg2_align
3115 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3116 enum machine_mode insn_mode
3117 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3118
3119 len1 = c_strlen (arg1);
3120 len2 = c_strlen (arg2);
3121
3122 if (len1)
3123 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3124 if (len2)
3125 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3126
3127 /* If we don't have a constant length for the first, use the length
3128 of the second, if we know it. We don't require a constant for
3129 this case; some cost analysis could be done if both are available
3130 but neither is constant. For now, assume they're equally cheap,
3131 unless one has side effects. If both strings have constant lengths,
3132 use the smaller. */
3133
3134 if (!len1)
3135 len = len2;
3136 else if (!len2)
3137 len = len1;
3138 else if (TREE_SIDE_EFFECTS (len1))
3139 len = len2;
3140 else if (TREE_SIDE_EFFECTS (len2))
3141 len = len1;
3142 else if (TREE_CODE (len1) != INTEGER_CST)
3143 len = len2;
3144 else if (TREE_CODE (len2) != INTEGER_CST)
3145 len = len1;
3146 else if (tree_int_cst_lt (len1, len2))
3147 len = len1;
3148 else
3149 len = len2;
3150
3151 /* If both arguments have side effects, we cannot optimize. */
3152 if (!len || TREE_SIDE_EFFECTS (len))
3153 return 0;
3154
3155 /* The actual new length parameter is MIN(len,arg3). */
3156 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3157
3158 /* If we don't have POINTER_TYPE, call the function. */
3159 if (arg1_align == 0 || arg2_align == 0)
3160 return 0;
3161
3162 /* Make a place to write the result of the instruction. */
3163 result = target;
3164 if (! (result != 0
3165 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3166 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3167 result = gen_reg_rtx (insn_mode);
3168
3169 arg1_rtx = get_memory_rtx (arg1);
3170 arg2_rtx = get_memory_rtx (arg2);
3171 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3172 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3173 GEN_INT (MIN (arg1_align, arg2_align)));
3174 if (!insn)
3175 return 0;
3176
3177 emit_insn (insn);
3178
3179 /* Return the value in the proper mode for this function. */
3180 mode = TYPE_MODE (TREE_TYPE (exp));
3181 if (GET_MODE (result) == mode)
3182 return result;
3183 if (target == 0)
3184 return convert_to_mode (mode, result, 0);
3185 convert_move (target, result, 0);
3186 return target;
3187 }
3188 #endif
3189 return 0;
3190 }
3191
3192 /* Expand expression EXP, which is a call to the strcat builtin.
3193 Return 0 if we failed and the caller should emit a normal call;
3194 otherwise try to get the result in TARGET, if convenient. */
3195
3196 static rtx
3197 expand_builtin_strcat (arglist, target, mode)
3198 tree arglist;
3199 rtx target;
3200 enum machine_mode mode;
3201 {
3202 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3203 return 0;
3204 else
3205 {
3206 tree dst = TREE_VALUE (arglist),
3207 src = TREE_VALUE (TREE_CHAIN (arglist));
3208 const char *p = c_getstr (src);
3209
3210 /* If the string length is zero, return the dst parameter. */
3211 if (p && *p == '\0')
3212 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3213
3214 return 0;
3215 }
3216 }
3217
3218 /* Expand expression EXP, which is a call to the strncat builtin.
3219 Return 0 if we failed and the caller should emit a normal call;
3220 otherwise try to get the result in TARGET, if convenient. */
3221
3222 static rtx
3223 expand_builtin_strncat (arglist, target, mode)
3224 tree arglist;
3225 rtx target;
3226 enum machine_mode mode;
3227 {
3228 if (!validate_arglist (arglist,
3229 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3230 return 0;
3231 else
3232 {
3233 tree dst = TREE_VALUE (arglist),
3234 src = TREE_VALUE (TREE_CHAIN (arglist)),
3235 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3236 const char *p = c_getstr (src);
3237
3238 /* If the requested length is zero, or the src parameter string
3239 length is zero, return the dst parameter. */
3240 if (integer_zerop (len) || (p && *p == '\0'))
3241 {
3242 /* Evaluate and ignore the src and len parameters in case
3243 they have side-effects. */
3244 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3245 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3246 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3247 }
3248
3249 /* If the requested len is greater than or equal to the string
3250 length, call strcat. */
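/* E.g. strncat (d, "ab", 5) appends at most strlen ("ab") == 2
   characters, so it behaves exactly like strcat (d, "ab").  */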
3251 if (TREE_CODE (len) == INTEGER_CST && p
3252 && compare_tree_int (len, strlen (p)) >= 0)
3253 {
3254 tree newarglist
3255 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3256 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3257
3258 /* If the replacement _DECL isn't initialized, don't do the
3259 transformation. */
3260 if (!fn)
3261 return 0;
3262
3263 return expand_expr (build_function_call_expr (fn, newarglist),
3264 target, mode, EXPAND_NORMAL);
3265 }
3266 return 0;
3267 }
3268 }
3269
3270 /* Expand expression EXP, which is a call to the strspn builtin.
3271 Return 0 if we failed and the caller should emit a normal call;
3272 otherwise try to get the result in TARGET, if convenient. */
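/* For instance, strspn ("abc123", "abc") folds to the constant 3, and
   strspn (s, "") folds to 0 after evaluating the arguments for their
   side effects.  */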
3273
3274 static rtx
3275 expand_builtin_strspn (arglist, target, mode)
3276 tree arglist;
3277 rtx target;
3278 enum machine_mode mode;
3279 {
3280 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3281 return 0;
3282 else
3283 {
3284 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3285 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3286
3287 /* If both arguments are constants, evaluate at compile-time. */
3288 if (p1 && p2)
3289 {
3290 const size_t r = strspn (p1, p2);
3291 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3292 }
3293
3294 /* If either argument is "", return 0. */
3295 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3296 {
3297 /* Evaluate and ignore both arguments in case either one has
3298 side-effects. */
3299 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3300 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3301 return const0_rtx;
3302 }
3303 return 0;
3304 }
3305 }
3306
3307 /* Expand expression EXP, which is a call to the strcspn builtin.
3308 Return 0 if we failed and the caller should emit a normal call;
3309 otherwise try to get the result in TARGET, if convenient. */
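/* For instance, strcspn ("ab;cd", ";") folds to the constant 2, and
   strcspn (s, "") is rewritten as __builtin_strlen (s).  */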
3310
3311 static rtx
3312 expand_builtin_strcspn (arglist, target, mode)
3313 tree arglist;
3314 rtx target;
3315 enum machine_mode mode;
3316 {
3317 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3318 return 0;
3319 else
3320 {
3321 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3322 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3323
3324 /* If both arguments are constants, evaluate at compile-time. */
3325 if (p1 && p2)
3326 {
3327 const size_t r = strcspn (p1, p2);
3328 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3329 }
3330
3331 /* If the first argument is "", return 0. */
3332 if (p1 && *p1 == '\0')
3333 {
3334 /* Evaluate and ignore argument s2 in case it has
3335 side-effects. */
3336 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3337 return const0_rtx;
3338 }
3339
3340 /* If the second argument is "", return __builtin_strlen(s1). */
3341 if (p2 && *p2 == '\0')
3342 {
3343 tree newarglist = build_tree_list (NULL_TREE, s1),
3344 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3345
3346 /* If the replacement _DECL isn't initialized, don't do the
3347 transformation. */
3348 if (!fn)
3349 return 0;
3350
3351 return expand_expr (build_function_call_expr (fn, newarglist),
3352 target, mode, EXPAND_NORMAL);
3353 }
3354 return 0;
3355 }
3356 }
3357
3358 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3359 if that's convenient. */
3360
3361 rtx
3362 expand_builtin_saveregs ()
3363 {
3364 rtx val, seq;
3365
3366 /* Don't do __builtin_saveregs more than once in a function.
3367 Save the result of the first call and reuse it. */
3368 if (saveregs_value != 0)
3369 return saveregs_value;
3370
3371 /* When this function is called, it means that registers must be
3372 saved on entry to this function. So we migrate the call to the
3373 first insn of this function. */
3374
3375 start_sequence ();
3376
3377 #ifdef EXPAND_BUILTIN_SAVEREGS
3378 /* Do whatever the machine needs done in this case. */
3379 val = EXPAND_BUILTIN_SAVEREGS ();
3380 #else
3381 /* ??? We used to try to build up a call to the out-of-line function,
3382 guessing about what registers needed saving, etc. This became much
3383 harder with __builtin_va_start, since we don't have a tree for a
3384 call to __builtin_saveregs to fall back on. There was exactly one
3385 port (i860) that used this code, and I'm unconvinced it could actually
3386 handle the general case. So we no longer try to handle anything
3387 weird and make the backend absorb the evil. */
3388
3389 error ("__builtin_saveregs not supported by this target");
3390 val = const0_rtx;
3391 #endif
3392
3393 seq = get_insns ();
3394 end_sequence ();
3395
3396 saveregs_value = val;
3397
3398 /* Put the insns after the NOTE that starts the function. If this
3399 is inside a start_sequence, make the outer-level insn chain current, so
3400 the code is placed at the start of the function. */
3401 push_topmost_sequence ();
3402 emit_insn_after (seq, get_insns ());
3403 pop_topmost_sequence ();
3404
3405 return val;
3406 }
3407
3408 /* __builtin_args_info (N) returns word N of the arg space info
3409 for the current function. The number and meanings of the words
3410 are controlled by the definition of CUMULATIVE_ARGS. */
3411
3412 static rtx
3413 expand_builtin_args_info (exp)
3414 tree exp;
3415 {
3416 tree arglist = TREE_OPERAND (exp, 1);
3417 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3418 int *word_ptr = (int *) &current_function_args_info;
3419
3420 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3421 abort ();
3422
3423 if (arglist != 0)
3424 {
3425 if (!host_integerp (TREE_VALUE (arglist), 0))
3426 error ("argument of `__builtin_args_info' must be constant");
3427 else
3428 {
3429 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3430
3431 if (wordnum < 0 || wordnum >= nwords)
3432 error ("argument of `__builtin_args_info' out of range");
3433 else
3434 return GEN_INT (word_ptr[wordnum]);
3435 }
3436 }
3437 else
3438 error ("missing argument in `__builtin_args_info'");
3439
3440 return const0_rtx;
3441 }
3442
3443 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3444
3445 static rtx
3446 expand_builtin_next_arg (arglist)
3447 tree arglist;
3448 {
3449 tree fntype = TREE_TYPE (current_function_decl);
3450
3451 if (TYPE_ARG_TYPES (fntype) == 0
3452 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3453 == void_type_node))
3454 {
3455 error ("`va_start' used in function with fixed args");
3456 return const0_rtx;
3457 }
3458
3459 if (arglist)
3460 {
3461 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3462 tree arg = TREE_VALUE (arglist);
3463
3464 /* Strip off all nops for the sake of the comparison. This
3465 is not quite the same as STRIP_NOPS. It does more.
3466 We must also strip off INDIRECT_REF for C++ reference
3467 parameters. */
3468 while (TREE_CODE (arg) == NOP_EXPR
3469 || TREE_CODE (arg) == CONVERT_EXPR
3470 || TREE_CODE (arg) == NON_LVALUE_EXPR
3471 || TREE_CODE (arg) == INDIRECT_REF)
3472 arg = TREE_OPERAND (arg, 0);
3473 if (arg != last_parm)
3474 warning ("second parameter of `va_start' not last named argument");
3475 }
3476 else
3477 /* Evidently an out-of-date version of <stdarg.h>; can't validate
3478 va_start's second argument, but can still work as intended. */
3479 warning ("`__builtin_next_arg' called without an argument");
3480
3481 return expand_binop (Pmode, add_optab,
3482 current_function_internal_arg_pointer,
3483 current_function_arg_offset_rtx,
3484 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3485 }
3486
3487 /* Make it easier for the backends by protecting the valist argument
3488 from multiple evaluations. */
3489
3490 static tree
3491 stabilize_va_list (valist, needs_lvalue)
3492 tree valist;
3493 int needs_lvalue;
3494 {
3495 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3496 {
3497 if (TREE_SIDE_EFFECTS (valist))
3498 valist = save_expr (valist);
3499
3500 /* For this case, the backends will be expecting a pointer to
3501 TREE_TYPE (va_list_type_node), but it's possible we've
3502 actually been given an array (an actual va_list_type_node).
3503 So fix it. */
3504 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3505 {
3506 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3507 tree p2 = build_pointer_type (va_list_type_node);
3508
3509 valist = build1 (ADDR_EXPR, p2, valist);
3510 valist = fold (build1 (NOP_EXPR, p1, valist));
3511 }
3512 }
3513 else
3514 {
3515 tree pt;
3516
3517 if (! needs_lvalue)
3518 {
3519 if (! TREE_SIDE_EFFECTS (valist))
3520 return valist;
3521
3522 pt = build_pointer_type (va_list_type_node);
3523 valist = fold (build1 (ADDR_EXPR, pt, valist));
3524 TREE_SIDE_EFFECTS (valist) = 1;
3525 }
3526
3527 if (TREE_SIDE_EFFECTS (valist))
3528 valist = save_expr (valist);
3529 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3530 valist));
3531 }
3532
3533 return valist;
3534 }
3535
3536 /* The "standard" implementation of va_start: just assign `nextarg' to
3537 the variable. */
3538
3539 void
3540 std_expand_builtin_va_start (valist, nextarg)
3541 tree valist;
3542 rtx nextarg;
3543 {
3544 tree t;
3545
3546 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3547 make_tree (ptr_type_node, nextarg));
3548 TREE_SIDE_EFFECTS (t) = 1;
3549
3550 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3551 }
3552
3553 /* Expand ARGLIST, from a call to __builtin_va_start. */
3554
3555 static rtx
3556 expand_builtin_va_start (arglist)
3557 tree arglist;
3558 {
3559 rtx nextarg;
3560 tree chain, valist;
3561
3562 chain = TREE_CHAIN (arglist);
3563
3564 if (TREE_CHAIN (chain))
3565 error ("too many arguments to function `va_start'");
3566
3567 nextarg = expand_builtin_next_arg (chain);
3568 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3569
3570 #ifdef EXPAND_BUILTIN_VA_START
3571 EXPAND_BUILTIN_VA_START (valist, nextarg);
3572 #else
3573 std_expand_builtin_va_start (valist, nextarg);
3574 #endif
3575
3576 return const0_rtx;
3577 }
3578
3579 /* The "standard" implementation of va_arg: read the value from the
3580 current (padded) address and increment by the (padded) size. */
3581
3582 rtx
3583 std_expand_builtin_va_arg (valist, type)
3584 tree valist, type;
3585 {
3586 tree addr_tree, t, type_size = NULL;
3587 tree align, alignm1;
3588 tree rounded_size;
3589 rtx addr;
3590
3591 /* Compute the rounded size of the type. */
3592 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3593 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3594 if (type == error_mark_node
3595 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3596 || TREE_OVERFLOW (type_size))
3597 rounded_size = size_zero_node;
3598 else
3599 rounded_size = fold (build (MULT_EXPR, sizetype,
3600 fold (build (TRUNC_DIV_EXPR, sizetype,
3601 fold (build (PLUS_EXPR, sizetype,
3602 type_size, alignm1)),
3603 align)),
3604 align));
3605
3606 /* Get AP. */
3607 addr_tree = valist;
3608 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3609 {
3610 /* Small args are padded downward. */
3611 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3612 fold (build (COND_EXPR, sizetype,
3613 fold (build (GT_EXPR, sizetype,
3614 rounded_size,
3615 align)),
3616 size_zero_node,
3617 fold (build (MINUS_EXPR, sizetype,
3618 rounded_size,
3619 type_size))))));
3620 }
3621
3622 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3623 addr = copy_to_reg (addr);
3624
3625 /* Compute new value for AP. */
3626 if (! integer_zerop (rounded_size))
3627 {
3628 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3629 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3630 rounded_size));
3631 TREE_SIDE_EFFECTS (t) = 1;
3632 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3633 }
3634
3635 return addr;
3636 }
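
/* Editorial illustration, not part of the original source: with
   PARM_BOUNDARY of 32 bits, align is 4.  For a 2-byte argument,
   rounded_size = ((2 + 3) / 4) * 4 = 4; if PAD_VARARGS_DOWN, the
   value is read from AP + (4 - 2) because rounded_size is not
   greater than align, and AP then advances by 4.  For a 6-byte
   argument, rounded_size = 8, the value is read from AP itself,
   and AP advances by 8.  */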
3637
3638 /* Expand __builtin_va_arg, which is not really a builtin function, but
3639 a very special sort of operator. */
3640
3641 rtx
3642 expand_builtin_va_arg (valist, type)
3643 tree valist, type;
3644 {
3645 rtx addr, result;
3646 tree promoted_type, want_va_type, have_va_type;
3647
3648 /* Verify that valist is of the proper type. */
3649
3650 want_va_type = va_list_type_node;
3651 have_va_type = TREE_TYPE (valist);
3652 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3653 {
3654 /* If va_list is an array type, the argument may have decayed
3655 to a pointer type, e.g. by being passed to another function.
3656 In that case, unwrap both types so that we can compare the
3657 underlying records. */
3658 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3659 || TREE_CODE (have_va_type) == POINTER_TYPE)
3660 {
3661 want_va_type = TREE_TYPE (want_va_type);
3662 have_va_type = TREE_TYPE (have_va_type);
3663 }
3664 }
3665 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3666 {
3667 error ("first argument to `va_arg' not of type `va_list'");
3668 addr = const0_rtx;
3669 }
3670
3671 /* Generate a diagnostic for requesting data of a type that cannot
3672 be passed through `...' due to type promotion at the call site. */
3673 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3674 != type)
3675 {
3676 const char *name = "<anonymous type>", *pname = 0;
3677 static bool gave_help;
3678
3679 if (TYPE_NAME (type))
3680 {
3681 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3682 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3683 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3684 && DECL_NAME (TYPE_NAME (type)))
3685 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3686 }
3687 if (TYPE_NAME (promoted_type))
3688 {
3689 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3690 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3691 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3692 && DECL_NAME (TYPE_NAME (promoted_type)))
3693 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3694 }
3695
3696 /* Unfortunately, this is merely undefined, rather than a constraint
3697 violation, so we cannot make this an error. If this call is never
3698 executed, the program is still strictly conforming. */
3699 warning ("`%s' is promoted to `%s' when passed through `...'",
3700 name, pname);
3701 if (! gave_help)
3702 {
3703 gave_help = true;
3704 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3705 pname, name);
3706 }
3707
3708 /* We can, however, treat "undefined" any way we please.
3709 Emit a trap (or call abort) to encourage the user to fix the program. */
3710 expand_builtin_trap ();
3711
3712 /* This is dead code, but go ahead and finish so that the
3713 mode of the result comes out right. */
3714 addr = const0_rtx;
3715 }
3716 else
3717 {
3718 /* Make it easier for the backends by protecting the valist argument
3719 from multiple evaluations. */
3720 valist = stabilize_va_list (valist, 0);
3721
3722 #ifdef EXPAND_BUILTIN_VA_ARG
3723 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3724 #else
3725 addr = std_expand_builtin_va_arg (valist, type);
3726 #endif
3727 }
3728
3729 #ifdef POINTERS_EXTEND_UNSIGNED
3730 if (GET_MODE (addr) != Pmode)
3731 addr = convert_memory_address (Pmode, addr);
3732 #endif
3733
3734 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3735 set_mem_alias_set (result, get_varargs_alias_set ());
3736
3737 return result;
3738 }
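
/* Editorial illustration, not part of the original source: because a
   `short' argument is promoted to `int' when passed through `...',

       short s = va_arg (ap, short);

   has undefined behavior; the code above warns, suggests reading an
   `int' instead, and expands to a trap.  */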
3739
3740 /* Expand ARGLIST, from a call to __builtin_va_end. */
3741
3742 static rtx
3743 expand_builtin_va_end (arglist)
3744 tree arglist;
3745 {
3746 tree valist = TREE_VALUE (arglist);
3747
3748 #ifdef EXPAND_BUILTIN_VA_END
3749 valist = stabilize_va_list (valist, 0);
3750 EXPAND_BUILTIN_VA_END (arglist);
3751 #else
3752 /* Evaluate for side effects, if needed. I hate macros that don't
3753 do that. */
3754 if (TREE_SIDE_EFFECTS (valist))
3755 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3756 #endif
3757
3758 return const0_rtx;
3759 }
3760
3761 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3762 builtin rather than just as an assignment in stdarg.h because of the
3763 nastiness of array-type va_list types. */
3764
3765 static rtx
3766 expand_builtin_va_copy (arglist)
3767 tree arglist;
3768 {
3769 tree dst, src, t;
3770
3771 dst = TREE_VALUE (arglist);
3772 src = TREE_VALUE (TREE_CHAIN (arglist));
3773
3774 dst = stabilize_va_list (dst, 1);
3775 src = stabilize_va_list (src, 0);
3776
3777 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3778 {
3779 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3780 TREE_SIDE_EFFECTS (t) = 1;
3781 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3782 }
3783 else
3784 {
3785 rtx dstb, srcb, size;
3786
3787 /* Evaluate to pointers. */
3788 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3789 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3790 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3791 VOIDmode, EXPAND_NORMAL);
3792
3793 #ifdef POINTERS_EXTEND_UNSIGNED
3794 if (GET_MODE (dstb) != Pmode)
3795 dstb = convert_memory_address (Pmode, dstb);
3796
3797 if (GET_MODE (srcb) != Pmode)
3798 srcb = convert_memory_address (Pmode, srcb);
3799 #endif
3800
3801 /* "Dereference" to BLKmode memories. */
3802 dstb = gen_rtx_MEM (BLKmode, dstb);
3803 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3804 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3805 srcb = gen_rtx_MEM (BLKmode, srcb);
3806 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3807 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3808
3809 /* Copy. */
3810 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3811 }
3812
3813 return const0_rtx;
3814 }
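
/* Editorial illustration, not part of the original source: on a
   target whose ABI declares something like

       typedef struct __va_list_tag __builtin_va_list[1];

   a plain assignment in stdarg.h cannot copy an array object, so the
   block move above copies the whole underlying structure regardless
   of whether va_list is an array or a scalar type.  */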
3815
3816 /* Expand a call to one of the builtin functions __builtin_frame_address or
3817 __builtin_return_address. */
3818
3819 static rtx
3820 expand_builtin_frame_address (exp)
3821 tree exp;
3822 {
3823 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3824 tree arglist = TREE_OPERAND (exp, 1);
3825
3826 /* The argument must be a nonnegative integer constant.
3827 It counts the number of frames to scan up the stack.
3828 The value is the return address saved in that frame. */
3829 if (arglist == 0)
3830 /* Warning about missing arg was already issued. */
3831 return const0_rtx;
3832 else if (! host_integerp (TREE_VALUE (arglist), 1))
3833 {
3834 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3835 error ("invalid arg to `__builtin_frame_address'");
3836 else
3837 error ("invalid arg to `__builtin_return_address'");
3838 return const0_rtx;
3839 }
3840 else
3841 {
3842 rtx tem
3843 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3844 tree_low_cst (TREE_VALUE (arglist), 1),
3845 hard_frame_pointer_rtx);
3846
3847 /* Some ports cannot access arbitrary stack frames. */
3848 if (tem == NULL)
3849 {
3850 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3851 warning ("unsupported arg to `__builtin_frame_address'");
3852 else
3853 warning ("unsupported arg to `__builtin_return_address'");
3854 return const0_rtx;
3855 }
3856
3857 /* For __builtin_frame_address, return what we've got. */
3858 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3859 return tem;
3860
3861 if (GET_CODE (tem) != REG
3862 && ! CONSTANT_P (tem))
3863 tem = copy_to_mode_reg (Pmode, tem);
3864 return tem;
3865 }
3866 }
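
/* Editorial illustration, not part of the original source:

       void *ra = __builtin_return_address (0);
       void *fp = __builtin_frame_address (0);

   The single argument counts frames up the stack and must be a
   nonnegative integer constant; nonzero counts may be rejected with
   the "unsupported arg" warnings above on ports that cannot walk
   arbitrary frames.  */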
3867
3868 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3869 we failed and the caller should emit a normal call, otherwise try to get
3870 the result in TARGET, if convenient. */
3871
3872 static rtx
3873 expand_builtin_alloca (arglist, target)
3874 tree arglist;
3875 rtx target;
3876 {
3877 rtx op0;
3878 rtx result;
3879
3880 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3881 return 0;
3882
3883 /* Compute the argument. */
3884 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3885
3886 /* Allocate the desired space. */
3887 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3888
3889 #ifdef POINTERS_EXTEND_UNSIGNED
3890 if (GET_MODE (result) != ptr_mode)
3891 result = convert_memory_address (ptr_mode, result);
3892 #endif
3893
3894 return result;
3895 }
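
/* Editorial illustration, not part of the original source:

       char *buf = __builtin_alloca (n);

   expands through this routine: the size is evaluated, space is
   carved out of the current frame by allocate_dynamic_stack_space,
   and the resulting pointer is returned in ptr_mode.  */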
3896
3897 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3898 Return 0 if a normal call should be emitted rather than expanding the
3899 function in-line. If convenient, the result should be placed in TARGET.
3900 SUBTARGET may be used as the target for computing one of EXP's operands. */
3901
3902 static rtx
3903 expand_builtin_unop (target_mode, arglist, target, subtarget, op_optab)
3904 enum machine_mode target_mode;
3905 tree arglist;
3906 rtx target, subtarget;
3907 optab op_optab;
3908 {
3909 rtx op0;
3910 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3911 return 0;
3912
3913 /* Compute the argument. */
3914 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3915 /* Compute op, into TARGET if possible.
3916 Set TARGET to wherever the result comes back. */
3917 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3918 op_optab, op0, target, 1);
3919 if (target == 0)
3920 abort ();
3921
3922 return convert_to_mode (target_mode, target, 0);
3923 }
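
/* Editorial illustration, not part of the original source: a call
   such as

       int n = __builtin_popcount (x);

   reaches this routine with popcount_optab; the argument is expanded,
   the operation is emitted in the argument's mode, and the result is
   converted to the mode of the call's return type.  */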
3924
3925 /* If the string passed to fputs is a compile-time constant, we attempt
3926 to transform this call into __builtin_fputc() or __builtin_fwrite(). */
3927
3928 static rtx
3929 expand_builtin_fputs (arglist, ignore, unlocked)
3930 tree arglist;
3931 int ignore;
3932 int unlocked;
3933 {
3934 tree len, fn;
3935 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3936 : implicit_built_in_decls[BUILT_IN_FPUTC];
3937 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3938 : implicit_built_in_decls[BUILT_IN_FWRITE];
3939
3940 /* If the return value is used, or the replacement _DECL isn't
3941 initialized, don't do the transformation. */
3942 if (!ignore || !fn_fputc || !fn_fwrite)
3943 return 0;
3944
3945 /* Verify the arguments in the original call. */
3946 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3947 return 0;
3948
3949 /* Get the length of the string passed to fputs. If the length
3950 can't be determined, punt. */
3951 if (!(len = c_strlen (TREE_VALUE (arglist)))
3952 || TREE_CODE (len) != INTEGER_CST)
3953 return 0;
3954
3955 switch (compare_tree_int (len, 1))
3956 {
3957 case -1: /* length is 0, delete the call entirely. */
3958 {
3959 /* Evaluate and ignore the argument in case it has
3960 side-effects. */
3961 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3962 VOIDmode, EXPAND_NORMAL);
3963 return const0_rtx;
3964 }
3965 case 0: /* length is 1, call fputc. */
3966 {
3967 const char *p = c_getstr (TREE_VALUE (arglist));
3968
3969 if (p != NULL)
3970 {
3971 /* New argument list transforming fputs(string, stream) to
3972 fputc(string[0], stream). */
3973 arglist =
3974 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3975 arglist =
3976 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3977 fn = fn_fputc;
3978 break;
3979 }
3980 }
3981 /* FALLTHROUGH */
3982 case 1: /* length is greater than 1, call fwrite. */
3983 {
3984 tree string_arg;
3985
3986 /* If optimizing for size, keep fputs. */
3987 if (optimize_size)
3988 return 0;
3989 string_arg = TREE_VALUE (arglist);
3990 /* New argument list transforming fputs(string, stream) to
3991 fwrite(string, 1, len, stream). */
3992 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3993 arglist = tree_cons (NULL_TREE, len, arglist);
3994 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3995 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3996 fn = fn_fwrite;
3997 break;
3998 }
3999 default:
4000 abort ();
4001 }
4002
4003 return expand_expr (build_function_call_expr (fn, arglist),
4004 (ignore ? const0_rtx : NULL_RTX),
4005 VOIDmode, EXPAND_NORMAL);
4006 }
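
/* Editorial illustration, not part of the original source: when the
   return value is unused,

       fputs ("", f);     evaluates only f,
       fputs ("x", f);    becomes fputc ('x', f),
       fputs ("abc", f);  becomes fwrite ("abc", 1, 3, f),

   the last transformation being skipped when optimizing for size.  */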
4007
4008 /* Expand a call to __builtin_expect. We return our argument and emit a
4009 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4010 a non-jump context. */
4011
4012 static rtx
4013 expand_builtin_expect (arglist, target)
4014 tree arglist;
4015 rtx target;
4016 {
4017 tree exp, c;
4018 rtx note, rtx_c;
4019
4020 if (arglist == NULL_TREE
4021 || TREE_CHAIN (arglist) == NULL_TREE)
4022 return const0_rtx;
4023 exp = TREE_VALUE (arglist);
4024 c = TREE_VALUE (TREE_CHAIN (arglist));
4025
4026 if (TREE_CODE (c) != INTEGER_CST)
4027 {
4028 error ("second arg to `__builtin_expect' must be a constant");
4029 c = integer_zero_node;
4030 }
4031
4032 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4033
4034 /* Don't bother with expected value notes for integral constants. */
4035 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4036 {
4037 /* We do need to force this into a register so that we can be
4038 moderately sure to be able to correctly interpret the branch
4039 condition later. */
4040 target = force_reg (GET_MODE (target), target);
4041
4042 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4043
4044 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
4045 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4046 }
4047
4048 return target;
4049 }
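
/* Editorial illustration, not part of the original source: in a
   non-jump context such as

       long hot = __builtin_expect (x, 1);

   the value of x is returned and, when guessing branch probabilities,
   a NOTE_INSN_EXPECTED_VALUE note records that x is expected to
   equal 1.  */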
4050
4051 /* Like expand_builtin_expect, except do this in a jump context. This is
4052 called from do_jump if the conditional is a __builtin_expect. Return either
4053 a list of insns to emit the jump or NULL if we cannot optimize
4054 __builtin_expect. We need to optimize this at jump time so that machines
4055 like the PowerPC don't turn the test into a SCC operation, and then jump
4056 based on the test being 0/1. */
4057
4058 rtx
4059 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
4060 tree exp;
4061 rtx if_false_label;
4062 rtx if_true_label;
4063 {
4064 tree arglist = TREE_OPERAND (exp, 1);
4065 tree arg0 = TREE_VALUE (arglist);
4066 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4067 rtx ret = NULL_RTX;
4068
4069 /* Only handle __builtin_expect (test, 0) and
4070 __builtin_expect (test, 1). */
4071 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4072 && (integer_zerop (arg1) || integer_onep (arg1)))
4073 {
4074 int num_jumps = 0;
4075 rtx insn;
4076
4077 /* If we fail to locate an appropriate conditional jump, we'll
4078 fall back to normal evaluation. Ensure that the expression
4079 can be re-evaluated. */
4080 switch (unsafe_for_reeval (arg0))
4081 {
4082 case 0: /* Safe. */
4083 break;
4084
4085 case 1: /* Mildly unsafe. */
4086 arg0 = unsave_expr (arg0);
4087 break;
4088
4089 case 2: /* Wildly unsafe. */
4090 return NULL_RTX;
4091 }
4092
4093 /* Expand the jump insns. */
4094 start_sequence ();
4095 do_jump (arg0, if_false_label, if_true_label);
4096 ret = get_insns ();
4097 end_sequence ();
4098
4099 /* Now that the __builtin_expect has been validated, go through and
4100 attach the expected-value predictions to each of the conditional jumps.
4101 If we run into trouble, just give up and generate the 'safe' code of
4102 doing an SCC operation and then branching on that. */
4103 insn = ret;
4104 while (insn != NULL_RTX)
4105 {
4106 rtx next = NEXT_INSN (insn);
4107 rtx pattern;
4108
4109 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
4110 && (pattern = pc_set (insn)) != NULL_RTX)
4111 {
4112 rtx ifelse = SET_SRC (pattern);
4113 rtx label;
4114 int taken;
4115
4116 if (GET_CODE (ifelse) != IF_THEN_ELSE)
4117 goto do_next_insn;
4118
4119 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4120 {
4121 taken = 1;
4122 label = XEXP (XEXP (ifelse, 1), 0);
4123 }
4124 /* An inverted jump reverses the probabilities. */
4125 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4126 {
4127 taken = 0;
4128 label = XEXP (XEXP (ifelse, 2), 0);
4129 }
4130 /* We shouldn't have to worry about conditional returns during
4131 the expansion stage, but handle it gracefully anyway. */
4132 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4133 {
4134 taken = 1;
4135 label = NULL_RTX;
4136 }
4137 /* An inverted return reverses the probabilities. */
4138 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4139 {
4140 taken = 0;
4141 label = NULL_RTX;
4142 }
4143 else
4144 goto do_next_insn;
4145
4146 /* If the test is expected to fail, reverse the
4147 probabilities. */
4148 if (integer_zerop (arg1))
4149 taken = 1 - taken;
4150
4151 /* If we are jumping to the false label, reverse the
4152 probabilities. */
4153 if (label == NULL_RTX)
4154 ; /* conditional return */
4155 else if (label == if_false_label)
4156 taken = 1 - taken;
4157 else if (label != if_true_label)
4158 goto do_next_insn;
4159
4160 num_jumps++;
4161 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4162 }
4163
4164 do_next_insn:
4165 insn = next;
4166 }
4167
4168 /* If no jumps were modified, fail and do __builtin_expect the normal
4169 way. */
4170 if (num_jumps == 0)
4171 ret = NULL_RTX;
4172 }
4173
4174 return ret;
4175 }
4176
4177 void
4178 expand_builtin_trap ()
4179 {
4180 #ifdef HAVE_trap
4181 if (HAVE_trap)
4182 emit_insn (gen_trap ());
4183 else
4184 #endif
4185 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4186 emit_barrier ();
4187 }
4188 \f
4189 /* Expand an expression EXP that calls a built-in function,
4190 with result going to TARGET if that's convenient
4191 (and in mode MODE if that's convenient).
4192 SUBTARGET may be used as the target for computing one of EXP's operands.
4193 IGNORE is nonzero if the value is to be ignored. */
4194
4195 rtx
4196 expand_builtin (exp, target, subtarget, mode, ignore)
4197 tree exp;
4198 rtx target;
4199 rtx subtarget;
4200 enum machine_mode mode;
4201 int ignore;
4202 {
4203 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4204 tree arglist = TREE_OPERAND (exp, 1);
4205 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4206 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4207
4208 /* Perform postincrements before expanding builtin functions.  */
4209 emit_queue ();
4210
4211 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4212 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4213
4214 /* When not optimizing, generate calls to library functions for a certain
4215 set of builtins. */
4216 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4217 switch (fcode)
4218 {
4219 case BUILT_IN_SQRT:
4220 case BUILT_IN_SQRTF:
4221 case BUILT_IN_SQRTL:
4222 case BUILT_IN_SIN:
4223 case BUILT_IN_SINF:
4224 case BUILT_IN_SINL:
4225 case BUILT_IN_COS:
4226 case BUILT_IN_COSF:
4227 case BUILT_IN_COSL:
4228 case BUILT_IN_EXP:
4229 case BUILT_IN_EXPF:
4230 case BUILT_IN_EXPL:
4231 case BUILT_IN_LOG:
4232 case BUILT_IN_LOGF:
4233 case BUILT_IN_LOGL:
4234 case BUILT_IN_POW:
4235 case BUILT_IN_POWF:
4236 case BUILT_IN_POWL:
4237 case BUILT_IN_ATAN2:
4238 case BUILT_IN_ATAN2F:
4239 case BUILT_IN_ATAN2L:
4240 case BUILT_IN_MEMSET:
4241 case BUILT_IN_MEMCPY:
4242 case BUILT_IN_MEMCMP:
4243 case BUILT_IN_MEMPCPY:
4244 case BUILT_IN_MEMMOVE:
4245 case BUILT_IN_BCMP:
4246 case BUILT_IN_BZERO:
4247 case BUILT_IN_BCOPY:
4248 case BUILT_IN_INDEX:
4249 case BUILT_IN_RINDEX:
4250 case BUILT_IN_STPCPY:
4251 case BUILT_IN_STRCHR:
4252 case BUILT_IN_STRRCHR:
4253 case BUILT_IN_STRLEN:
4254 case BUILT_IN_STRCPY:
4255 case BUILT_IN_STRNCPY:
4256 case BUILT_IN_STRNCMP:
4257 case BUILT_IN_STRSTR:
4258 case BUILT_IN_STRPBRK:
4259 case BUILT_IN_STRCAT:
4260 case BUILT_IN_STRNCAT:
4261 case BUILT_IN_STRSPN:
4262 case BUILT_IN_STRCSPN:
4263 case BUILT_IN_STRCMP:
4264 case BUILT_IN_FFS:
4265 case BUILT_IN_PUTCHAR:
4266 case BUILT_IN_PUTS:
4267 case BUILT_IN_PRINTF:
4268 case BUILT_IN_FPUTC:
4269 case BUILT_IN_FPUTS:
4270 case BUILT_IN_FWRITE:
4271 case BUILT_IN_PUTCHAR_UNLOCKED:
4272 case BUILT_IN_PUTS_UNLOCKED:
4273 case BUILT_IN_PRINTF_UNLOCKED:
4274 case BUILT_IN_FPUTC_UNLOCKED:
4275 case BUILT_IN_FPUTS_UNLOCKED:
4276 case BUILT_IN_FWRITE_UNLOCKED:
4277 case BUILT_IN_FLOOR:
4278 case BUILT_IN_FLOORF:
4279 case BUILT_IN_FLOORL:
4280 case BUILT_IN_CEIL:
4281 case BUILT_IN_CEILF:
4282 case BUILT_IN_CEILL:
4283 case BUILT_IN_TRUNC:
4284 case BUILT_IN_TRUNCF:
4285 case BUILT_IN_TRUNCL:
4286 case BUILT_IN_ROUND:
4287 case BUILT_IN_ROUNDF:
4288 case BUILT_IN_ROUNDL:
4289 case BUILT_IN_NEARBYINT:
4290 case BUILT_IN_NEARBYINTF:
4291 case BUILT_IN_NEARBYINTL:
4292 return expand_call (exp, target, ignore);
4293
4294 default:
4295 break;
4296 }
4297
4298 /* The built-in function expanders test for target == const0_rtx
4299 to determine whether the function's result will be ignored. */
4300 if (ignore)
4301 target = const0_rtx;
4302
4303 /* If the result of a pure or const built-in function is ignored, and
4304 none of its arguments are volatile, we can avoid expanding the
4305 built-in call and just evaluate the arguments for side-effects. */
4306 if (target == const0_rtx
4307 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4308 {
4309 bool volatilep = false;
4310 tree arg;
4311
4312 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4313 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4314 {
4315 volatilep = true;
4316 break;
4317 }
4318
4319 if (! volatilep)
4320 {
4321 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4322 expand_expr (TREE_VALUE (arg), const0_rtx,
4323 VOIDmode, EXPAND_NORMAL);
4324 return const0_rtx;
4325 }
4326 }
4327
4328 switch (fcode)
4329 {
4330 case BUILT_IN_ABS:
4331 case BUILT_IN_LABS:
4332 case BUILT_IN_LLABS:
4333 case BUILT_IN_IMAXABS:
4334 case BUILT_IN_FABS:
4335 case BUILT_IN_FABSF:
4336 case BUILT_IN_FABSL:
4337 /* build_function_call changes these into ABS_EXPR. */
4338 abort ();
4339
4340 case BUILT_IN_CONJ:
4341 case BUILT_IN_CONJF:
4342 case BUILT_IN_CONJL:
4343 case BUILT_IN_CREAL:
4344 case BUILT_IN_CREALF:
4345 case BUILT_IN_CREALL:
4346 case BUILT_IN_CIMAG:
4347 case BUILT_IN_CIMAGF:
4348 case BUILT_IN_CIMAGL:
4349 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4350 and IMAGPART_EXPR. */
4351 abort ();
4352
4353 case BUILT_IN_SIN:
4354 case BUILT_IN_SINF:
4355 case BUILT_IN_SINL:
4356 case BUILT_IN_COS:
4357 case BUILT_IN_COSF:
4358 case BUILT_IN_COSL:
4359 case BUILT_IN_EXP:
4360 case BUILT_IN_EXPF:
4361 case BUILT_IN_EXPL:
4362 case BUILT_IN_LOG:
4363 case BUILT_IN_LOGF:
4364 case BUILT_IN_LOGL:
4365 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4366 because of possible accuracy problems. */
4367 if (! flag_unsafe_math_optimizations)
4368 break;
4369 case BUILT_IN_SQRT:
4370 case BUILT_IN_SQRTF:
4371 case BUILT_IN_SQRTL:
4372 case BUILT_IN_FLOOR:
4373 case BUILT_IN_FLOORF:
4374 case BUILT_IN_FLOORL:
4375 case BUILT_IN_CEIL:
4376 case BUILT_IN_CEILF:
4377 case BUILT_IN_CEILL:
4378 case BUILT_IN_TRUNC:
4379 case BUILT_IN_TRUNCF:
4380 case BUILT_IN_TRUNCL:
4381 case BUILT_IN_ROUND:
4382 case BUILT_IN_ROUNDF:
4383 case BUILT_IN_ROUNDL:
4384 case BUILT_IN_NEARBYINT:
4385 case BUILT_IN_NEARBYINTF:
4386 case BUILT_IN_NEARBYINTL:
4387 target = expand_builtin_mathfn (exp, target, subtarget);
4388 if (target)
4389 return target;
4390 break;
4391
4392 case BUILT_IN_POW:
4393 case BUILT_IN_POWF:
4394 case BUILT_IN_POWL:
4395 case BUILT_IN_ATAN2:
4396 case BUILT_IN_ATAN2F:
4397 case BUILT_IN_ATAN2L:
4398 if (! flag_unsafe_math_optimizations)
4399 break;
4400 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4401 if (target)
4402 return target;
4403 break;
4404
4405 case BUILT_IN_APPLY_ARGS:
4406 return expand_builtin_apply_args ();
4407
4408 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4409 FUNCTION with a copy of the parameters described by
4410 ARGUMENTS, and ARGSIZE. It returns a block of memory
4411 allocated on the stack into which is stored all the registers
4412 that might possibly be used for returning the result of a
4413 function. ARGUMENTS is the value returned by
4414 __builtin_apply_args. ARGSIZE is the number of bytes of
4415 arguments that must be copied. ??? How should this value be
4416 computed? We'll also need a safe worst case value for varargs
4417 functions. */
4418 case BUILT_IN_APPLY:
4419 if (!validate_arglist (arglist, POINTER_TYPE,
4420 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4421 && !validate_arglist (arglist, REFERENCE_TYPE,
4422 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4423 return const0_rtx;
4424 else
4425 {
4426 int i;
4427 tree t;
4428 rtx ops[3];
4429
4430 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4431 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4432
4433 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4434 }
4435
4436 /* __builtin_return (RESULT) causes the function to return the
4437 value described by RESULT. RESULT is address of the block of
4438 memory returned by __builtin_apply. */
4439 case BUILT_IN_RETURN:
4440 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4441 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4442 NULL_RTX, VOIDmode, 0));
4443 return const0_rtx;
4444
4445 case BUILT_IN_SAVEREGS:
4446 return expand_builtin_saveregs ();
4447
4448 case BUILT_IN_ARGS_INFO:
4449 return expand_builtin_args_info (exp);
4450
4451 /* Return the address of the first anonymous stack arg. */
4452 case BUILT_IN_NEXT_ARG:
4453 return expand_builtin_next_arg (arglist);
4454
4455 case BUILT_IN_CLASSIFY_TYPE:
4456 return expand_builtin_classify_type (arglist);
4457
4458 case BUILT_IN_CONSTANT_P:
4459 return expand_builtin_constant_p (exp);
4460
4461 case BUILT_IN_FRAME_ADDRESS:
4462 case BUILT_IN_RETURN_ADDRESS:
4463 return expand_builtin_frame_address (exp);
4464
4465 /* Return the address of the area where the structure value is returned,
4466 or 0 if there is none. */
4467 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4468 if (arglist != 0
4469 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4470 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4471 return const0_rtx;
4472 else
4473 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4474
4475 case BUILT_IN_ALLOCA:
4476 target = expand_builtin_alloca (arglist, target);
4477 if (target)
4478 return target;
4479 break;
4480
4481 case BUILT_IN_FFS:
4482 case BUILT_IN_FFSL:
4483 case BUILT_IN_FFSLL:
4484 target = expand_builtin_unop (target_mode, arglist, target,
4485 subtarget, ffs_optab);
4486 if (target)
4487 return target;
4488 break;
4489
4490 case BUILT_IN_CLZ:
4491 case BUILT_IN_CLZL:
4492 case BUILT_IN_CLZLL:
4493 target = expand_builtin_unop (target_mode, arglist, target,
4494 subtarget, clz_optab);
4495 if (target)
4496 return target;
4497 break;
4498
4499 case BUILT_IN_CTZ:
4500 case BUILT_IN_CTZL:
4501 case BUILT_IN_CTZLL:
4502 target = expand_builtin_unop (target_mode, arglist, target,
4503 subtarget, ctz_optab);
4504 if (target)
4505 return target;
4506 break;
4507
4508 case BUILT_IN_POPCOUNT:
4509 case BUILT_IN_POPCOUNTL:
4510 case BUILT_IN_POPCOUNTLL:
4511 target = expand_builtin_unop (target_mode, arglist, target,
4512 subtarget, popcount_optab);
4513 if (target)
4514 return target;
4515 break;
4516
4517 case BUILT_IN_PARITY:
4518 case BUILT_IN_PARITYL:
4519 case BUILT_IN_PARITYLL:
4520 target = expand_builtin_unop (target_mode, arglist, target,
4521 subtarget, parity_optab);
4522 if (target)
4523 return target;
4524 break;
4525
4526 case BUILT_IN_STRLEN:
4527 target = expand_builtin_strlen (exp, target);
4528 if (target)
4529 return target;
4530 break;
4531
4532 case BUILT_IN_STRCPY:
4533 target = expand_builtin_strcpy (exp, target, mode);
4534 if (target)
4535 return target;
4536 break;
4537
4538 case BUILT_IN_STRNCPY:
4539 target = expand_builtin_strncpy (arglist, target, mode);
4540 if (target)
4541 return target;
4542 break;
4543
4544 case BUILT_IN_STPCPY:
4545 target = expand_builtin_stpcpy (arglist, target, mode);
4546 if (target)
4547 return target;
4548 break;
4549
4550 case BUILT_IN_STRCAT:
4551 target = expand_builtin_strcat (arglist, target, mode);
4552 if (target)
4553 return target;
4554 break;
4555
4556 case BUILT_IN_STRNCAT:
4557 target = expand_builtin_strncat (arglist, target, mode);
4558 if (target)
4559 return target;
4560 break;
4561
4562 case BUILT_IN_STRSPN:
4563 target = expand_builtin_strspn (arglist, target, mode);
4564 if (target)
4565 return target;
4566 break;
4567
4568 case BUILT_IN_STRCSPN:
4569 target = expand_builtin_strcspn (arglist, target, mode);
4570 if (target)
4571 return target;
4572 break;
4573
4574 case BUILT_IN_STRSTR:
4575 target = expand_builtin_strstr (arglist, target, mode);
4576 if (target)
4577 return target;
4578 break;
4579
4580 case BUILT_IN_STRPBRK:
4581 target = expand_builtin_strpbrk (arglist, target, mode);
4582 if (target)
4583 return target;
4584 break;
4585
4586 case BUILT_IN_INDEX:
4587 case BUILT_IN_STRCHR:
4588 target = expand_builtin_strchr (arglist, target, mode);
4589 if (target)
4590 return target;
4591 break;
4592
4593 case BUILT_IN_RINDEX:
4594 case BUILT_IN_STRRCHR:
4595 target = expand_builtin_strrchr (arglist, target, mode);
4596 if (target)
4597 return target;
4598 break;
4599
4600 case BUILT_IN_MEMCPY:
4601 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/0);
4602 if (target)
4603 return target;
4604 break;
4605
4606 case BUILT_IN_MEMPCPY:
4607 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/1);
4608 if (target)
4609 return target;
4610 break;
4611
4612 case BUILT_IN_MEMMOVE:
4613 target = expand_builtin_memmove (arglist, target, mode);
4614 if (target)
4615 return target;
4616 break;
4617
4618 case BUILT_IN_BCOPY:
4619 target = expand_builtin_bcopy (arglist);
4620 if (target)
4621 return target;
4622 break;
4623
4624 case BUILT_IN_MEMSET:
4625 target = expand_builtin_memset (exp, target, mode);
4626 if (target)
4627 return target;
4628 break;
4629
4630 case BUILT_IN_BZERO:
4631 target = expand_builtin_bzero (exp);
4632 if (target)
4633 return target;
4634 break;
4635
4636 case BUILT_IN_STRCMP:
4637 target = expand_builtin_strcmp (exp, target, mode);
4638 if (target)
4639 return target;
4640 break;
4641
4642 case BUILT_IN_STRNCMP:
4643 target = expand_builtin_strncmp (exp, target, mode);
4644 if (target)
4645 return target;
4646 break;
4647
4648 case BUILT_IN_BCMP:
4649 case BUILT_IN_MEMCMP:
4650 target = expand_builtin_memcmp (exp, arglist, target, mode);
4651 if (target)
4652 return target;
4653 break;
4654
4655 case BUILT_IN_SETJMP:
4656 target = expand_builtin_setjmp (arglist, target);
4657 if (target)
4658 return target;
4659 break;
4660
4661 /* __builtin_longjmp is passed a pointer to an array of five words.
4662 It's similar to the C library longjmp function but works with
4663 __builtin_setjmp above. */
4664 case BUILT_IN_LONGJMP:
4665 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4666 break;
4667 else
4668 {
4669 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4670 VOIDmode, 0);
4671 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4672 NULL_RTX, VOIDmode, 0);
4673
4674 if (value != const1_rtx)
4675 {
4676 error ("__builtin_longjmp second argument must be 1");
4677 return const0_rtx;
4678 }
4679
4680 expand_builtin_longjmp (buf_addr, value);
4681 return const0_rtx;
4682 }
4683
4684 case BUILT_IN_TRAP:
4685 expand_builtin_trap ();
4686 return const0_rtx;
4687
4688 case BUILT_IN_FPUTS:
4689 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
4690 if (target)
4691 return target;
4692 break;
4693 case BUILT_IN_FPUTS_UNLOCKED:
4694 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
4695 if (target)
4696 return target;
4697 break;
4698
4699 /* Various hooks for the DWARF 2 __throw routine. */
4700 case BUILT_IN_UNWIND_INIT:
4701 expand_builtin_unwind_init ();
4702 return const0_rtx;
4703 case BUILT_IN_DWARF_CFA:
4704 return virtual_cfa_rtx;
4705 #ifdef DWARF2_UNWIND_INFO
4706 case BUILT_IN_DWARF_SP_COLUMN:
4707 return expand_builtin_dwarf_sp_column ();
4708 case BUILT_IN_INIT_DWARF_REG_SIZES:
4709 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4710 return const0_rtx;
4711 #endif
4712 case BUILT_IN_FROB_RETURN_ADDR:
4713 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4714 case BUILT_IN_EXTRACT_RETURN_ADDR:
4715 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4716 case BUILT_IN_EH_RETURN:
4717 expand_builtin_eh_return (TREE_VALUE (arglist),
4718 TREE_VALUE (TREE_CHAIN (arglist)));
4719 return const0_rtx;
4720 #ifdef EH_RETURN_DATA_REGNO
4721 case BUILT_IN_EH_RETURN_DATA_REGNO:
4722 return expand_builtin_eh_return_data_regno (arglist);
4723 #endif
4724 case BUILT_IN_VA_START:
4725 case BUILT_IN_STDARG_START:
4726 return expand_builtin_va_start (arglist);
4727 case BUILT_IN_VA_END:
4728 return expand_builtin_va_end (arglist);
4729 case BUILT_IN_VA_COPY:
4730 return expand_builtin_va_copy (arglist);
4731 case BUILT_IN_EXPECT:
4732 return expand_builtin_expect (arglist, target);
4733 case BUILT_IN_PREFETCH:
4734 expand_builtin_prefetch (arglist);
4735 return const0_rtx;
4736
4737
4738 default: /* just do library call, if unknown builtin */
4739 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4740 error ("built-in function `%s' not currently supported",
4741 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4742 }
4743
4744 /* The switch statement above can drop through to cause the function
4745 to be called normally. */
4746 return expand_call (exp, target, ignore);
4747 }
4748
4749 /* Determine whether a tree node represents a call to a built-in
4750 math function. If the tree T is a call to a built-in function
4751 taking one or two real arguments, then the return value is the
4752 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4753 the return value is END_BUILTINS. */
4754
4755 enum built_in_function
4756 builtin_mathfn_code (t)
4757 tree t;
4758 {
4759 tree fndecl, arglist;
4760
4761 if (TREE_CODE (t) != CALL_EXPR
4762 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4763 return END_BUILTINS;
4764
4765 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4766 if (TREE_CODE (fndecl) != FUNCTION_DECL
4767 || ! DECL_BUILT_IN (fndecl)
4768 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4769 return END_BUILTINS;
4770
4771 arglist = TREE_OPERAND (t, 1);
4772 if (! arglist
4773 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4774 return END_BUILTINS;
4775
4776 arglist = TREE_CHAIN (arglist);
4777 switch (DECL_FUNCTION_CODE (fndecl))
4778 {
4779 case BUILT_IN_POW:
4780 case BUILT_IN_POWF:
4781 case BUILT_IN_POWL:
4782 case BUILT_IN_ATAN2:
4783 case BUILT_IN_ATAN2F:
4784 case BUILT_IN_ATAN2L:
4785 if (! arglist
4786 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4787 || TREE_CHAIN (arglist))
4788 return END_BUILTINS;
4789 break;
4790
4791 default:
4792 if (arglist)
4793 return END_BUILTINS;
4794 break;
4795 }
4796
4797 return DECL_FUNCTION_CODE (fndecl);
4798 }
4799
4800 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4801 constant. ARGLIST is the argument list of the call. */
4802
4803 static tree
4804 fold_builtin_constant_p (arglist)
4805 tree arglist;
4806 {
4807 if (arglist == 0)
4808 return 0;
4809
4810 arglist = TREE_VALUE (arglist);
4811
4812 /* We return 1 for a numeric type that's known to be a constant
4813 value at compile-time or for an aggregate type that's a
4814 literal constant. */
4815 STRIP_NOPS (arglist);
4816
4817 /* If we know this is a constant, return the constant one. */
4818 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4819 || (TREE_CODE (arglist) == CONSTRUCTOR
4820 && TREE_CONSTANT (arglist))
4821 || (TREE_CODE (arglist) == ADDR_EXPR
4822 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4823 return integer_one_node;
4824
4825 /* If we aren't going to be running CSE or this expression
4826 has side effects, show we don't know it to be a constant.
4827 Likewise if it's a pointer or aggregate type, since in those
4828 cases we only want literals, as those are only optimized
4829 when generating RTL, not later.
4830 And finally, if we are compiling an initializer, not code, we
4831 need to return a definite result now; there's not going to be any
4832 more optimization done. */
4833 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4834 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4835 || POINTER_TYPE_P (TREE_TYPE (arglist))
4836 || cfun == 0)
4837 return integer_zero_node;
4838
4839 return 0;
4840 }
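
/* Editorial illustration, not part of the original source:

       __builtin_constant_p (42)      folds to 1 here;
       __builtin_constant_p ("abc")   folds to 1 (address of a string
                                      literal);
       __builtin_constant_p (x)       folds to 0 once no further CSE is
                                      expected, and is otherwise left
                                      for later passes.  */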
4841
4842 /* Fold a call to __builtin_classify_type. */
4843
4844 static tree
4845 fold_builtin_classify_type (arglist)
4846 tree arglist;
4847 {
4848 if (arglist == 0)
4849 return build_int_2 (no_type_class, 0);
4850
4851 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4852 }
4853
4854 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4855
4856 static tree
4857 fold_builtin_inf (type, warn)
4858 tree type;
4859 int warn;
4860 {
4861 REAL_VALUE_TYPE real;
4862
4863 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4864 warning ("target format does not support infinity");
4865
4866 real_inf (&real);
4867 return build_real (type, real);
4868 }
4869
4870 /* Fold a call to __builtin_nan or __builtin_nans. */
4871
4872 static tree
4873 fold_builtin_nan (arglist, type, quiet)
4874 tree arglist, type;
4875 int quiet;
4876 {
4877 REAL_VALUE_TYPE real;
4878 const char *str;
4879
4880 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4881 return 0;
4882 str = c_getstr (TREE_VALUE (arglist));
4883 if (!str)
4884 return 0;
4885
4886 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4887 return 0;
4888
4889 return build_real (type, real);
4890 }
4891
4892 /* EXP is assumed to be a builtin call where truncation can be propagated
4893 across (for instance floor((double)f) == (double)floorf (f)).
4894 Do the transformation. */
4895 static tree
4896 fold_trunc_transparent_mathfn (exp)
4897 tree exp;
4898 {
4899 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4900 tree arglist = TREE_OPERAND (exp, 1);
4901 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4902
4903 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4904 {
4905 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
4906 tree ftype = TREE_TYPE (exp);
4907 tree newtype = TREE_TYPE (arg0);
4908 tree decl;
4909
4910 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
4911 && (decl = mathfn_built_in (newtype, fcode)))
4912 {
4913 arglist =
4914 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
4915 return convert (ftype,
4916 build_function_call_expr (decl, arglist));
4917 }
4918 }
4919 return 0;
4920 }
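
/* Editorial illustration, not part of the original source: given
   `float f', the call

       floor ((double) f)

   is rewritten as

       (double) floorf (f)

   since the result is identical whether the floor is computed in
   float or double.  */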
4921
4922 /* Used by constant folding to eliminate some builtin calls early. EXP is
4923 the CALL_EXPR of a call to a builtin function. */
4924
4925 tree
4926 fold_builtin (exp)
4927 tree exp;
4928 {
4929 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4930 tree arglist = TREE_OPERAND (exp, 1);
4931 tree type = TREE_TYPE (TREE_TYPE (fndecl));
4932
4933 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4934 return 0;
4935
4936 switch (DECL_FUNCTION_CODE (fndecl))
4937 {
4938 case BUILT_IN_CONSTANT_P:
4939 return fold_builtin_constant_p (arglist);
4940
4941 case BUILT_IN_CLASSIFY_TYPE:
4942 return fold_builtin_classify_type (arglist);
4943
4944 case BUILT_IN_STRLEN:
4945 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4946 {
4947 tree len = c_strlen (TREE_VALUE (arglist));
4948 if (len)
4949 {
4950 /* Convert from the internal "sizetype" type to "size_t". */
4951 if (size_type_node)
4952 len = convert (size_type_node, len);
4953 return len;
4954 }
4955 }
4956 break;
4957
4958 case BUILT_IN_SQRT:
4959 case BUILT_IN_SQRTF:
4960 case BUILT_IN_SQRTL:
4961 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4962 {
4963 enum built_in_function fcode;
4964 tree arg = TREE_VALUE (arglist);
4965
4966 /* Optimize sqrt of constant value. */
4967 if (TREE_CODE (arg) == REAL_CST
4968 && ! TREE_CONSTANT_OVERFLOW (arg))
4969 {
4970 enum machine_mode mode;
4971 REAL_VALUE_TYPE r, x;
4972
4973 x = TREE_REAL_CST (arg);
4974 mode = TYPE_MODE (type);
4975 if (real_sqrt (&r, mode, &x)
4976 || (!flag_trapping_math && !flag_errno_math))
4977 return build_real (type, r);
4978 }
4979
4980 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
4981 fcode = builtin_mathfn_code (arg);
4982 if (flag_unsafe_math_optimizations
4983 && (fcode == BUILT_IN_EXP
4984 || fcode == BUILT_IN_EXPF
4985 || fcode == BUILT_IN_EXPL))
4986 {
4987 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
4988 arg = fold (build (MULT_EXPR, type,
4989 TREE_VALUE (TREE_OPERAND (arg, 1)),
4990 build_real (type, dconsthalf)));
4991 arglist = build_tree_list (NULL_TREE, arg);
4992 return build_function_call_expr (expfn, arglist);
4993 }
4994
4995 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
4996 if (flag_unsafe_math_optimizations
4997 && (fcode == BUILT_IN_POW
4998 || fcode == BUILT_IN_POWF
4999 || fcode == BUILT_IN_POWL))
5000 {
5001 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5002 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5003 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5004 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5005 build_real (type, dconsthalf)));
5006 arglist = tree_cons (NULL_TREE, arg0,
5007 build_tree_list (NULL_TREE, narg1));
5008 return build_function_call_expr (powfn, arglist);
5009 }
5010 }
5011 break;
5012
5013 case BUILT_IN_SIN:
5014 case BUILT_IN_SINF:
5015 case BUILT_IN_SINL:
5016 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5017 {
5018 tree arg = TREE_VALUE (arglist);
5019
5020 /* Optimize sin(0.0) = 0.0. */
5021 if (real_zerop (arg))
5022 return build_real (type, dconst0);
5023 }
5024 break;
5025
5026 case BUILT_IN_COS:
5027 case BUILT_IN_COSF:
5028 case BUILT_IN_COSL:
5029 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5030 {
5031 tree arg = TREE_VALUE (arglist);
5032
5033 /* Optimize cos(0.0) = 1.0. */
5034 if (real_zerop (arg))
5035 return build_real (type, dconst1);
5036 }
5037 break;
5038
5039 case BUILT_IN_EXP:
5040 case BUILT_IN_EXPF:
5041 case BUILT_IN_EXPL:
5042 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5043 {
5044 enum built_in_function fcode;
5045 tree arg = TREE_VALUE (arglist);
5046
5047 /* Optimize exp(0.0) = 1.0. */
5048 if (real_zerop (arg))
5049 return build_real (type, dconst1);
5050
5051 /* Optimize exp(log(x)) = x. */
5052 fcode = builtin_mathfn_code (arg);
5053 if (flag_unsafe_math_optimizations
5054 && (fcode == BUILT_IN_LOG
5055 || fcode == BUILT_IN_LOGF
5056 || fcode == BUILT_IN_LOGL))
5057 return TREE_VALUE (TREE_OPERAND (arg, 1));
5058 }
5059 break;
5060
5061 case BUILT_IN_LOG:
5062 case BUILT_IN_LOGF:
5063 case BUILT_IN_LOGL:
5064 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5065 {
5066 enum built_in_function fcode;
5067 tree arg = TREE_VALUE (arglist);
5068
5069 /* Optimize log(1.0) = 0.0. */
5070 if (real_onep (arg))
5071 return build_real (type, dconst0);
5072
5073 /* Optimize log(exp(x)) = x. */
5074 fcode = builtin_mathfn_code (arg);
5075 if (flag_unsafe_math_optimizations
5076 && (fcode == BUILT_IN_EXP
5077 || fcode == BUILT_IN_EXPF
5078 || fcode == BUILT_IN_EXPL))
5079 return TREE_VALUE (TREE_OPERAND (arg, 1));
5080
5081 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5082 if (flag_unsafe_math_optimizations
5083 && (fcode == BUILT_IN_SQRT
5084 || fcode == BUILT_IN_SQRTF
5085 || fcode == BUILT_IN_SQRTL))
5086 {
5087 tree logfn = build_function_call_expr (fndecl,
5088 TREE_OPERAND (arg, 1));
5089 return fold (build (MULT_EXPR, type, logfn,
5090 build_real (type, dconsthalf)));
5091 }
5092
5093 /* Optimize log(pow(x,y)) = y*log(x). */
5094 if (flag_unsafe_math_optimizations
5095 && (fcode == BUILT_IN_POW
5096 || fcode == BUILT_IN_POWF
5097 || fcode == BUILT_IN_POWL))
5098 {
5099 tree arg0, arg1, logfn;
5100
5101 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5102 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5103 arglist = build_tree_list (NULL_TREE, arg0);
5104 logfn = build_function_call_expr (fndecl, arglist);
5105 return fold (build (MULT_EXPR, type, arg1, logfn));
5106 }
5107 }
5108 break;
5109
5110 case BUILT_IN_POW:
5111 case BUILT_IN_POWF:
5112 case BUILT_IN_POWL:
5113 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5114 {
5115 enum built_in_function fcode;
5116 tree arg0 = TREE_VALUE (arglist);
5117 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5118
5119 /* Optimize pow(1.0,y) = 1.0. */
5120 if (real_onep (arg0))
5121 return omit_one_operand (type, build_real (type, dconst1), arg1);
5122
5123 if (TREE_CODE (arg1) == REAL_CST
5124 && ! TREE_CONSTANT_OVERFLOW (arg1))
5125 {
5126 REAL_VALUE_TYPE c;
5127 c = TREE_REAL_CST (arg1);
5128
5129 /* Optimize pow(x,0.0) = 1.0. */
5130 if (REAL_VALUES_EQUAL (c, dconst0))
5131 return omit_one_operand (type, build_real (type, dconst1),
5132 arg0);
5133
5134 /* Optimize pow(x,1.0) = x. */
5135 if (REAL_VALUES_EQUAL (c, dconst1))
5136 return arg0;
5137
5138 /* Optimize pow(x,-1.0) = 1.0/x. */
5139 if (REAL_VALUES_EQUAL (c, dconstm1))
5140 return fold (build (RDIV_EXPR, type,
5141 build_real (type, dconst1),
5142 arg0));
5143
5144 /* Optimize pow(x,2.0) = x*x. */
5145 if (REAL_VALUES_EQUAL (c, dconst2)
5146 && (*lang_hooks.decls.global_bindings_p) () == 0
5147 && ! contains_placeholder_p (arg0))
5148 {
5149 arg0 = save_expr (arg0);
5150 return fold (build (MULT_EXPR, type, arg0, arg0));
5151 }
5152
5153 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5154 if (flag_unsafe_math_optimizations
5155 && REAL_VALUES_EQUAL (c, dconstm2)
5156 && (*lang_hooks.decls.global_bindings_p) () == 0
5157 && ! contains_placeholder_p (arg0))
5158 {
5159 arg0 = save_expr (arg0);
5160 return fold (build (RDIV_EXPR, type,
5161 build_real (type, dconst1),
5162 fold (build (MULT_EXPR, type,
5163 arg0, arg0))));
5164 }
5165
5166 /* Optimize pow(x,0.5) = sqrt(x). */
5167 if (flag_unsafe_math_optimizations
5168 && REAL_VALUES_EQUAL (c, dconsthalf))
5169 {
5170 tree sqrtfn;
5171
5172 fcode = DECL_FUNCTION_CODE (fndecl);
5173 if (fcode == BUILT_IN_POW)
5174 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5175 else if (fcode == BUILT_IN_POWF)
5176 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5177 else if (fcode == BUILT_IN_POWL)
5178 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5179 else
5180 sqrtfn = NULL_TREE;
5181
5182 if (sqrtfn != NULL_TREE)
5183 {
5184 tree arglist = build_tree_list (NULL_TREE, arg0);
5185 return build_function_call_expr (sqrtfn, arglist);
5186 }
5187 }
5188 }
5189
5190 /* Optimize pow(exp(x),y) = exp(x*y). */
5191 fcode = builtin_mathfn_code (arg0);
5192 if (flag_unsafe_math_optimizations
5193 && (fcode == BUILT_IN_EXP
5194 || fcode == BUILT_IN_EXPF
5195 || fcode == BUILT_IN_EXPL))
5196 {
5197 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5198 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5199 arg = fold (build (MULT_EXPR, type, arg, arg1));
5200 arglist = build_tree_list (NULL_TREE, arg);
5201 return build_function_call_expr (expfn, arglist);
5202 }
5203
5204 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5205 if (flag_unsafe_math_optimizations
5206 && (fcode == BUILT_IN_SQRT
5207 || fcode == BUILT_IN_SQRTF
5208 || fcode == BUILT_IN_SQRTL))
5209 {
5210 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5211 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5212 build_real (type, dconsthalf)));
5213
5214 arglist = tree_cons (NULL_TREE, narg0,
5215 build_tree_list (NULL_TREE, narg1));
5216 return build_function_call_expr (fndecl, arglist);
5217 }
5218
5219 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5220 if (flag_unsafe_math_optimizations
5221 && (fcode == BUILT_IN_POW
5222 || fcode == BUILT_IN_POWF
5223 || fcode == BUILT_IN_POWL))
5224 {
5225 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5226 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5227 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5228 arglist = tree_cons (NULL_TREE, arg00,
5229 build_tree_list (NULL_TREE, narg1));
5230 return build_function_call_expr (fndecl, arglist);
5231 }
5232 }
5233 break;
5234
5235 case BUILT_IN_INF:
5236 case BUILT_IN_INFF:
5237 case BUILT_IN_INFL:
5238 return fold_builtin_inf (type, true);
5239
5240 case BUILT_IN_HUGE_VAL:
5241 case BUILT_IN_HUGE_VALF:
5242 case BUILT_IN_HUGE_VALL:
5243 return fold_builtin_inf (type, false);
5244
5245 case BUILT_IN_NAN:
5246 case BUILT_IN_NANF:
5247 case BUILT_IN_NANL:
5248 return fold_builtin_nan (arglist, type, true);
5249
5250 case BUILT_IN_NANS:
5251 case BUILT_IN_NANSF:
5252 case BUILT_IN_NANSL:
5253 return fold_builtin_nan (arglist, type, false);
5254
5255 case BUILT_IN_FLOOR:
5256 case BUILT_IN_FLOORF:
5257 case BUILT_IN_FLOORL:
5258 case BUILT_IN_CEIL:
5259 case BUILT_IN_CEILF:
5260 case BUILT_IN_CEILL:
5261 case BUILT_IN_TRUNC:
5262 case BUILT_IN_TRUNCF:
5263 case BUILT_IN_TRUNCL:
5264 case BUILT_IN_ROUND:
5265 case BUILT_IN_ROUNDF:
5266 case BUILT_IN_ROUNDL:
5267 case BUILT_IN_NEARBYINT:
5268 case BUILT_IN_NEARBYINTF:
5269 case BUILT_IN_NEARBYINTL:
5270 return fold_trunc_transparent_mathfn (exp);
5271
5272 default:
5273 break;
5274 }
5275
5276 return 0;
5277 }
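
/* Editorial illustration, not part of the original source, of some of
   the foldings above:

       sqrt (4.0)     becomes 2.0 (constant folded);
       pow (x, 2.0)   becomes x * x;
       pow (x, 0.5)   becomes sqrt (x)  (needs -funsafe-math-optimizations);
       log (exp (x))  becomes x         (needs -funsafe-math-optimizations).  */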
5278
5279 /* Conveniently construct a function call expression. */
5280
5281 tree
5282 build_function_call_expr (fn, arglist)
5283 tree fn, arglist;
5284 {
5285 tree call_expr;
5286
5287 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5288 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5289 call_expr, arglist);
5290 TREE_SIDE_EFFECTS (call_expr) = 1;
5291 return fold (call_expr);
5292 }
5293
5294 /* This function validates the types of a function call argument list
5295 represented as a tree chain of parameters against a specified list
5296 of tree_codes. If the last specifier is a 0, that represents an
5297 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
5298
5299 static int
5300 validate_arglist VPARAMS ((tree arglist, ...))
5301 {
5302 enum tree_code code;
5303 int res = 0;
5304
5305 VA_OPEN (ap, arglist);
5306 VA_FIXEDARG (ap, tree, arglist);
5307
5308 do
5309 {
5310 code = va_arg (ap, enum tree_code);
5311 switch (code)
5312 {
5313 case 0:
5314 /* This signifies an ellipsis; any further arguments are all OK. */
5315 res = 1;
5316 goto end;
5317 case VOID_TYPE:
5318 /* This signifies an endlink: if no arguments remain, return
5319 true; otherwise return false. */
5320 res = arglist == 0;
5321 goto end;
5322 default:
5323 /* If no parameters remain or the parameter's code does not
5324 match the specified code, return false. Otherwise continue
5325 checking any remaining arguments. */
5326 if (arglist == 0
5327 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5328 goto end;
5329 break;
5330 }
5331 arglist = TREE_CHAIN (arglist);
5332 }
5333 while (1);
5334
5335 /* We need gotos here since we can only have one VA_CLOSE in a
5336 function. */
5337 end: ;
5338 VA_CLOSE (ap);
5339
5340 return res;
5341 }
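
/* Editorial illustration, not part of the original source:

       validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer followed by an integer, while ending the
   list with 0 instead of VOID_TYPE would allow any number of further
   arguments after the ones checked.  */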
5342
5343 /* Default version of target-specific builtin setup that does nothing. */
5344
5345 void
5346 default_init_builtins ()
5347 {
5348 }
5349
5350 /* Default target-specific builtin expander that does nothing. */
5351
5352 rtx
5353 default_expand_builtin (exp, target, subtarget, mode, ignore)
5354 tree exp ATTRIBUTE_UNUSED;
5355 rtx target ATTRIBUTE_UNUSED;
5356 rtx subtarget ATTRIBUTE_UNUSED;
5357 enum machine_mode mode ATTRIBUTE_UNUSED;
5358 int ignore ATTRIBUTE_UNUSED;
5359 {
5360 return NULL_RTX;
5361 }
5362
5363 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
5364
5365 void
5366 purge_builtin_constant_p ()
5367 {
5368 rtx insn, set, arg, new, note;
5369
5370 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5371 if (INSN_P (insn)
5372 && (set = single_set (insn)) != NULL_RTX
5373 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
5374 || (GET_CODE (arg) == SUBREG
5375 && (GET_CODE (arg = SUBREG_REG (arg))
5376 == CONSTANT_P_RTX))))
5377 {
5378 arg = XEXP (arg, 0);
5379 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
5380 validate_change (insn, &SET_SRC (set), new, 0);
5381
5382 /* Remove the REG_EQUAL note from the insn. */
5383 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
5384 remove_note (insn, note);
5385 }
5386 }
5387