1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
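/* Nonzero if the function NODE was invoked under its "__builtin_" name,
   i.e. its identifier begins with the `__builtin_' prefix.  */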
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Set up an array of _DECL trees, making sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance, the runtime is
79 not required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static int get_pointer_alignment PARAMS ((tree, unsigned int));
83 static tree c_strlen PARAMS ((tree));
84 static const char *c_getstr PARAMS ((tree));
85 static rtx c_readstr PARAMS ((const char *,
86 enum machine_mode));
87 static int target_char_cast PARAMS ((tree, char *));
88 static rtx get_memory_rtx PARAMS ((tree));
89 static int apply_args_size PARAMS ((void));
90 static int apply_result_size PARAMS ((void));
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector PARAMS ((int, rtx));
93 #endif
94 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
95 static void expand_builtin_prefetch PARAMS ((tree));
96 static rtx expand_builtin_apply_args PARAMS ((void));
97 static rtx expand_builtin_apply_args_1 PARAMS ((void));
98 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
99 static void expand_builtin_return PARAMS ((rtx));
100 static enum type_class type_to_class PARAMS ((tree));
101 static rtx expand_builtin_classify_type PARAMS ((tree));
102 static void expand_errno_check PARAMS ((tree, rtx));
103 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
104 static rtx expand_builtin_mathfn_2 PARAMS ((tree, rtx, rtx));
105 static rtx expand_builtin_constant_p PARAMS ((tree));
106 static rtx expand_builtin_args_info PARAMS ((tree));
107 static rtx expand_builtin_next_arg PARAMS ((tree));
108 static rtx expand_builtin_va_start PARAMS ((tree));
109 static rtx expand_builtin_va_end PARAMS ((tree));
110 static rtx expand_builtin_va_copy PARAMS ((tree));
111 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
118 enum machine_mode));
119 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
124 enum machine_mode));
125 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
128 enum machine_mode));
129 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
130 enum machine_mode));
131 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
132 enum machine_mode));
133 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
134 enum machine_mode));
135 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
136 enum machine_mode));
137 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
138 enum machine_mode));
139 static rtx expand_builtin_memset PARAMS ((tree, rtx,
140 enum machine_mode));
141 static rtx expand_builtin_bzero PARAMS ((tree));
142 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
143 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
144 enum machine_mode));
145 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
146 enum machine_mode));
147 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
148 enum machine_mode));
149 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
150 enum machine_mode));
151 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
152 static rtx expand_builtin_unop PARAMS ((tree, rtx, rtx, optab));
153 static rtx expand_builtin_frame_address PARAMS ((tree));
154 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
155 static tree stabilize_va_list PARAMS ((tree, int));
156 static rtx expand_builtin_expect PARAMS ((tree, rtx));
157 static tree fold_builtin_constant_p PARAMS ((tree));
158 static tree fold_builtin_classify_type PARAMS ((tree));
159 static tree fold_builtin_inf PARAMS ((tree, int));
160 static tree fold_builtin_nan PARAMS ((tree, tree, int));
161 static int validate_arglist PARAMS ((tree, ...));
162 static tree fold_trunc_transparent_mathfn PARAMS ((tree));
163
164 /* Return the alignment in bits of EXP, a pointer valued expression.
165 But don't return more than MAX_ALIGN no matter what.
166 The alignment returned is, by default, the alignment of the thing that
167 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
168
169 Otherwise, look at the expression to see if we can do better, i.e., if the
170 expression is actually pointing at an object whose alignment is tighter. */
171
172 static int
173 get_pointer_alignment (exp, max_align)
174 tree exp;
175 unsigned int max_align;
176 {
177 unsigned int align, inner;
178
179 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
180 return 0;
181
182 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
183 align = MIN (align, max_align);
184
185 while (1)
186 {
187 switch (TREE_CODE (exp))
188 {
189 case NOP_EXPR:
190 case CONVERT_EXPR:
191 case NON_LVALUE_EXPR:
192 exp = TREE_OPERAND (exp, 0);
193 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
194 return align;
195
196 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
197 align = MIN (inner, max_align);
198 break;
199
200 case PLUS_EXPR:
201 /* If sum of pointer + int, restrict our maximum alignment to that
202 imposed by the integer. If not, we can't do any better than
203 ALIGN. */
204 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
205 return align;
206
207 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
208 & (max_align / BITS_PER_UNIT - 1))
209 != 0)
210 max_align >>= 1;
211
212 exp = TREE_OPERAND (exp, 0);
213 break;
214
215 case ADDR_EXPR:
216 /* See what we are pointing at and look at its alignment. */
217 exp = TREE_OPERAND (exp, 0);
218 if (TREE_CODE (exp) == FUNCTION_DECL)
219 align = FUNCTION_BOUNDARY;
220 else if (DECL_P (exp))
221 align = DECL_ALIGN (exp);
222 #ifdef CONSTANT_ALIGNMENT
223 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
224 align = CONSTANT_ALIGNMENT (exp, align);
225 #endif
226 return MIN (align, max_align);
227
228 default:
229 return align;
230 }
231 }
232 }
233
234 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
235 way, because it could contain a zero byte in the middle.
236 TREE_STRING_LENGTH is the size of the character array, not the string.
237
238 The value returned is of type `ssizetype'.
239
240 Unfortunately, string_constant can't access the values of const char
241 arrays with initializers, so neither can we here. */
242
243 static tree
244 c_strlen (src)
245 tree src;
246 {
247 tree offset_node;
248 HOST_WIDE_INT offset;
249 int max;
250 const char *ptr;
251
252 src = string_constant (src, &offset_node);
253 if (src == 0)
254 return 0;
255
256 max = TREE_STRING_LENGTH (src) - 1;
257 ptr = TREE_STRING_POINTER (src);
258
259 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
260 {
261 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
262 compute the offset to the following null if we don't know where to
263 start searching for it. */
264 int i;
265
266 for (i = 0; i < max; i++)
267 if (ptr[i] == 0)
268 return 0;
269
270 /* We don't know the starting offset, but we do know that the string
271 has no internal zero bytes. We can assume that the offset falls
272 within the bounds of the string; otherwise, the programmer deserves
273 what he gets. Subtract the offset from the length of the string,
274 and return that. This would perhaps not be valid if we were dealing
275 with named arrays in addition to literal string constants. */
276
277 return size_diffop (size_int (max), offset_node);
278 }
279
280 /* We have a known offset into the string. Start searching there for
281 a null character if we can represent it as a single HOST_WIDE_INT. */
282 if (offset_node == 0)
283 offset = 0;
284 else if (! host_integerp (offset_node, 0))
285 offset = -1;
286 else
287 offset = tree_low_cst (offset_node, 0);
288
289 /* If the offset is known to be out of bounds, warn, and call strlen at
290 runtime. */
291 if (offset < 0 || offset > max)
292 {
293 warning ("offset outside bounds of constant string");
294 return 0;
295 }
296
297 /* Use strlen to search for the first zero byte. Since any strings
298 constructed with build_string will have nulls appended, we win even
299 if we get handed something like (char[4])"abcd".
300
301 Since OFFSET is our starting index into the string, no further
302 calculation is needed. */
303 return ssize_int (strlen (ptr + offset));
304 }
305
306 /* Return a char pointer for a C string if it is a string constant
307 or the sum of a string constant and an integer constant. */
308
309 static const char *
310 c_getstr (src)
311 tree src;
312 {
313 tree offset_node;
314
315 src = string_constant (src, &offset_node);
316 if (src == 0)
317 return 0;
318
319 if (offset_node == 0)
320 return TREE_STRING_POINTER (src);
321 else if (!host_integerp (offset_node, 1)
322 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
323 return 0;
324
325 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
326 }
327
328 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
329 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
330
331 static rtx
332 c_readstr (str, mode)
333 const char *str;
334 enum machine_mode mode;
335 {
336 HOST_WIDE_INT c[2];
337 HOST_WIDE_INT ch;
338 unsigned int i, j;
339
340 if (GET_MODE_CLASS (mode) != MODE_INT)
341 abort ();
342 c[0] = 0;
343 c[1] = 0;
344 ch = 1;
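  /* Assemble the value one target byte at a time, placing each byte at the
     bit position the target would read it from; once the string's NUL has
     been seen, CH stays zero and the remaining bytes are zero-filled.  */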
345 for (i = 0; i < GET_MODE_SIZE (mode); i++)
346 {
347 j = i;
348 if (WORDS_BIG_ENDIAN)
349 j = GET_MODE_SIZE (mode) - i - 1;
350 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
351 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
352 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
353 j *= BITS_PER_UNIT;
354 if (j > 2 * HOST_BITS_PER_WIDE_INT)
355 abort ();
356 if (ch)
357 ch = (unsigned char) str[i];
358 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
359 }
360 return immed_double_const (c[0], c[1], mode);
361 }
362
363 /* Cast a target constant CST to a target CHAR, and if that value fits into
364 the host char type, return zero and put the value into the variable
365 pointed to by P. Otherwise return one. */
366
367 static int
368 target_char_cast (cst, p)
369 tree cst;
370 char *p;
371 {
372 unsigned HOST_WIDE_INT val, hostval;
373
374 if (!host_integerp (cst, 1)
375 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
376 return 1;
377
378 val = tree_low_cst (cst, 1);
379 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
380 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
381
382 hostval = val;
383 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
384 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
385
386 if (val != hostval)
387 return 1;
388
389 *p = hostval;
390 return 0;
391 }
392
393 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
394 times to get the address of either a higher stack frame, or a return
395 address located within it (depending on FNDECL_CODE). */
396
397 rtx
398 expand_builtin_return_addr (fndecl_code, count, tem)
399 enum built_in_function fndecl_code;
400 int count;
401 rtx tem;
402 {
403 int i;
404
405 /* Some machines need special handling before we can access
406 arbitrary frames. For example, on the sparc, we must first flush
407 all register windows to the stack. */
408 #ifdef SETUP_FRAME_ADDRESSES
409 if (count > 0)
410 SETUP_FRAME_ADDRESSES ();
411 #endif
412
413 /* On the sparc, the return address is not in the frame, it is in a
414 register. There is no way to access it off of the current frame
415 pointer, but it can be accessed off the previous frame pointer by
416 reading the value from the register window save area. */
417 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
418 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
419 count--;
420 #endif
421
422 /* Scan back COUNT frames to the specified frame. */
423 for (i = 0; i < count; i++)
424 {
425 /* Assume the dynamic chain pointer is in the word that the
426 frame address points to, unless otherwise specified. */
427 #ifdef DYNAMIC_CHAIN_ADDRESS
428 tem = DYNAMIC_CHAIN_ADDRESS (tem);
429 #endif
430 tem = memory_address (Pmode, tem);
431 tem = gen_rtx_MEM (Pmode, tem);
432 set_mem_alias_set (tem, get_frame_alias_set ());
433 tem = copy_to_reg (tem);
434 }
435
436 /* For __builtin_frame_address, return what we've got. */
437 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
438 return tem;
439
440 /* For __builtin_return_address, get the return address from that
441 frame. */
442 #ifdef RETURN_ADDR_RTX
443 tem = RETURN_ADDR_RTX (count, tem);
444 #else
445 tem = memory_address (Pmode,
446 plus_constant (tem, GET_MODE_SIZE (Pmode)));
447 tem = gen_rtx_MEM (Pmode, tem);
448 set_mem_alias_set (tem, get_frame_alias_set ());
449 #endif
450 return tem;
451 }
452
453 /* Alias set used for setjmp buffer. */
454 static HOST_WIDE_INT setjmp_alias_set = -1;
455
456 /* Construct the leading half of a __builtin_setjmp call. Control will
457 return to RECEIVER_LABEL. This is used directly by sjlj exception
458 handling code. */
459
460 void
461 expand_builtin_setjmp_setup (buf_addr, receiver_label)
462 rtx buf_addr;
463 rtx receiver_label;
464 {
465 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
466 rtx stack_save;
467 rtx mem;
468
469 if (setjmp_alias_set == -1)
470 setjmp_alias_set = new_alias_set ();
471
472 #ifdef POINTERS_EXTEND_UNSIGNED
473 if (GET_MODE (buf_addr) != Pmode)
474 buf_addr = convert_memory_address (Pmode, buf_addr);
475 #endif
476
477 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
478
479 emit_queue ();
480
481 /* We store the frame pointer and the address of receiver_label in
482 the buffer and use the rest of it for the stack save area, which
483 is machine-dependent. */
484
485 #ifndef BUILTIN_SETJMP_FRAME_VALUE
486 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
487 #endif
488
489 mem = gen_rtx_MEM (Pmode, buf_addr);
490 set_mem_alias_set (mem, setjmp_alias_set);
491 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
492
493 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
494 set_mem_alias_set (mem, setjmp_alias_set);
495
496 emit_move_insn (validize_mem (mem),
497 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
498
499 stack_save = gen_rtx_MEM (sa_mode,
500 plus_constant (buf_addr,
501 2 * GET_MODE_SIZE (Pmode)));
502 set_mem_alias_set (stack_save, setjmp_alias_set);
503 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
504
505 /* If there is further processing to do, do it. */
506 #ifdef HAVE_builtin_setjmp_setup
507 if (HAVE_builtin_setjmp_setup)
508 emit_insn (gen_builtin_setjmp_setup (buf_addr));
509 #endif
510
511 /* Tell optimize_save_area_alloca that extra work will need to
512 be done during alloca.
513 current_function_calls_setjmp = 1;
514
515 /* Set this so all the registers get saved in our frame; we need to be
516 able to copy the saved values for any registers from frames we unwind. */
517 current_function_has_nonlocal_label = 1;
518 }
519
520 /* Construct the trailing part of a __builtin_setjmp call.
521 This is used directly by sjlj exception handling code. */
522
523 void
524 expand_builtin_setjmp_receiver (receiver_label)
525 rtx receiver_label ATTRIBUTE_UNUSED;
526 {
527 /* Clobber the FP when we get here, so we have to make sure it's
528 marked as used by this function. */
529 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
530
531 /* Mark the static chain as clobbered here so life information
532 doesn't get messed up for it. */
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
534
535 /* Now put in the code to restore the frame pointer, and argument
536 pointer, if needed. The code below is from expand_end_bindings
537 in stmt.c; see detailed documentation there. */
538 #ifdef HAVE_nonlocal_goto
539 if (! HAVE_nonlocal_goto)
540 #endif
541 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
542
543 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
544 if (fixed_regs[ARG_POINTER_REGNUM])
545 {
546 #ifdef ELIMINABLE_REGS
547 size_t i;
548 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
549
550 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
551 if (elim_regs[i].from == ARG_POINTER_REGNUM
552 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
553 break;
554
555 if (i == ARRAY_SIZE (elim_regs))
556 #endif
557 {
558 /* Now restore our arg pointer from the address at which it
559 was saved in our stack frame. */
560 emit_move_insn (virtual_incoming_args_rtx,
561 copy_to_reg (get_arg_pointer_save_area (cfun)));
562 }
563 }
564 #endif
565
566 #ifdef HAVE_builtin_setjmp_receiver
567 if (HAVE_builtin_setjmp_receiver)
568 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
569 else
570 #endif
571 #ifdef HAVE_nonlocal_goto_receiver
572 if (HAVE_nonlocal_goto_receiver)
573 emit_insn (gen_nonlocal_goto_receiver ());
574 else
575 #endif
576 { /* Nothing */ }
577
578 /* @@@ This is a kludge. Not all machine descriptions define a blockage
579 insn, but we must not allow the code we just generated to be reordered
580 by scheduling. Specifically, the update of the frame pointer must
581 happen immediately, not later. So emit an ASM_INPUT to act as blockage
582 insn. */
583 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
584 }
585
586 /* __builtin_setjmp is passed a pointer to an array of five words (not
587 all will be used on all machines). It operates similarly to the C
588 library function of the same name, but is more efficient. Much of
589 the code below (and for longjmp) is copied from the handling of
590 non-local gotos.
591
592 NOTE: This is intended for use by GNAT and the exception handling
593 scheme in the compiler and will only work in the method used by
594 them. */
595
596 static rtx
597 expand_builtin_setjmp (arglist, target)
598 tree arglist;
599 rtx target;
600 {
601 rtx buf_addr, next_lab, cont_lab;
602
603 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
604 return NULL_RTX;
605
606 if (target == 0 || GET_CODE (target) != REG
607 || REGNO (target) < FIRST_PSEUDO_REGISTER)
608 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
609
610 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
611
612 next_lab = gen_label_rtx ();
613 cont_lab = gen_label_rtx ();
614
615 expand_builtin_setjmp_setup (buf_addr, next_lab);
616
617 /* Set TARGET to zero and branch to the continue label. */
618 emit_move_insn (target, const0_rtx);
619 emit_jump_insn (gen_jump (cont_lab));
620 emit_barrier ();
621 emit_label (next_lab);
622
623 expand_builtin_setjmp_receiver (next_lab);
624
625 /* Set TARGET to one. */
626 emit_move_insn (target, const1_rtx);
627 emit_label (cont_lab);
628
629 /* Tell flow about the strange goings on. Putting `next_lab' on
630 `nonlocal_goto_handler_labels' indicates that function
631 calls may traverse the arc back to this label. */
632
633 current_function_has_nonlocal_label = 1;
634 nonlocal_goto_handler_labels
635 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
636
637 return target;
638 }
639
640 /* __builtin_longjmp is passed a pointer to an array of five words (not
641 all will be used on all machines). It operates similarly to the C
642 library function of the same name, but is more efficient. Much of
643 the code below is copied from the handling of non-local gotos.
644
645 NOTE: This is intended for use by GNAT and the exception handling
646 scheme in the compiler and will only work in the method used by
647 them. */
648
649 void
650 expand_builtin_longjmp (buf_addr, value)
651 rtx buf_addr, value;
652 {
653 rtx fp, lab, stack, insn, last;
654 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
655
656 if (setjmp_alias_set == -1)
657 setjmp_alias_set = new_alias_set ();
658
659 #ifdef POINTERS_EXTEND_UNSIGNED
660 if (GET_MODE (buf_addr) != Pmode)
661 buf_addr = convert_memory_address (Pmode, buf_addr);
662 #endif
663
664 buf_addr = force_reg (Pmode, buf_addr);
665
666 /* We used to store value in static_chain_rtx, but that fails if pointers
667 are smaller than integers. We instead require that the user must pass
668 a second argument of 1, because that is what builtin_setjmp will
669 return. This also makes EH slightly more efficient, since we are no
670 longer copying around a value that we don't care about. */
671 if (value != const1_rtx)
672 abort ();
673
674 current_function_calls_longjmp = 1;
675
676 last = get_last_insn ();
677 #ifdef HAVE_builtin_longjmp
678 if (HAVE_builtin_longjmp)
679 emit_insn (gen_builtin_longjmp (buf_addr));
680 else
681 #endif
682 {
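      /* The buffer was laid out by expand_builtin_setjmp_setup: word 0 holds
         the saved frame pointer, word 1 the receiver label, and the remaining
         words the machine-dependent stack save area.  */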
683 fp = gen_rtx_MEM (Pmode, buf_addr);
684 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
685 GET_MODE_SIZE (Pmode)));
686
687 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
688 2 * GET_MODE_SIZE (Pmode)));
689 set_mem_alias_set (fp, setjmp_alias_set);
690 set_mem_alias_set (lab, setjmp_alias_set);
691 set_mem_alias_set (stack, setjmp_alias_set);
692
693 /* Pick up FP, label, and SP from the block and jump. This code is
694 from expand_goto in stmt.c; see there for detailed comments. */
695 #if HAVE_nonlocal_goto
696 if (HAVE_nonlocal_goto)
697 /* We have to pass a value to the nonlocal_goto pattern that will
698 get copied into the static_chain pointer, but it does not matter
699 what that value is, because builtin_setjmp does not use it. */
700 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
701 else
702 #endif
703 {
704 lab = copy_to_reg (lab);
705
706 emit_move_insn (hard_frame_pointer_rtx, fp);
707 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
708
709 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
710 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
711 emit_indirect_jump (lab);
712 }
713 }
714
715 /* Search backwards and mark the jump insn as a non-local goto.
716 Note that this precludes the use of __builtin_longjmp to a
717 __builtin_setjmp target in the same function. However, we've
718 already cautioned the user that these functions are for
719 internal exception handling use only. */
720 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
721 {
722 if (insn == last)
723 abort ();
724 if (GET_CODE (insn) == JUMP_INSN)
725 {
726 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
727 REG_NOTES (insn));
728 break;
729 }
730 else if (GET_CODE (insn) == CALL_INSN)
731 break;
732 }
733 }
734
735 /* Expand a call to __builtin_prefetch. For a target that does not support
736 data prefetch, evaluate the memory address argument in case it has side
737 effects. */
738
739 static void
740 expand_builtin_prefetch (arglist)
741 tree arglist;
742 {
743 tree arg0, arg1, arg2;
744 rtx op0, op1, op2;
745
746 if (!validate_arglist (arglist, POINTER_TYPE, 0))
747 return;
748
749 arg0 = TREE_VALUE (arglist);
750 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
751 zero (read) and argument 2 (locality) defaults to 3 (high degree of
752 locality). */
753 if (TREE_CHAIN (arglist))
754 {
755 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
756 if (TREE_CHAIN (TREE_CHAIN (arglist)))
757 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
758 else
759 arg2 = build_int_2 (3, 0);
760 }
761 else
762 {
763 arg1 = integer_zero_node;
764 arg2 = build_int_2 (3, 0);
765 }
766
767 /* Argument 0 is an address. */
768 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
769
770 /* Argument 1 (read/write flag) must be a compile-time constant int. */
771 if (TREE_CODE (arg1) != INTEGER_CST)
772 {
773 error ("second arg to `__builtin_prefetch' must be a constant");
774 arg1 = integer_zero_node;
775 }
776 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
777 /* Argument 1 must be either zero or one. */
778 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
779 {
780 warning ("invalid second arg to __builtin_prefetch; using zero");
781 op1 = const0_rtx;
782 }
783
784 /* Argument 2 (locality) must be a compile-time constant int. */
785 if (TREE_CODE (arg2) != INTEGER_CST)
786 {
787 error ("third arg to `__builtin_prefetch' must be a constant");
788 arg2 = integer_zero_node;
789 }
790 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
791 /* Argument 2 must be 0, 1, 2, or 3. */
792 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
793 {
794 warning ("invalid third arg to __builtin_prefetch; using zero");
795 op2 = const0_rtx;
796 }
797
798 #ifdef HAVE_prefetch
799 if (HAVE_prefetch)
800 {
801 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
802 (op0,
803 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
804 || (GET_MODE(op0) != Pmode))
805 {
806 #ifdef POINTERS_EXTEND_UNSIGNED
807 if (GET_MODE(op0) != Pmode)
808 op0 = convert_memory_address (Pmode, op0);
809 #endif
810 op0 = force_reg (Pmode, op0);
811 }
812 emit_insn (gen_prefetch (op0, op1, op2));
813 }
814 else
815 #endif
816 op0 = protect_from_queue (op0, 0);
817 /* Don't do anything with direct references to volatile memory, but
818 generate code to handle other side effects. */
819 if (GET_CODE (op0) != MEM && side_effects_p (op0))
820 emit_insn (op0);
821 }
822
823 /* Get a MEM rtx for expression EXP which is the address of an operand
824 to be used in a string instruction (cmpstrsi, movstrsi, ..). */
825
826 static rtx
827 get_memory_rtx (exp)
828 tree exp;
829 {
830 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
831 rtx mem;
832
833 #ifdef POINTERS_EXTEND_UNSIGNED
834 if (GET_MODE (addr) != Pmode)
835 addr = convert_memory_address (Pmode, addr);
836 #endif
837
838 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
839
840 /* Get an expression we can use to find the attributes to assign to MEM.
841 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
842 we can. First remove any nops. */
843 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
844 || TREE_CODE (exp) == NON_LVALUE_EXPR)
845 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
846 exp = TREE_OPERAND (exp, 0);
847
848 if (TREE_CODE (exp) == ADDR_EXPR)
849 {
850 exp = TREE_OPERAND (exp, 0);
851 set_mem_attributes (mem, exp, 0);
852 }
853 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
854 {
855 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
856 /* memcpy, memset and other builtin stringops can alias with anything. */
857 set_mem_alias_set (mem, 0);
858 }
859
860 return mem;
861 }
862 \f
863 /* Built-in functions to perform an untyped call and return. */
864
865 /* For each register that may be used for calling a function, this
866 gives a mode used to copy the register's value. VOIDmode indicates
867 the register is not used for calling a function. If the machine
868 has register windows, this gives only the outbound registers.
869 INCOMING_REGNO gives the corresponding inbound register. */
870 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
871
872 /* For each register that may be used for returning values, this gives
873 a mode used to copy the register's value. VOIDmode indicates the
874 register is not used for returning values. If the machine has
875 register windows, this gives only the outbound registers.
876 INCOMING_REGNO gives the corresponding inbound register. */
877 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
878
879 /* For each register that may be used for calling a function, this
880 gives the offset of that register into the block returned by
881 __builtin_apply_args. 0 indicates that the register is not
882 used for calling a function. */
883 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
884
885 /* Return the offset of register REGNO into the block returned by
886 __builtin_apply_args. This is not declared static, since it is
887 needed in objc-act.c. */
888
889 int
890 apply_args_register_offset (regno)
891 int regno;
892 {
893 apply_args_size ();
894
895 /* Arguments are always put in outgoing registers (in the argument
896 block) when doing so makes sense. */
897 #ifdef OUTGOING_REGNO
898 regno = OUTGOING_REGNO (regno);
899 #endif
900 return apply_args_reg_offset[regno];
901 }
902
903 /* Return the size required for the block returned by __builtin_apply_args,
904 and initialize apply_args_mode. */
905
906 static int
907 apply_args_size ()
908 {
909 static int size = -1;
910 int align;
911 unsigned int regno;
912 enum machine_mode mode;
913
914 /* The values computed by this function never change. */
915 if (size < 0)
916 {
917 /* The first value is the incoming arg-pointer. */
918 size = GET_MODE_SIZE (Pmode);
919
920 /* The second value is the structure value address unless this is
921 passed as an "invisible" first argument. */
922 if (struct_value_rtx)
923 size += GET_MODE_SIZE (Pmode);
924
925 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
926 if (FUNCTION_ARG_REGNO_P (regno))
927 {
928 /* Search for the proper mode for copying this register's
929 value. I'm not sure this is right, but it works so far. */
930 enum machine_mode best_mode = VOIDmode;
931
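	    /* Prefer the widest integer mode that occupies a single hard
	       register; if none works, fall back to float and then vector
	       modes for which a move instruction exists.  */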
932 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
933 mode != VOIDmode;
934 mode = GET_MODE_WIDER_MODE (mode))
935 if (HARD_REGNO_MODE_OK (regno, mode)
936 && HARD_REGNO_NREGS (regno, mode) == 1)
937 best_mode = mode;
938
939 if (best_mode == VOIDmode)
940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
941 mode != VOIDmode;
942 mode = GET_MODE_WIDER_MODE (mode))
943 if (HARD_REGNO_MODE_OK (regno, mode)
944 && have_insn_for (SET, mode))
945 best_mode = mode;
946
947 if (best_mode == VOIDmode)
948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
949 mode != VOIDmode;
950 mode = GET_MODE_WIDER_MODE (mode))
951 if (HARD_REGNO_MODE_OK (regno, mode)
952 && have_insn_for (SET, mode))
953 best_mode = mode;
954
955 if (best_mode == VOIDmode)
956 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
957 mode != VOIDmode;
958 mode = GET_MODE_WIDER_MODE (mode))
959 if (HARD_REGNO_MODE_OK (regno, mode)
960 && have_insn_for (SET, mode))
961 best_mode = mode;
962
963 mode = best_mode;
964 if (mode == VOIDmode)
965 abort ();
966
967 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
968 if (size % align != 0)
969 size = CEIL (size, align) * align;
970 apply_args_reg_offset[regno] = size;
971 size += GET_MODE_SIZE (mode);
972 apply_args_mode[regno] = mode;
973 }
974 else
975 {
976 apply_args_mode[regno] = VOIDmode;
977 apply_args_reg_offset[regno] = 0;
978 }
979 }
980 return size;
981 }
982
983 /* Return the size required for the block returned by __builtin_apply,
984 and initialize apply_result_mode. */
985
986 static int
987 apply_result_size ()
988 {
989 static int size = -1;
990 int align, regno;
991 enum machine_mode mode;
992
993 /* The values computed by this function never change. */
994 if (size < 0)
995 {
996 size = 0;
997
998 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
999 if (FUNCTION_VALUE_REGNO_P (regno))
1000 {
1001 /* Search for the proper mode for copying this register's
1002 value. I'm not sure this is right, but it works so far. */
1003 enum machine_mode best_mode = VOIDmode;
1004
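	    /* Prefer the widest integer mode narrower than TImode that the
	       register can hold, then fall back to float and vector modes
	       that we know how to move.  */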
1005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1006 mode != TImode;
1007 mode = GET_MODE_WIDER_MODE (mode))
1008 if (HARD_REGNO_MODE_OK (regno, mode))
1009 best_mode = mode;
1010
1011 if (best_mode == VOIDmode)
1012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1013 mode != VOIDmode;
1014 mode = GET_MODE_WIDER_MODE (mode))
1015 if (HARD_REGNO_MODE_OK (regno, mode)
1016 && have_insn_for (SET, mode))
1017 best_mode = mode;
1018
1019 if (best_mode == VOIDmode)
1020 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1021 mode != VOIDmode;
1022 mode = GET_MODE_WIDER_MODE (mode))
1023 if (HARD_REGNO_MODE_OK (regno, mode)
1024 && have_insn_for (SET, mode))
1025 best_mode = mode;
1026
1027 if (best_mode == VOIDmode)
1028 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1029 mode != VOIDmode;
1030 mode = GET_MODE_WIDER_MODE (mode))
1031 if (HARD_REGNO_MODE_OK (regno, mode)
1032 && have_insn_for (SET, mode))
1033 best_mode = mode;
1034
1035 mode = best_mode;
1036 if (mode == VOIDmode)
1037 abort ();
1038
1039 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1040 if (size % align != 0)
1041 size = CEIL (size, align) * align;
1042 size += GET_MODE_SIZE (mode);
1043 apply_result_mode[regno] = mode;
1044 }
1045 else
1046 apply_result_mode[regno] = VOIDmode;
1047
1048 /* Allow targets that use untyped_call and untyped_return to override
1049 the size so that machine-specific information can be stored here. */
1050 #ifdef APPLY_RESULT_SIZE
1051 size = APPLY_RESULT_SIZE;
1052 #endif
1053 }
1054 return size;
1055 }
1056
1057 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1058 /* Create a vector describing the result block RESULT. If SAVEP is true,
1059 the result block is used to save the values; otherwise it is used to
1060 restore the values. */
1061
1062 static rtx
1063 result_vector (savep, result)
1064 int savep;
1065 rtx result;
1066 {
1067 int regno, size, align, nelts;
1068 enum machine_mode mode;
1069 rtx reg, mem;
1070 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1071
1072 size = nelts = 0;
1073 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1074 if ((mode = apply_result_mode[regno]) != VOIDmode)
1075 {
1076 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1077 if (size % align != 0)
1078 size = CEIL (size, align) * align;
1079 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1080 mem = adjust_address (result, mode, size);
1081 savevec[nelts++] = (savep
1082 ? gen_rtx_SET (VOIDmode, mem, reg)
1083 : gen_rtx_SET (VOIDmode, reg, mem));
1084 size += GET_MODE_SIZE (mode);
1085 }
1086 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1087 }
1088 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1089
1090 /* Save the state required to perform an untyped call with the same
1091 arguments as were passed to the current function. */
1092
1093 static rtx
1094 expand_builtin_apply_args_1 ()
1095 {
1096 rtx registers;
1097 int size, align, regno;
1098 enum machine_mode mode;
1099
1100 /* Create a block where the arg-pointer, structure value address,
1101 and argument registers can be saved. */
1102 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1103
1104 /* Walk past the arg-pointer and structure value address. */
1105 size = GET_MODE_SIZE (Pmode);
1106 if (struct_value_rtx)
1107 size += GET_MODE_SIZE (Pmode);
1108
1109 /* Save each register used in calling a function to the block. */
1110 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1111 if ((mode = apply_args_mode[regno]) != VOIDmode)
1112 {
1113 rtx tem;
1114
1115 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1116 if (size % align != 0)
1117 size = CEIL (size, align) * align;
1118
1119 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1120
1121 emit_move_insn (adjust_address (registers, mode, size), tem);
1122 size += GET_MODE_SIZE (mode);
1123 }
1124
1125 /* Save the arg pointer to the block. */
1126 emit_move_insn (adjust_address (registers, Pmode, 0),
1127 copy_to_reg (virtual_incoming_args_rtx));
1128 size = GET_MODE_SIZE (Pmode);
1129
1130 /* Save the structure value address unless this is passed as an
1131 "invisible" first argument. */
1132 if (struct_value_incoming_rtx)
1133 {
1134 emit_move_insn (adjust_address (registers, Pmode, size),
1135 copy_to_reg (struct_value_incoming_rtx));
1136 size += GET_MODE_SIZE (Pmode);
1137 }
1138
1139 /* Return the address of the block. */
1140 return copy_addr_to_reg (XEXP (registers, 0));
1141 }
1142
1143 /* __builtin_apply_args returns a block of memory allocated on
1144 the stack into which is stored the arg pointer, structure
1145 value address, static chain, and all the registers that might
1146 possibly be used in performing a function call. The code is
1147 moved to the start of the function so the incoming values are
1148 saved. */
1149
1150 static rtx
1151 expand_builtin_apply_args ()
1152 {
1153 /* Don't do __builtin_apply_args more than once in a function.
1154 Save the result of the first call and reuse it. */
1155 if (apply_args_value != 0)
1156 return apply_args_value;
1157 {
1158 /* When this function is called, it means that registers must be
1159 saved on entry to this function. So we migrate the
1160 call to the first insn of this function. */
1161 rtx temp;
1162 rtx seq;
1163
1164 start_sequence ();
1165 temp = expand_builtin_apply_args_1 ();
1166 seq = get_insns ();
1167 end_sequence ();
1168
1169 apply_args_value = temp;
1170
1171 /* Put the insns after the NOTE that starts the function.
1172 If this is inside a start_sequence, make the outer-level insn
1173 chain current, so the code is placed at the start of the
1174 function. */
1175 push_topmost_sequence ();
1176 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1177 pop_topmost_sequence ();
1178 return temp;
1179 }
1180 }
1181
1182 /* Perform an untyped call and save the state required to perform an
1183 untyped return of whatever value was returned by the given function. */
1184
1185 static rtx
1186 expand_builtin_apply (function, arguments, argsize)
1187 rtx function, arguments, argsize;
1188 {
1189 int size, align, regno;
1190 enum machine_mode mode;
1191 rtx incoming_args, result, reg, dest, src, call_insn;
1192 rtx old_stack_level = 0;
1193 rtx call_fusage = 0;
1194
1195 #ifdef POINTERS_EXTEND_UNSIGNED
1196 if (GET_MODE (arguments) != Pmode)
1197 arguments = convert_memory_address (Pmode, arguments);
1198 #endif
1199
1200 /* Create a block where the return registers can be saved. */
1201 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1202
1203 /* Fetch the arg pointer from the ARGUMENTS block. */
1204 incoming_args = gen_reg_rtx (Pmode);
1205 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1206 #ifndef STACK_GROWS_DOWNWARD
1207 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1208 incoming_args, 0, OPTAB_LIB_WIDEN);
1209 #endif
1210
1211 /* Perform postincrements before actually calling the function. */
1212 emit_queue ();
1213
1214 /* Push a new argument block and copy the arguments. Do not allow
1215 the (potential) memcpy call below to interfere with our stack
1216 manipulations. */
1217 do_pending_stack_adjust ();
1218 NO_DEFER_POP;
1219
1220 /* Save the stack with nonlocal if available */
1221 #ifdef HAVE_save_stack_nonlocal
1222 if (HAVE_save_stack_nonlocal)
1223 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1224 else
1225 #endif
1226 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1227
1228 /* Push a block of memory onto the stack to store the memory arguments.
1229 Save the address in a register, and copy the memory arguments. ??? I
1230 haven't figured out how the calling convention macros affect this,
1231 but it's likely that the source and/or destination addresses in
1232 the block copy will need updating in machine specific ways. */
1233 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1234 dest = gen_rtx_MEM (BLKmode, dest);
1235 set_mem_align (dest, PARM_BOUNDARY);
1236 src = gen_rtx_MEM (BLKmode, incoming_args);
1237 set_mem_align (src, PARM_BOUNDARY);
1238 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1239
1240 /* Refer to the argument block. */
1241 apply_args_size ();
1242 arguments = gen_rtx_MEM (BLKmode, arguments);
1243 set_mem_align (arguments, PARM_BOUNDARY);
1244
1245 /* Walk past the arg-pointer and structure value address. */
1246 size = GET_MODE_SIZE (Pmode);
1247 if (struct_value_rtx)
1248 size += GET_MODE_SIZE (Pmode);
1249
1250 /* Restore each of the registers previously saved. Make USE insns
1251 for each of these registers for use in making the call. */
1252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1253 if ((mode = apply_args_mode[regno]) != VOIDmode)
1254 {
1255 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1256 if (size % align != 0)
1257 size = CEIL (size, align) * align;
1258 reg = gen_rtx_REG (mode, regno);
1259 emit_move_insn (reg, adjust_address (arguments, mode, size));
1260 use_reg (&call_fusage, reg);
1261 size += GET_MODE_SIZE (mode);
1262 }
1263
1264 /* Restore the structure value address unless this is passed as an
1265 "invisible" first argument. */
1266 size = GET_MODE_SIZE (Pmode);
1267 if (struct_value_rtx)
1268 {
1269 rtx value = gen_reg_rtx (Pmode);
1270 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1271 emit_move_insn (struct_value_rtx, value);
1272 if (GET_CODE (struct_value_rtx) == REG)
1273 use_reg (&call_fusage, struct_value_rtx);
1274 size += GET_MODE_SIZE (Pmode);
1275 }
1276
1277 /* All arguments and registers used for the call are set up by now! */
1278 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1279
1280 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1281 and we don't want to load it into a register as an optimization,
1282 because prepare_call_address already did it if it should be done. */
1283 if (GET_CODE (function) != SYMBOL_REF)
1284 function = memory_address (FUNCTION_MODE, function);
1285
1286 /* Generate the actual call instruction and save the return value. */
1287 #ifdef HAVE_untyped_call
1288 if (HAVE_untyped_call)
1289 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1290 result, result_vector (1, result)));
1291 else
1292 #endif
1293 #ifdef HAVE_call_value
1294 if (HAVE_call_value)
1295 {
1296 rtx valreg = 0;
1297
1298 /* Locate the unique return register. It is not possible to
1299 express a call that sets more than one return register using
1300 call_value; use untyped_call for that. In fact, untyped_call
1301 only needs to save the return registers in the given block. */
1302 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1303 if ((mode = apply_result_mode[regno]) != VOIDmode)
1304 {
1305 if (valreg)
1306 abort (); /* HAVE_untyped_call required. */
1307 valreg = gen_rtx_REG (mode, regno);
1308 }
1309
1310 emit_call_insn (GEN_CALL_VALUE (valreg,
1311 gen_rtx_MEM (FUNCTION_MODE, function),
1312 const0_rtx, NULL_RTX, const0_rtx));
1313
1314 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1315 }
1316 else
1317 #endif
1318 abort ();
1319
1320 /* Find the CALL insn we just emitted. */
1321 for (call_insn = get_last_insn ();
1322 call_insn && GET_CODE (call_insn) != CALL_INSN;
1323 call_insn = PREV_INSN (call_insn))
1324 ;
1325
1326 if (! call_insn)
1327 abort ();
1328
1329 /* Put the register usage information on the CALL. If there is already
1330 some usage information, put ours at the end. */
1331 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1332 {
1333 rtx link;
1334
1335 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1336 link = XEXP (link, 1))
1337 ;
1338
1339 XEXP (link, 1) = call_fusage;
1340 }
1341 else
1342 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1343
1344 /* Restore the stack. */
1345 #ifdef HAVE_save_stack_nonlocal
1346 if (HAVE_save_stack_nonlocal)
1347 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1348 else
1349 #endif
1350 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1351
1352 OK_DEFER_POP;
1353
1354 /* Return the address of the result block. */
1355 return copy_addr_to_reg (XEXP (result, 0));
1356 }
1357
1358 /* Perform an untyped return. */
1359
1360 static void
1361 expand_builtin_return (result)
1362 rtx result;
1363 {
1364 int size, align, regno;
1365 enum machine_mode mode;
1366 rtx reg;
1367 rtx call_fusage = 0;
1368
1369 #ifdef POINTERS_EXTEND_UNSIGNED
1370 if (GET_MODE (result) != Pmode)
1371 result = convert_memory_address (Pmode, result);
1372 #endif
1373
1374 apply_result_size ();
1375 result = gen_rtx_MEM (BLKmode, result);
1376
1377 #ifdef HAVE_untyped_return
1378 if (HAVE_untyped_return)
1379 {
1380 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1381 emit_barrier ();
1382 return;
1383 }
1384 #endif
1385
1386 /* Restore the return value and note that each value is used. */
1387 size = 0;
1388 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1389 if ((mode = apply_result_mode[regno]) != VOIDmode)
1390 {
1391 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1392 if (size % align != 0)
1393 size = CEIL (size, align) * align;
1394 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1395 emit_move_insn (reg, adjust_address (result, mode, size));
1396
1397 push_to_sequence (call_fusage);
1398 emit_insn (gen_rtx_USE (VOIDmode, reg));
1399 call_fusage = get_insns ();
1400 end_sequence ();
1401 size += GET_MODE_SIZE (mode);
1402 }
1403
1404 /* Put the USE insns before the return. */
1405 emit_insn (call_fusage);
1406
1407 /* Return whatever values were restored by jumping directly to the end
1408 of the function. */
1409 expand_null_return ();
1410 }
1411
1412 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1413
1414 static enum type_class
1415 type_to_class (type)
1416 tree type;
1417 {
1418 switch (TREE_CODE (type))
1419 {
1420 case VOID_TYPE: return void_type_class;
1421 case INTEGER_TYPE: return integer_type_class;
1422 case CHAR_TYPE: return char_type_class;
1423 case ENUMERAL_TYPE: return enumeral_type_class;
1424 case BOOLEAN_TYPE: return boolean_type_class;
1425 case POINTER_TYPE: return pointer_type_class;
1426 case REFERENCE_TYPE: return reference_type_class;
1427 case OFFSET_TYPE: return offset_type_class;
1428 case REAL_TYPE: return real_type_class;
1429 case COMPLEX_TYPE: return complex_type_class;
1430 case FUNCTION_TYPE: return function_type_class;
1431 case METHOD_TYPE: return method_type_class;
1432 case RECORD_TYPE: return record_type_class;
1433 case UNION_TYPE:
1434 case QUAL_UNION_TYPE: return union_type_class;
1435 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1436 ? string_type_class : array_type_class);
1437 case SET_TYPE: return set_type_class;
1438 case FILE_TYPE: return file_type_class;
1439 case LANG_TYPE: return lang_type_class;
1440 default: return no_type_class;
1441 }
1442 }
1443
1444 /* Expand a call to __builtin_classify_type with arguments found in
1445 ARGLIST. */
1446
1447 static rtx
1448 expand_builtin_classify_type (arglist)
1449 tree arglist;
1450 {
1451 if (arglist != 0)
1452 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1453 return GEN_INT (no_type_class);
1454 }
1455
1456 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1457
1458 static rtx
1459 expand_builtin_constant_p (exp)
1460 tree exp;
1461 {
1462 tree arglist = TREE_OPERAND (exp, 1);
1463 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1464 rtx tmp;
1465
1466 if (arglist == 0)
1467 return const0_rtx;
1468 arglist = TREE_VALUE (arglist);
1469
1470 /* We have taken care of the easy cases during constant folding. This
1471 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1472 get a chance to see if it can deduce whether ARGLIST is constant. */
1473
1474 current_function_calls_constant_p = 1;
1475
1476 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1477 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1478 return tmp;
1479 }
1480
1481 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1482 if available. */
1483 tree
1484 mathfn_built_in (type, fn)
1485 tree type;
1486 enum built_in_function fn;
1487 {
1488 enum built_in_function fcode = NOT_BUILT_IN;
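  /* Pick the member of FN's family (the double, float or long double
     flavor) whose argument mode matches TYPE's mode.  */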
1489 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1490 switch (fn)
1491 {
1492 case BUILT_IN_SQRT:
1493 case BUILT_IN_SQRTF:
1494 case BUILT_IN_SQRTL:
1495 fcode = BUILT_IN_SQRT;
1496 break;
1497 case BUILT_IN_SIN:
1498 case BUILT_IN_SINF:
1499 case BUILT_IN_SINL:
1500 fcode = BUILT_IN_SIN;
1501 break;
1502 case BUILT_IN_COS:
1503 case BUILT_IN_COSF:
1504 case BUILT_IN_COSL:
1505 fcode = BUILT_IN_COS;
1506 break;
1507 case BUILT_IN_EXP:
1508 case BUILT_IN_EXPF:
1509 case BUILT_IN_EXPL:
1510 fcode = BUILT_IN_EXP;
1511 break;
1512 case BUILT_IN_LOG:
1513 case BUILT_IN_LOGF:
1514 case BUILT_IN_LOGL:
1515 fcode = BUILT_IN_LOG;
1516 break;
1517 case BUILT_IN_FLOOR:
1518 case BUILT_IN_FLOORF:
1519 case BUILT_IN_FLOORL:
1520 fcode = BUILT_IN_FLOOR;
1521 break;
1522 case BUILT_IN_CEIL:
1523 case BUILT_IN_CEILF:
1524 case BUILT_IN_CEILL:
1525 fcode = BUILT_IN_CEIL;
1526 break;
1527 case BUILT_IN_TRUNC:
1528 case BUILT_IN_TRUNCF:
1529 case BUILT_IN_TRUNCL:
1530 fcode = BUILT_IN_TRUNC;
1531 break;
1532 case BUILT_IN_ROUND:
1533 case BUILT_IN_ROUNDF:
1534 case BUILT_IN_ROUNDL:
1535 fcode = BUILT_IN_ROUND;
1536 break;
1537 case BUILT_IN_NEARBYINT:
1538 case BUILT_IN_NEARBYINTF:
1539 case BUILT_IN_NEARBYINTL:
1540 fcode = BUILT_IN_NEARBYINT;
1541 break;
1542 default:
1543 abort ();
1544 }
1545 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1546 switch (fn)
1547 {
1548 case BUILT_IN_SQRT:
1549 case BUILT_IN_SQRTF:
1550 case BUILT_IN_SQRTL:
1551 fcode = BUILT_IN_SQRTF;
1552 break;
1553 case BUILT_IN_SIN:
1554 case BUILT_IN_SINF:
1555 case BUILT_IN_SINL:
1556 fcode = BUILT_IN_SINF;
1557 break;
1558 case BUILT_IN_COS:
1559 case BUILT_IN_COSF:
1560 case BUILT_IN_COSL:
1561 fcode = BUILT_IN_COSF;
1562 break;
1563 case BUILT_IN_EXP:
1564 case BUILT_IN_EXPF:
1565 case BUILT_IN_EXPL:
1566 fcode = BUILT_IN_EXPF;
1567 break;
1568 case BUILT_IN_LOG:
1569 case BUILT_IN_LOGF:
1570 case BUILT_IN_LOGL:
1571 fcode = BUILT_IN_LOGF;
1572 break;
1573 case BUILT_IN_FLOOR:
1574 case BUILT_IN_FLOORF:
1575 case BUILT_IN_FLOORL:
1576 fcode = BUILT_IN_FLOORF;
1577 break;
1578 case BUILT_IN_CEIL:
1579 case BUILT_IN_CEILF:
1580 case BUILT_IN_CEILL:
1581 fcode = BUILT_IN_CEILF;
1582 break;
1583 case BUILT_IN_TRUNC:
1584 case BUILT_IN_TRUNCF:
1585 case BUILT_IN_TRUNCL:
1586 fcode = BUILT_IN_TRUNCF;
1587 break;
1588 case BUILT_IN_ROUND:
1589 case BUILT_IN_ROUNDF:
1590 case BUILT_IN_ROUNDL:
1591 fcode = BUILT_IN_ROUNDF;
1592 break;
1593 case BUILT_IN_NEARBYINT:
1594 case BUILT_IN_NEARBYINTF:
1595 case BUILT_IN_NEARBYINTL:
1596 fcode = BUILT_IN_NEARBYINTF;
1597 break;
1598 default:
1599 abort ();
1600 }
1601 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1602 switch (fn)
1603 {
1604 case BUILT_IN_SQRT:
1605 case BUILT_IN_SQRTF:
1606 case BUILT_IN_SQRTL:
1607 fcode = BUILT_IN_SQRTL;
1608 break;
1609 case BUILT_IN_SIN:
1610 case BUILT_IN_SINF:
1611 case BUILT_IN_SINL:
1612 fcode = BUILT_IN_SINL;
1613 break;
1614 case BUILT_IN_COS:
1615 case BUILT_IN_COSF:
1616 case BUILT_IN_COSL:
1617 fcode = BUILT_IN_COSL;
1618 break;
1619 case BUILT_IN_EXP:
1620 case BUILT_IN_EXPF:
1621 case BUILT_IN_EXPL:
1622 fcode = BUILT_IN_EXPL;
1623 break;
1624 case BUILT_IN_LOG:
1625 case BUILT_IN_LOGF:
1626 case BUILT_IN_LOGL:
1627 fcode = BUILT_IN_LOGL;
1628 break;
1629 case BUILT_IN_FLOOR:
1630 case BUILT_IN_FLOORF:
1631 case BUILT_IN_FLOORL:
1632 fcode = BUILT_IN_FLOORL;
1633 break;
1634 case BUILT_IN_CEIL:
1635 case BUILT_IN_CEILF:
1636 case BUILT_IN_CEILL:
1637 fcode = BUILT_IN_CEILL;
1638 break;
1639 case BUILT_IN_TRUNC:
1640 case BUILT_IN_TRUNCF:
1641 case BUILT_IN_TRUNCL:
1642 fcode = BUILT_IN_TRUNCL;
1643 break;
1644 case BUILT_IN_ROUND:
1645 case BUILT_IN_ROUNDF:
1646 case BUILT_IN_ROUNDL:
1647 fcode = BUILT_IN_ROUNDL;
1648 break;
1649 case BUILT_IN_NEARBYINT:
1650 case BUILT_IN_NEARBYINTF:
1651 case BUILT_IN_NEARBYINTL:
1652 fcode = BUILT_IN_NEARBYINTL;
1653 break;
1654 default:
1655 abort ();
1656 }
1657 return implicit_built_in_decls[fcode];
1658 }
1659
1660 /* If errno must be maintained, expand the RTL to check if the result,
1661 TARGET, of a built-in function call, EXP, is NaN, and if so set
1662 errno to EDOM. */
1663
1664 static void
1665 expand_errno_check (exp, target)
1666 tree exp;
1667 rtx target;
1668 {
1669 rtx lab;
1670
1671 if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
1672 {
1673 lab = gen_label_rtx ();
1674
1675 /* Test the result; if it is NaN (the only value that compares unequal
1676 to itself), set errno=EDOM because the argument was not in the domain. */
1677 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1678 0, lab);
1679
1680 #ifdef TARGET_EDOM
1681 {
1682 #ifdef GEN_ERRNO_RTX
1683 rtx errno_rtx = GEN_ERRNO_RTX;
1684 #else
1685 rtx errno_rtx
1686 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1687 #endif
1688
1689 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1690 }
1691 #else
1692 /* We can't set errno=EDOM directly; let the library call do it.
1693 Pop the arguments right away in case the call gets deleted. */
1694 NO_DEFER_POP;
1695 expand_call (exp, target, 0);
1696 OK_DEFER_POP;
1697 #endif
1698
1699 emit_label (lab);
1700 }
1701 }
1702
1703
1704 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1705 Return 0 if a normal call should be emitted rather than expanding the
1706 function in-line. EXP is the expression that is a call to the builtin
1707 function; if convenient, the result should be placed in TARGET.
1708 SUBTARGET may be used as the target for computing one of EXP's operands. */
1709
1710 static rtx
1711 expand_builtin_mathfn (exp, target, subtarget)
1712 tree exp;
1713 rtx target, subtarget;
1714 {
1715 optab builtin_optab;
1716 rtx op0, insns;
1717 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1718 tree arglist = TREE_OPERAND (exp, 1);
1719 enum machine_mode argmode;
1720 bool errno_set = true;
1721
1722 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1723 return 0;
1724
1725 /* Stabilize and compute the argument. */
1726 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1727 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1728 {
1729 exp = copy_node (exp);
1730 TREE_OPERAND (exp, 1) = arglist;
1731 /* Wrap the computation of the argument in a SAVE_EXPR. That
1732 way, if we need to expand the argument again (as in the
1733 flag_errno_math case below where we cannot directly set
1734 errno), we will not perform side-effects more than once.
1735 Note that here we're mutating the original EXP as well as the
1736 copy; that's the right thing to do in case the original EXP
1737 is expanded later. */
1738 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1739 arglist = copy_node (arglist);
1740 }
1741 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1742
1743 /* Make a suitable register to place result in. */
1744 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1745
1746 emit_queue ();
1747 start_sequence ();
1748
1749 switch (DECL_FUNCTION_CODE (fndecl))
1750 {
1751 case BUILT_IN_SIN:
1752 case BUILT_IN_SINF:
1753 case BUILT_IN_SINL:
1754 builtin_optab = sin_optab; break;
1755 case BUILT_IN_COS:
1756 case BUILT_IN_COSF:
1757 case BUILT_IN_COSL:
1758 builtin_optab = cos_optab; break;
1759 case BUILT_IN_SQRT:
1760 case BUILT_IN_SQRTF:
1761 case BUILT_IN_SQRTL:
1762 builtin_optab = sqrt_optab; break;
1763 case BUILT_IN_EXP:
1764 case BUILT_IN_EXPF:
1765 case BUILT_IN_EXPL:
1766 builtin_optab = exp_optab; break;
1767 case BUILT_IN_LOG:
1768 case BUILT_IN_LOGF:
1769 case BUILT_IN_LOGL:
1770 builtin_optab = log_optab; break;
1771 case BUILT_IN_FLOOR:
1772 case BUILT_IN_FLOORF:
1773 case BUILT_IN_FLOORL:
1774 errno_set = false; builtin_optab = floor_optab; break;
1775 case BUILT_IN_CEIL:
1776 case BUILT_IN_CEILF:
1777 case BUILT_IN_CEILL:
1778 errno_set = false; builtin_optab = ceil_optab; break;
1779 case BUILT_IN_TRUNC:
1780 case BUILT_IN_TRUNCF:
1781 case BUILT_IN_TRUNCL:
1782 errno_set = false; builtin_optab = trunc_optab; break;
1783 case BUILT_IN_ROUND:
1784 case BUILT_IN_ROUNDF:
1785 case BUILT_IN_ROUNDL:
1786 errno_set = false; builtin_optab = round_optab; break;
1787 case BUILT_IN_NEARBYINT:
1788 case BUILT_IN_NEARBYINTF:
1789 case BUILT_IN_NEARBYINTL:
1790 errno_set = false; builtin_optab = nearbyint_optab; break;
1791 default:
1792 abort ();
1793 }
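  /* ERRNO_SET is cleared above for the rounding functions (floor, ceil,
     trunc, round, nearbyint); they are defined for every argument and can
     never raise a domain error, so no EDOM check is emitted for them.  */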
1794
1795 /* Compute into TARGET.
1796 Set TARGET to wherever the result comes back. */
1797 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1798 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1799
1800 /* If we were unable to expand via the builtin, stop the
1801 sequence (without outputting the insns) and return 0, causing
1802 a call to the library function. */
1803 if (target == 0)
1804 {
1805 end_sequence ();
1806 return 0;
1807 }
1808
1809 if (errno_set)
1810 expand_errno_check (exp, target);
1811
1812 /* Output the entire sequence. */
1813 insns = get_insns ();
1814 end_sequence ();
1815 emit_insn (insns);
1816
1817 return target;
1818 }
1819
1820 /* Expand a call to the builtin binary math functions (pow and atan2).
1821 Return 0 if a normal call should be emitted rather than expanding the
1822 function in-line. EXP is the expression that is a call to the builtin
1823 function; if convenient, the result should be placed in TARGET.
1824 SUBTARGET may be used as the target for computing one of EXP's
1825 operands. */
1826
1827 static rtx
1828 expand_builtin_mathfn_2 (exp, target, subtarget)
1829 tree exp;
1830 rtx target, subtarget;
1831 {
1832 optab builtin_optab;
1833 rtx op0, op1, insns;
1834 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1835 tree arglist = TREE_OPERAND (exp, 1);
1836 tree arg0, arg1;
1837 enum machine_mode argmode;
1838 bool errno_set = true;
1839 bool stable = true;
1840
1841 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1842 return 0;
1843
1844 arg0 = TREE_VALUE (arglist);
1845 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1846
1847 /* Stabilize the arguments. */
1848 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
1849 {
1850 arg0 = save_expr (arg0);
1851 TREE_VALUE (arglist) = arg0;
1852 stable = false;
1853 }
1854 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
1855 {
1856 arg1 = save_expr (arg1);
1857 TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
1858 stable = false;
1859 }
1860
1861 if (! stable)
1862 {
1863 exp = copy_node (exp);
1864 arglist = tree_cons (NULL_TREE, arg0,
1865 build_tree_list (NULL_TREE, arg1));
1866 TREE_OPERAND (exp, 1) = arglist;
1867 }
1868
1869 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1870 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1871
1872 /* Make a suitable register to place result in. */
1873 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1874
1875 emit_queue ();
1876 start_sequence ();
1877
1878 switch (DECL_FUNCTION_CODE (fndecl))
1879 {
1880 case BUILT_IN_POW:
1881 case BUILT_IN_POWF:
1882 case BUILT_IN_POWL:
1883 builtin_optab = pow_optab; break;
1884 case BUILT_IN_ATAN2:
1885 case BUILT_IN_ATAN2F:
1886 case BUILT_IN_ATAN2L:
1887 builtin_optab = atan2_optab; break;
1888 default:
1889 abort ();
1890 }
1891
1892 /* Compute into TARGET.
1893 Set TARGET to wherever the result comes back. */
1894 argmode = TYPE_MODE (TREE_TYPE (arg0));
1895 target = expand_binop (argmode, builtin_optab, op0, op1,
1896 target, 0, OPTAB_DIRECT);
1897
1898 /* If we were unable to expand via the builtin, stop the
1899 sequence (without outputting the insns) and return 0, causing
1900 a call to the library function. */
1901 if (target == 0)
1902 {
1903 end_sequence ();
1904 return 0;
1905 }
1906
1907 if (errno_set)
1908 expand_errno_check (exp, target);
1909
1910 /* Output the entire sequence. */
1911 insns = get_insns ();
1912 end_sequence ();
1913 emit_insn (insns);
1914
1915 return target;
1916 }
1917
1918 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
1919 if we failed; the caller should emit a normal call. Otherwise
1920 try to get the result in TARGET, if convenient. */
1921
1922 static rtx
1923 expand_builtin_strlen (exp, target)
1924 tree exp;
1925 rtx target;
1926 {
1927 tree arglist = TREE_OPERAND (exp, 1);
1928 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1929
1930 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1931 return 0;
1932 else
1933 {
1934 rtx pat;
1935 tree src = TREE_VALUE (arglist);
1936
1937 int align
1938 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1939
1940 rtx result, src_reg, char_rtx, before_strlen;
1941 enum machine_mode insn_mode = value_mode, char_mode;
1942 enum insn_code icode = CODE_FOR_nothing;
1943
1944 /* If SRC is not a pointer type, don't do this operation inline. */
1945 if (align == 0)
1946 return 0;
1947
1948 /* Bail out if we can't compute strlen in the right mode. */
1949 while (insn_mode != VOIDmode)
1950 {
1951 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1952 if (icode != CODE_FOR_nothing)
1953 break;
1954
1955 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1956 }
1957 if (insn_mode == VOIDmode)
1958 return 0;
1959
1960 /* Make a place to write the result of the instruction. */
1961 result = target;
1962 if (! (result != 0
1963 && GET_CODE (result) == REG
1964 && GET_MODE (result) == insn_mode
1965 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1966 result = gen_reg_rtx (insn_mode);
1967
1968 /* Make a place to hold the source address. We will not expand
1969 the actual source until we are sure that the expansion will
1970 not fail -- there are trees that cannot be expanded twice. */
1971 src_reg = gen_reg_rtx (Pmode);
1972
1973 /* Mark the beginning of the strlen sequence so we can emit the
1974 source operand later. */
1975 before_strlen = get_last_insn ();
1976
1977 char_rtx = const0_rtx;
1978 char_mode = insn_data[(int) icode].operand[2].mode;
1979 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1980 char_mode))
1981 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1982
1983 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1984 char_rtx, GEN_INT (align));
1985 if (! pat)
1986 return 0;
1987 emit_insn (pat);
1988
1989 /* Now that we are assured of success, expand the source. */
1990 start_sequence ();
1991 pat = memory_address (BLKmode,
1992 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1993 if (pat != src_reg)
1994 emit_move_insn (src_reg, pat);
1995 pat = get_insns ();
1996 end_sequence ();
1997
1998 if (before_strlen)
1999 emit_insn_after (pat, before_strlen);
2000 else
2001 emit_insn_before (pat, get_insns ());
2002
2003 /* Return the value in the proper mode for this function. */
2004 if (GET_MODE (result) == value_mode)
2005 target = result;
2006 else if (target != 0)
2007 convert_move (target, result, 0);
2008 else
2009 target = convert_to_mode (value_mode, result, 0);
2010
2011 return target;
2012 }
2013 }
2014
2015 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2016 caller should emit a normal call. Otherwise try to get the result
2017 in TARGET, if convenient (and in mode MODE if that's convenient). */
2018
2019 static rtx
2020 expand_builtin_strstr (arglist, target, mode)
2021 tree arglist;
2022 rtx target;
2023 enum machine_mode mode;
2024 {
2025 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2026 return 0;
2027 else
2028 {
2029 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2030 tree fn;
2031 const char *p1, *p2;
2032
2033 p2 = c_getstr (s2);
2034 if (p2 == NULL)
2035 return 0;
2036
2037 p1 = c_getstr (s1);
2038 if (p1 != NULL)
2039 {
2040 const char *r = strstr (p1, p2);
2041
2042 if (r == NULL)
2043 return const0_rtx;
2044
2045 /* Return an offset into the constant string argument. */
2046 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2047 s1, ssize_int (r - p1))),
2048 target, mode, EXPAND_NORMAL);
2049 }
2050
2051 if (p2[0] == '\0')
2052 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2053
2054 if (p2[1] != '\0')
2055 return 0;
2056
2057 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2058 if (!fn)
2059 return 0;
2060
2061 /* New argument list transforming strstr(s1, s2) to
2062 strchr(s1, s2[0]). */
2063 arglist =
2064 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2065 arglist = tree_cons (NULL_TREE, s1, arglist);
2066 return expand_expr (build_function_call_expr (fn, arglist),
2067 target, mode, EXPAND_NORMAL);
2068 }
2069 }
2070
2071 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2072 caller should emit a normal call. Otherwise try to get the result
2073 in TARGET, if convenient (and in mode MODE if that's convenient). */
2074
2075 static rtx
2076 expand_builtin_strchr (arglist, target, mode)
2077 tree arglist;
2078 rtx target;
2079 enum machine_mode mode;
2080 {
2081 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2082 return 0;
2083 else
2084 {
2085 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2086 const char *p1;
2087
2088 if (TREE_CODE (s2) != INTEGER_CST)
2089 return 0;
2090
2091 p1 = c_getstr (s1);
2092 if (p1 != NULL)
2093 {
2094 char c;
2095 const char *r;
2096
2097 if (target_char_cast (s2, &c))
2098 return 0;
2099
2100 r = strchr (p1, c);
2101
2102 if (r == NULL)
2103 return const0_rtx;
2104
2105 /* Return an offset into the constant string argument. */
2106 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2107 s1, ssize_int (r - p1))),
2108 target, mode, EXPAND_NORMAL);
2109 }
2110
2111 /* FIXME: Should use here strchrM optab so that ports can optimize
2112 this. */
2113 return 0;
2114 }
2115 }
2116
2117 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2118 caller should emit a normal call. Otherwise try to get the result
2119 in TARGET, if convenient (and in mode MODE if that's convenient). */
2120
2121 static rtx
2122 expand_builtin_strrchr (arglist, target, mode)
2123 tree arglist;
2124 rtx target;
2125 enum machine_mode mode;
2126 {
2127 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2128 return 0;
2129 else
2130 {
2131 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2132 tree fn;
2133 const char *p1;
2134
2135 if (TREE_CODE (s2) != INTEGER_CST)
2136 return 0;
2137
2138 p1 = c_getstr (s1);
2139 if (p1 != NULL)
2140 {
2141 char c;
2142 const char *r;
2143
2144 if (target_char_cast (s2, &c))
2145 return 0;
2146
2147 r = strrchr (p1, c);
2148
2149 if (r == NULL)
2150 return const0_rtx;
2151
2152 /* Return an offset into the constant string argument. */
2153 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2154 s1, ssize_int (r - p1))),
2155 target, mode, EXPAND_NORMAL);
2156 }
2157
2158 if (! integer_zerop (s2))
2159 return 0;
2160
2161 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2162 if (!fn)
2163 return 0;
2164
2165 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2166 return expand_expr (build_function_call_expr (fn, arglist),
2167 target, mode, EXPAND_NORMAL);
2168 }
2169 }
2170
2171 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2172 caller should emit a normal call. Otherwise try to get the result
2173 in TARGET, if convenient (and in mode MODE if that's convenient). */
2174
2175 static rtx
2176 expand_builtin_strpbrk (arglist, target, mode)
2177 tree arglist;
2178 rtx target;
2179 enum machine_mode mode;
2180 {
2181 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2182 return 0;
2183 else
2184 {
2185 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2186 tree fn;
2187 const char *p1, *p2;
2188
2189 p2 = c_getstr (s2);
2190 if (p2 == NULL)
2191 return 0;
2192
2193 p1 = c_getstr (s1);
2194 if (p1 != NULL)
2195 {
2196 const char *r = strpbrk (p1, p2);
2197
2198 if (r == NULL)
2199 return const0_rtx;
2200
2201 /* Return an offset into the constant string argument. */
2202 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2203 s1, ssize_int (r - p1))),
2204 target, mode, EXPAND_NORMAL);
2205 }
2206
2207 if (p2[0] == '\0')
2208 {
2209 /* strpbrk(x, "") == NULL.
2210 Evaluate and ignore the arguments in case they had
2211 side-effects. */
2212 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2213 return const0_rtx;
2214 }
2215
2216 if (p2[1] != '\0')
2217 return 0; /* Really call strpbrk. */
2218
2219 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2220 if (!fn)
2221 return 0;
2222
2223 /* New argument list transforming strpbrk(s1, s2) to
2224 strchr(s1, s2[0]). */
2225 arglist =
2226 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2227 arglist = tree_cons (NULL_TREE, s1, arglist);
2228 return expand_expr (build_function_call_expr (fn, arglist),
2229 target, mode, EXPAND_NORMAL);
2230 }
2231 }
2232
2233 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2234 bytes from constant string DATA + OFFSET and return it as a target
2235 constant. */
2236
2237 static rtx
2238 builtin_memcpy_read_str (data, offset, mode)
2239 PTR data;
2240 HOST_WIDE_INT offset;
2241 enum machine_mode mode;
2242 {
2243 const char *str = (const char *) data;
2244
2245 if (offset < 0
2246 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2247 > strlen (str) + 1))
2248 abort (); /* Attempt to read past the end of constant string. */
2249
2250 return c_readstr (str + offset, mode);
2251 }
2252
2253 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2254 Return 0 if we failed, the caller should emit a normal call, otherwise
2255 try to get the result in TARGET, if convenient (and in mode MODE if
2256 that's convenient). */
2257
2258 static rtx
2259 expand_builtin_memcpy (arglist, target, mode)
2260 tree arglist;
2261 rtx target;
2262 enum machine_mode mode;
2263 {
2264 if (!validate_arglist (arglist,
2265 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2266 return 0;
2267 else
2268 {
2269 tree dest = TREE_VALUE (arglist);
2270 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2271 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2272 const char *src_str;
2273
2274 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2275 unsigned int dest_align
2276 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2277 rtx dest_mem, src_mem, dest_addr, len_rtx;
2278
2279 /* If DEST is not a pointer type, call the normal function. */
2280 if (dest_align == 0)
2281 return 0;
2282
2283 /* If the LEN parameter is zero, return DEST. */
2284 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2285 {
2286 /* Evaluate and ignore SRC in case it has side-effects. */
2287 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2288 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2289 }
2290
2291 /* If SRC is not a pointer type, don't do this
2292 operation in-line. */
2293 if (src_align == 0)
2294 return 0;
2295
2296 dest_mem = get_memory_rtx (dest);
2297 set_mem_align (dest_mem, dest_align);
2298 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2299 src_str = c_getstr (src);
2300
2301 /* If SRC is a string constant and the block move would be done
2302 by pieces, we can avoid loading the string from memory
2303 and only store the computed constants. */
2304 if (src_str
2305 && GET_CODE (len_rtx) == CONST_INT
2306 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2307 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2308 (PTR) src_str, dest_align))
2309 {
2310 store_by_pieces (dest_mem, INTVAL (len_rtx),
2311 builtin_memcpy_read_str,
2312 (PTR) src_str, dest_align);
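	  /* memcpy is specified to return DEST, so hand back its address,
	     converted to ptr_mode when pointer extension is in effect.  */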
2313 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2314 #ifdef POINTERS_EXTEND_UNSIGNED
2315 if (GET_MODE (dest_mem) != ptr_mode)
2316 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2317 #endif
2318 return dest_mem;
2319 }
2320
2321 src_mem = get_memory_rtx (src);
2322 set_mem_align (src_mem, src_align);
2323
2324 /* Copy word part most expediently. */
2325 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2326 BLOCK_OP_NORMAL);
2327
2328 if (dest_addr == 0)
2329 {
2330 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2331 #ifdef POINTERS_EXTEND_UNSIGNED
2332 if (GET_MODE (dest_addr) != ptr_mode)
2333 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2334 #endif
2335 }
2336
2337 return dest_addr;
2338 }
2339 }
2340
2341 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2342 if we failed; the caller should emit a normal call. Otherwise try to get
2343 the result in TARGET, if convenient (and in mode MODE if that's
2344 convenient). */
2345
2346 static rtx
2347 expand_builtin_strcpy (exp, target, mode)
2348 tree exp;
2349 rtx target;
2350 enum machine_mode mode;
2351 {
2352 tree arglist = TREE_OPERAND (exp, 1);
2353 tree fn, len;
2354
2355 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2356 return 0;
2357
2358 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2359 if (!fn)
2360 return 0;
2361
2362 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2363 if (len == 0)
2364 return 0;
2365
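  /* memcpy must also copy the terminating NUL, so use strlen (src) + 1
     as the length argument.  */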
2366 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2367 chainon (arglist, build_tree_list (NULL_TREE, len));
2368 return expand_expr (build_function_call_expr (fn, arglist),
2369 target, mode, EXPAND_NORMAL);
2370 }
2371
2372 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2373 bytes from constant string DATA + OFFSET and return it as a target
2374 constant. */
2375
2376 static rtx
2377 builtin_strncpy_read_str (data, offset, mode)
2378 PTR data;
2379 HOST_WIDE_INT offset;
2380 enum machine_mode mode;
2381 {
2382 const char *str = (const char *) data;
2383
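  /* Reads past the end of STR supply the trailing zero padding that
     strncpy stores in the destination.  */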
2384 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2385 return const0_rtx;
2386
2387 return c_readstr (str + offset, mode);
2388 }
2389
2390 /* Expand a call to the strncpy builtin, with arguments in ARGLIST.
2391 Return 0 if we failed; the caller should emit a normal call. */
2392
2393 static rtx
2394 expand_builtin_strncpy (arglist, target, mode)
2395 tree arglist;
2396 rtx target;
2397 enum machine_mode mode;
2398 {
2399 if (!validate_arglist (arglist,
2400 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2401 return 0;
2402 else
2403 {
2404 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2405 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2406 tree fn;
2407
2408 /* We must be passed a constant len parameter. */
2409 if (TREE_CODE (len) != INTEGER_CST)
2410 return 0;
2411
2412 /* If the len parameter is zero, return the dst parameter. */
2413 if (integer_zerop (len))
2414 {
2415 /* Evaluate and ignore the src argument in case it has
2416 side-effects. */
2417 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2418 VOIDmode, EXPAND_NORMAL);
2419 /* Return the dst parameter. */
2420 return expand_expr (TREE_VALUE (arglist), target, mode,
2421 EXPAND_NORMAL);
2422 }
2423
2424 /* Now the length of the source string must be a known constant. */
2425 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2426 return 0;
2427
2428 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2429
2430 /* We're required to pad with trailing zeros if the requested
2431 len is greater than strlen(s2)+1. In that case try to
2432 use store_by_pieces; if that fails, punt. */
2433 if (tree_int_cst_lt (slen, len))
2434 {
2435 tree dest = TREE_VALUE (arglist);
2436 unsigned int dest_align
2437 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2438 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2439 rtx dest_mem;
2440
2441 if (!p || dest_align == 0 || !host_integerp (len, 1)
2442 || !can_store_by_pieces (tree_low_cst (len, 1),
2443 builtin_strncpy_read_str,
2444 (PTR) p, dest_align))
2445 return 0;
2446
2447 dest_mem = get_memory_rtx (dest);
2448 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2449 builtin_strncpy_read_str,
2450 (PTR) p, dest_align);
2451 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2452 #ifdef POINTERS_EXTEND_UNSIGNED
2453 if (GET_MODE (dest_mem) != ptr_mode)
2454 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2455 #endif
2456 return dest_mem;
2457 }
2458
2459 /* OK, transform into a call to the builtin memcpy. */
2460 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2461 if (!fn)
2462 return 0;
2463 return expand_expr (build_function_call_expr (fn, arglist),
2464 target, mode, EXPAND_NORMAL);
2465 }
2466 }
2467
2468 /* Callback routine for store_by_pieces. Return GET_MODE_SIZE (MODE)
2469 copies of the single fill byte pointed to by DATA, as a target
2470 constant; OFFSET is ignored. */
2471
2472 static rtx
2473 builtin_memset_read_str (data, offset, mode)
2474 PTR data;
2475 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2476 enum machine_mode mode;
2477 {
2478 const char *c = (const char *) data;
2479 char *p = alloca (GET_MODE_SIZE (mode));
2480
2481 memset (p, *c, GET_MODE_SIZE (mode));
2482
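  /* P now holds GET_MODE_SIZE (MODE) copies of the fill byte; c_readstr
     turns that buffer into a constant of mode MODE.  */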
2483 return c_readstr (p, mode);
2484 }
2485
2486 /* Callback routine for store_by_pieces. Return the RTL of a register
2487 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2488 char value given in the RTL register data. For example, if mode is
2489 4 bytes wide, return the RTL for 0x01010101*data. */
2490
2491 static rtx
2492 builtin_memset_gen_str (data, offset, mode)
2493 PTR data;
2494 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2495 enum machine_mode mode;
2496 {
2497 rtx target, coeff;
2498 size_t size;
2499 char *p;
2500
2501 size = GET_MODE_SIZE (mode);
2502 if (size == 1)
2503 return (rtx) data;
2504
2505 p = alloca (size);
2506 memset (p, 1, size);
2507 coeff = c_readstr (p, mode);
2508
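  /* COEFF is now the 0x0101...01 pattern in MODE; multiplying the
     zero-extended byte by it replicates that byte across the word.  */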
2509 target = convert_to_mode (mode, (rtx) data, 1);
2510 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2511 return force_reg (mode, target);
2512 }
2513
2514 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2515 if we failed; the caller should emit a normal call. Otherwise try to get
2516 the result in TARGET, if convenient (and in mode MODE if that's
2517 convenient). */
2518
2519 static rtx
2520 expand_builtin_memset (exp, target, mode)
2521 tree exp;
2522 rtx target;
2523 enum machine_mode mode;
2524 {
2525 tree arglist = TREE_OPERAND (exp, 1);
2526
2527 if (!validate_arglist (arglist,
2528 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2529 return 0;
2530 else
2531 {
2532 tree dest = TREE_VALUE (arglist);
2533 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2534 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2535 char c;
2536
2537 unsigned int dest_align
2538 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2539 rtx dest_mem, dest_addr, len_rtx;
2540
2541 /* If DEST is not a pointer type, don't do this
2542 operation in-line. */
2543 if (dest_align == 0)
2544 return 0;
2545
2546 /* If the LEN parameter is zero, return DEST. */
2547 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2548 {
2549 /* Evaluate and ignore VAL in case it has side-effects. */
2550 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2551 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2552 }
2553
2554 if (TREE_CODE (val) != INTEGER_CST)
2555 {
2556 rtx val_rtx;
2557
2558 if (!host_integerp (len, 1))
2559 return 0;
2560
2561 if (optimize_size && tree_low_cst (len, 1) > 1)
2562 return 0;
2563
2564 /* Assume that we can memset by pieces if we can store
2565 the coefficients by pieces (in the required modes).
2566 We can't pass builtin_memset_gen_str as that emits RTL. */
2567 c = 1;
2568 if (!can_store_by_pieces (tree_low_cst (len, 1),
2569 builtin_memset_read_str,
2570 (PTR) &c, dest_align))
2571 return 0;
2572
2573 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2574 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2575 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2576 val_rtx);
2577 dest_mem = get_memory_rtx (dest);
2578 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2579 builtin_memset_gen_str,
2580 (PTR) val_rtx, dest_align);
2581 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2582 #ifdef POINTERS_EXTEND_UNSIGNED
2583 if (GET_MODE (dest_mem) != ptr_mode)
2584 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2585 #endif
2586 return dest_mem;
2587 }
2588
2589 if (target_char_cast (val, &c))
2590 return 0;
2591
2592 if (c)
2593 {
2594 if (!host_integerp (len, 1))
2595 return 0;
2596 if (!can_store_by_pieces (tree_low_cst (len, 1),
2597 builtin_memset_read_str, (PTR) &c,
2598 dest_align))
2599 return 0;
2600
2601 dest_mem = get_memory_rtx (dest);
2602 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2603 builtin_memset_read_str,
2604 (PTR) &c, dest_align);
2605 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2606 #ifdef POINTERS_EXTEND_UNSIGNED
2607 if (GET_MODE (dest_mem) != ptr_mode)
2608 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2609 #endif
2610 return dest_mem;
2611 }
2612
2613 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2614
2615 dest_mem = get_memory_rtx (dest);
2616 set_mem_align (dest_mem, dest_align);
2617 dest_addr = clear_storage (dest_mem, len_rtx);
2618
2619 if (dest_addr == 0)
2620 {
2621 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2622 #ifdef POINTERS_EXTEND_UNSIGNED
2623 if (GET_MODE (dest_addr) != ptr_mode)
2624 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2625 #endif
2626 }
2627
2628 return dest_addr;
2629 }
2630 }
2631
2632 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2633 if we failed; the caller should emit a normal call. */
2634
2635 static rtx
2636 expand_builtin_bzero (exp)
2637 tree exp;
2638 {
2639 tree arglist = TREE_OPERAND (exp, 1);
2640 tree dest, size, newarglist;
2641 rtx result;
2642
2643 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2644 return NULL_RTX;
2645
2646 dest = TREE_VALUE (arglist);
2647 size = TREE_VALUE (TREE_CHAIN (arglist));
2648
2649 /* New argument list transforming bzero(ptr x, int y) to
2650 memset(ptr x, int 0, size_t y). This is done this way
2651 so that if it isn't expanded inline, we fall back to
2652 calling bzero instead of memset. */
2653
2654 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2655 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2656 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2657
2658 TREE_OPERAND (exp, 1) = newarglist;
2659 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2660
2661 /* Always restore the original arguments. */
2662 TREE_OPERAND (exp, 1) = arglist;
2663
2664 return result;
2665 }
2666
2667 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2668 ARGLIST is the argument list for this call. Return 0 if we failed and the
2669 caller should emit a normal call, otherwise try to get the result in
2670 TARGET, if convenient (and in mode MODE, if that's convenient). */
2671
2672 static rtx
2673 expand_builtin_memcmp (exp, arglist, target, mode)
2674 tree exp ATTRIBUTE_UNUSED;
2675 tree arglist;
2676 rtx target;
2677 enum machine_mode mode;
2678 {
2679 tree arg1, arg2, len;
2680 const char *p1, *p2;
2681
2682 if (!validate_arglist (arglist,
2683 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2684 return 0;
2685
2686 arg1 = TREE_VALUE (arglist);
2687 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2688 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2689
2690 /* If the len parameter is zero, return zero. */
2691 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2692 {
2693 /* Evaluate and ignore arg1 and arg2 in case they have
2694 side-effects. */
2695 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2696 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2697 return const0_rtx;
2698 }
2699
2700 p1 = c_getstr (arg1);
2701 p2 = c_getstr (arg2);
2702
2703 /* If all arguments are constant, and the value of len is not greater
2704 than the lengths of arg1 and arg2, evaluate at compile-time. */
2705 if (host_integerp (len, 1) && p1 && p2
2706 && compare_tree_int (len, strlen (p1) + 1) <= 0
2707 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2708 {
2709 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2710
2711 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2712 }
2713
2714 /* If the len parameter is one, return an expression corresponding to
2715 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2716 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2717 {
2718 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2719 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2720 tree ind1 =
2721 fold (build1 (CONVERT_EXPR, integer_type_node,
2722 build1 (INDIRECT_REF, cst_uchar_node,
2723 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2724 tree ind2 =
2725 fold (build1 (CONVERT_EXPR, integer_type_node,
2726 build1 (INDIRECT_REF, cst_uchar_node,
2727 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2728 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2729 return expand_expr (result, target, mode, EXPAND_NORMAL);
2730 }
2731
2732 #ifdef HAVE_cmpstrsi
2733 {
2734 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2735 rtx result;
2736 rtx insn;
2737
2738 int arg1_align
2739 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2740 int arg2_align
2741 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2742 enum machine_mode insn_mode
2743 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2744
2745 /* If we don't have POINTER_TYPE, call the function. */
2746 if (arg1_align == 0 || arg2_align == 0)
2747 return 0;
2748
2749 /* Make a place to write the result of the instruction. */
2750 result = target;
2751 if (! (result != 0
2752 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2753 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2754 result = gen_reg_rtx (insn_mode);
2755
2756 arg1_rtx = get_memory_rtx (arg1);
2757 arg2_rtx = get_memory_rtx (arg2);
2758 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2759 if (!HAVE_cmpstrsi)
2760 insn = NULL_RTX;
2761 else
2762 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2763 GEN_INT (MIN (arg1_align, arg2_align)));
2764
2765 if (insn)
2766 emit_insn (insn);
2767 else
2768 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2769 TYPE_MODE (integer_type_node), 3,
2770 XEXP (arg1_rtx, 0), Pmode,
2771 XEXP (arg2_rtx, 0), Pmode,
2772 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2773 TREE_UNSIGNED (sizetype)),
2774 TYPE_MODE (sizetype));
2775
2776 /* Return the value in the proper mode for this function. */
2777 mode = TYPE_MODE (TREE_TYPE (exp));
2778 if (GET_MODE (result) == mode)
2779 return result;
2780 else if (target != 0)
2781 {
2782 convert_move (target, result, 0);
2783 return target;
2784 }
2785 else
2786 return convert_to_mode (mode, result, 0);
2787 }
2788 #endif
2789
2790 return 0;
2791 }
2792
2793 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2794 if we failed; the caller should emit a normal call. Otherwise try to get
2795 the result in TARGET, if convenient. */
2796
2797 static rtx
2798 expand_builtin_strcmp (exp, target, mode)
2799 tree exp;
2800 rtx target;
2801 enum machine_mode mode;
2802 {
2803 tree arglist = TREE_OPERAND (exp, 1);
2804 tree arg1, arg2, len, len2, fn;
2805 const char *p1, *p2;
2806
2807 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2808 return 0;
2809
2810 arg1 = TREE_VALUE (arglist);
2811 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2812
2813 p1 = c_getstr (arg1);
2814 p2 = c_getstr (arg2);
2815
2816 if (p1 && p2)
2817 {
2818 const int i = strcmp (p1, p2);
2819 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2820 }
2821
2822 /* If either arg is "", return an expression corresponding to
2823 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2824 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2825 {
2826 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2827 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2828 tree ind1 =
2829 fold (build1 (CONVERT_EXPR, integer_type_node,
2830 build1 (INDIRECT_REF, cst_uchar_node,
2831 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2832 tree ind2 =
2833 fold (build1 (CONVERT_EXPR, integer_type_node,
2834 build1 (INDIRECT_REF, cst_uchar_node,
2835 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2836 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2837 return expand_expr (result, target, mode, EXPAND_NORMAL);
2838 }
2839
2840 len = c_strlen (arg1);
2841 len2 = c_strlen (arg2);
2842
2843 if (len)
2844 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2845
2846 if (len2)
2847 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
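  /* The lengths include the terminating NUL so that the memcmp call
     emitted below compares the terminators as well.  */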
2848
2849 /* If we don't have a constant length for the first, use the length
2850 of the second, if we know it. We don't require a constant for
2851 this case; some cost analysis could be done if both are available
2852 but neither is constant. For now, assume they're equally cheap
2853 unless one has side effects.
2854
2855 If both strings have constant lengths, use the smaller. This
2856 could arise if optimization results in strcpy being called with
2857 two fixed strings, or if the code was machine-generated. We should
2858 add some code to the `memcmp' handler below to deal with such
2859 situations, someday. */
2860
2861 if (!len || TREE_CODE (len) != INTEGER_CST)
2862 {
2863 if (len2 && !TREE_SIDE_EFFECTS (len2))
2864 len = len2;
2865 else if (len == 0)
2866 return 0;
2867 }
2868 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2869 && tree_int_cst_lt (len2, len))
2870 len = len2;
2871
2872 /* If both arguments have side effects, we cannot optimize. */
2873 if (TREE_SIDE_EFFECTS (len))
2874 return 0;
2875
2876 fn = implicit_built_in_decls[BUILT_IN_MEMCMP];
2877 if (!fn)
2878 return 0;
2879
2880 chainon (arglist, build_tree_list (NULL_TREE, len));
2881 return expand_expr (build_function_call_expr (fn, arglist),
2882 target, mode, EXPAND_NORMAL);
2883 }
2884
2885 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2886 if we failed; the caller should emit a normal call. Otherwise try to get
2887 the result in TARGET, if convenient. */
2888
2889 static rtx
2890 expand_builtin_strncmp (exp, target, mode)
2891 tree exp;
2892 rtx target;
2893 enum machine_mode mode;
2894 {
2895 tree arglist = TREE_OPERAND (exp, 1);
2896 tree fn, newarglist, len = 0;
2897 tree arg1, arg2, arg3;
2898 const char *p1, *p2;
2899
2900 if (!validate_arglist (arglist,
2901 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2902 return 0;
2903
2904 arg1 = TREE_VALUE (arglist);
2905 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2906 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2907
2908 /* If the len parameter is zero, return zero. */
2909 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2910 {
2911 /* Evaluate and ignore arg1 and arg2 in case they have
2912 side-effects. */
2913 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2914 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2915 return const0_rtx;
2916 }
2917
2918 p1 = c_getstr (arg1);
2919 p2 = c_getstr (arg2);
2920
2921 /* If all arguments are constant, evaluate at compile-time. */
2922 if (host_integerp (arg3, 1) && p1 && p2)
2923 {
2924 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2925 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2926 }
2927
2928 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2929 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2930 if (host_integerp (arg3, 1)
2931 && (tree_low_cst (arg3, 1) == 1
2932 || (tree_low_cst (arg3, 1) > 1
2933 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2934 {
2935 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2936 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2937 tree ind1 =
2938 fold (build1 (CONVERT_EXPR, integer_type_node,
2939 build1 (INDIRECT_REF, cst_uchar_node,
2940 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2941 tree ind2 =
2942 fold (build1 (CONVERT_EXPR, integer_type_node,
2943 build1 (INDIRECT_REF, cst_uchar_node,
2944 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2945 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2946 return expand_expr (result, target, mode, EXPAND_NORMAL);
2947 }
2948
2949 /* If c_strlen can determine an expression for one of the string
2950 lengths, and it doesn't have side effects, then call
2951 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2952
2953 /* Perhaps one of the strings is really constant, if so prefer
2954 that constant length over the other string's length. */
2955 if (p1)
2956 len = c_strlen (arg1);
2957 else if (p2)
2958 len = c_strlen (arg2);
2959
2960 /* If we still don't have a len, try either string arg, as long
2961 as it doesn't have side effects. */
2962 if (!len && !TREE_SIDE_EFFECTS (arg1))
2963 len = c_strlen (arg1);
2964 if (!len && !TREE_SIDE_EFFECTS (arg2))
2965 len = c_strlen (arg2);
2966 /* If we still don't have a length, punt. */
2967 if (!len)
2968 return 0;
2969
2970 fn = implicit_built_in_decls[BUILT_IN_MEMCMP];
2971 if (!fn)
2972 return 0;
2973
2974 /* Add one to the string length. */
2975 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2976
2977 /* The actual new length parameter is MIN(len,arg3). */
2978 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2979
2980 newarglist = build_tree_list (NULL_TREE, len);
2981 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2982 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2983 return expand_expr (build_function_call_expr (fn, newarglist),
2984 target, mode, EXPAND_NORMAL);
2985 }
2986
2987 /* Expand a call to the strcat builtin, with arguments in ARGLIST.
2988 Return 0 if we failed; the caller should emit a normal call.
2989 Otherwise try to get the result in TARGET, if convenient. */
2990
2991 static rtx
2992 expand_builtin_strcat (arglist, target, mode)
2993 tree arglist;
2994 rtx target;
2995 enum machine_mode mode;
2996 {
2997 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2998 return 0;
2999 else
3000 {
3001 tree dst = TREE_VALUE (arglist),
3002 src = TREE_VALUE (TREE_CHAIN (arglist));
3003 const char *p = c_getstr (src);
3004
3005 /* If the string length is zero, return the dst parameter. */
3006 if (p && *p == '\0')
3007 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3008
3009 return 0;
3010 }
3011 }
3012
3013 /* Expand a call to the strncat builtin, with arguments in ARGLIST.
3014 Return 0 if we failed; the caller should emit a normal call.
3015 Otherwise try to get the result in TARGET, if convenient. */
3016
3017 static rtx
3018 expand_builtin_strncat (arglist, target, mode)
3019 tree arglist;
3020 rtx target;
3021 enum machine_mode mode;
3022 {
3023 if (!validate_arglist (arglist,
3024 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3025 return 0;
3026 else
3027 {
3028 tree dst = TREE_VALUE (arglist),
3029 src = TREE_VALUE (TREE_CHAIN (arglist)),
3030 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3031 const char *p = c_getstr (src);
3032
3033 /* If the requested length is zero, or the src parameter string
3034 length is zero, return the dst parameter. */
3035 if (integer_zerop (len) || (p && *p == '\0'))
3036 {
3037 /* Evaluate and ignore the src and len parameters in case
3038 they have side-effects. */
3039 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3040 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3041 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3042 }
3043
3044 /* If the requested len is greater than or equal to the string
3045 length, call strcat. */
3046 if (TREE_CODE (len) == INTEGER_CST && p
3047 && compare_tree_int (len, strlen (p)) >= 0)
3048 {
3049 tree newarglist
3050 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3051 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3052
3053 /* If the replacement _DECL isn't initialized, don't do the
3054 transformation. */
3055 if (!fn)
3056 return 0;
3057
3058 return expand_expr (build_function_call_expr (fn, newarglist),
3059 target, mode, EXPAND_NORMAL);
3060 }
3061 return 0;
3062 }
3063 }
3064
3065 /* Expand a call to the strspn builtin, with arguments in ARGLIST.
3066 Return 0 if we failed; the caller should emit a normal call.
3067 Otherwise try to get the result in TARGET, if convenient. */
3068
3069 static rtx
3070 expand_builtin_strspn (arglist, target, mode)
3071 tree arglist;
3072 rtx target;
3073 enum machine_mode mode;
3074 {
3075 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3076 return 0;
3077 else
3078 {
3079 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3080 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3081
3082 /* If both arguments are constants, evaluate at compile-time. */
3083 if (p1 && p2)
3084 {
3085 const size_t r = strspn (p1, p2);
3086 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3087 }
3088
3089 /* If either argument is "", return 0. */
3090 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3091 {
3092 /* Evaluate and ignore both arguments in case either one has
3093 side-effects. */
3094 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3095 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3096 return const0_rtx;
3097 }
3098 return 0;
3099 }
3100 }
3101
3102 /* Expand a call to the strcspn builtin, with arguments in ARGLIST.
3103 Return 0 if we failed; the caller should emit a normal call.
3104 Otherwise try to get the result in TARGET, if convenient. */
3105
3106 static rtx
3107 expand_builtin_strcspn (arglist, target, mode)
3108 tree arglist;
3109 rtx target;
3110 enum machine_mode mode;
3111 {
3112 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3113 return 0;
3114 else
3115 {
3116 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3117 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3118
3119 /* If both arguments are constants, evaluate at compile-time. */
3120 if (p1 && p2)
3121 {
3122 const size_t r = strcspn (p1, p2);
3123 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3124 }
3125
3126 /* If the first argument is "", return 0. */
3127 if (p1 && *p1 == '\0')
3128 {
3129 /* Evaluate and ignore argument s2 in case it has
3130 side-effects. */
3131 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3132 return const0_rtx;
3133 }
3134
3135 /* If the second argument is "", return __builtin_strlen(s1). */
3136 if (p2 && *p2 == '\0')
3137 {
3138 tree newarglist = build_tree_list (NULL_TREE, s1),
3139 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3140
3141 /* If the replacement _DECL isn't initialized, don't do the
3142 transformation. */
3143 if (!fn)
3144 return 0;
3145
3146 return expand_expr (build_function_call_expr (fn, newarglist),
3147 target, mode, EXPAND_NORMAL);
3148 }
3149 return 0;
3150 }
3151 }
3152
3153 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3154 if that's convenient. */
3155
3156 rtx
3157 expand_builtin_saveregs ()
3158 {
3159 rtx val, seq;
3160
3161 /* Don't do __builtin_saveregs more than once in a function.
3162 Save the result of the first call and reuse it. */
3163 if (saveregs_value != 0)
3164 return saveregs_value;
3165
3166 /* When this function is called, it means that registers must be
3167 saved on entry to this function. So we migrate the call to the
3168 first insn of this function. */
3169
3170 start_sequence ();
3171
3172 #ifdef EXPAND_BUILTIN_SAVEREGS
3173 /* Do whatever the machine needs done in this case. */
3174 val = EXPAND_BUILTIN_SAVEREGS ();
3175 #else
3176 /* ??? We used to try and build up a call to the out of line function,
3177 guessing about what registers needed saving etc. This became much
3178 harder with __builtin_va_start, since we don't have a tree for a
3179 call to __builtin_saveregs to fall back on. There was exactly one
3180 port (i860) that used this code, and I'm unconvinced it could actually
3181 handle the general case. So we no longer try to handle anything
3182 weird and make the backend absorb the evil. */
3183
3184 error ("__builtin_saveregs not supported by this target");
3185 val = const0_rtx;
3186 #endif
3187
3188 seq = get_insns ();
3189 end_sequence ();
3190
3191 saveregs_value = val;
3192
3193 /* Put the insns after the NOTE that starts the function. If this
3194 is inside a start_sequence, make the outer-level insn chain current, so
3195 the code is placed at the start of the function. */
3196 push_topmost_sequence ();
3197 emit_insn_after (seq, get_insns ());
3198 pop_topmost_sequence ();
3199
3200 return val;
3201 }
3202
3203 /* __builtin_args_info (N) returns word N of the arg space info
3204 for the current function. The number and meanings of the words
3205 are controlled by the definition of CUMULATIVE_ARGS. */
3206
3207 static rtx
3208 expand_builtin_args_info (exp)
3209 tree exp;
3210 {
3211 tree arglist = TREE_OPERAND (exp, 1);
3212 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3213 int *word_ptr = (int *) &current_function_args_info;
3214 #if 0
3215 /* These are used by the code below that is if 0'ed away */
3216 int i;
3217 tree type, elts, result;
3218 #endif
3219
3220 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3221 abort ();
3222
3223 if (arglist != 0)
3224 {
3225 if (!host_integerp (TREE_VALUE (arglist), 0))
3226 error ("argument of `__builtin_args_info' must be constant");
3227 else
3228 {
3229 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3230
3231 if (wordnum < 0 || wordnum >= nwords)
3232 error ("argument of `__builtin_args_info' out of range");
3233 else
3234 return GEN_INT (word_ptr[wordnum]);
3235 }
3236 }
3237 else
3238 error ("missing argument in `__builtin_args_info'");
3239
3240 return const0_rtx;
3241
3242 #if 0
3243 for (i = 0; i < nwords; i++)
3244 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
3245
3246 type = build_array_type (integer_type_node,
3247 build_index_type (build_int_2 (nwords, 0)));
3248 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
3249 TREE_CONSTANT (result) = 1;
3250 TREE_STATIC (result) = 1;
3251 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
3252 TREE_CONSTANT (result) = 1;
3253 return expand_expr (result, NULL_RTX, VOIDmode, 0);
3254 #endif
3255 }
3256
3257 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3258
3259 static rtx
3260 expand_builtin_next_arg (arglist)
3261 tree arglist;
3262 {
3263 tree fntype = TREE_TYPE (current_function_decl);
3264
3265 if (TYPE_ARG_TYPES (fntype) == 0
3266 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3267 == void_type_node))
3268 {
3269 error ("`va_start' used in function with fixed args");
3270 return const0_rtx;
3271 }
3272
3273 if (arglist)
3274 {
3275 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3276 tree arg = TREE_VALUE (arglist);
3277
3278 /* Strip off all nops for the sake of the comparison. This
3279 is not quite the same as STRIP_NOPS. It does more.
3280 We must also strip off INDIRECT_REF for C++ reference
3281 parameters. */
3282 while (TREE_CODE (arg) == NOP_EXPR
3283 || TREE_CODE (arg) == CONVERT_EXPR
3284 || TREE_CODE (arg) == NON_LVALUE_EXPR
3285 || TREE_CODE (arg) == INDIRECT_REF)
3286 arg = TREE_OPERAND (arg, 0);
3287 if (arg != last_parm)
3288 warning ("second parameter of `va_start' not last named argument");
3289 }
3290 else
3291 /* Evidently an out of date version of <stdarg.h>; can't validate
3292 va_start's second argument, but can still work as intended. */
3293 warning ("`__builtin_next_arg' called without an argument");
3294
3295 return expand_binop (Pmode, add_optab,
3296 current_function_internal_arg_pointer,
3297 current_function_arg_offset_rtx,
3298 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3299 }
3300
3301 /* Make it easier for the backends by protecting the valist argument
3302 from multiple evaluations. */
3303
3304 static tree
3305 stabilize_va_list (valist, needs_lvalue)
3306 tree valist;
3307 int needs_lvalue;
3308 {
3309 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3310 {
3311 if (TREE_SIDE_EFFECTS (valist))
3312 valist = save_expr (valist);
3313
3314 /* For this case, the backends will be expecting a pointer to
3315 TREE_TYPE (va_list_type_node), but it's possible we've
3316 actually been given an array (an actual va_list_type_node).
3317 So fix it. */
3318 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3319 {
3320 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3321 tree p2 = build_pointer_type (va_list_type_node);
3322
3323 valist = build1 (ADDR_EXPR, p2, valist);
3324 valist = fold (build1 (NOP_EXPR, p1, valist));
3325 }
3326 }
3327 else
3328 {
3329 tree pt;
3330
3331 if (! needs_lvalue)
3332 {
3333 if (! TREE_SIDE_EFFECTS (valist))
3334 return valist;
3335
3336 pt = build_pointer_type (va_list_type_node);
3337 valist = fold (build1 (ADDR_EXPR, pt, valist));
3338 TREE_SIDE_EFFECTS (valist) = 1;
3339 }
3340
3341 if (TREE_SIDE_EFFECTS (valist))
3342 valist = save_expr (valist);
3343 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3344 valist));
3345 }
3346
3347 return valist;
3348 }
3349
3350 /* The "standard" implementation of va_start: just assign `nextarg' to
3351 the variable. */
3352
3353 void
3354 std_expand_builtin_va_start (valist, nextarg)
3355 tree valist;
3356 rtx nextarg;
3357 {
3358 tree t;
3359
3360 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3361 make_tree (ptr_type_node, nextarg));
3362 TREE_SIDE_EFFECTS (t) = 1;
3363
3364 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3365 }
3366
3367 /* Expand ARGLIST, from a call to __builtin_va_start. */
3368
3369 static rtx
3370 expand_builtin_va_start (arglist)
3371 tree arglist;
3372 {
3373 rtx nextarg;
3374 tree chain, valist;
3375
3376 chain = TREE_CHAIN (arglist);
3377
3378 if (TREE_CHAIN (chain))
3379 error ("too many arguments to function `va_start'");
3380
3381 nextarg = expand_builtin_next_arg (chain);
3382 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3383
3384 #ifdef EXPAND_BUILTIN_VA_START
3385 EXPAND_BUILTIN_VA_START (valist, nextarg);
3386 #else
3387 std_expand_builtin_va_start (valist, nextarg);
3388 #endif
3389
3390 return const0_rtx;
3391 }
3392
3393 /* The "standard" implementation of va_arg: read the value from the
3394 current (padded) address and increment by the (padded) size. */
3395
3396 rtx
3397 std_expand_builtin_va_arg (valist, type)
3398 tree valist, type;
3399 {
3400 tree addr_tree, t, type_size = NULL;
3401 tree align, alignm1;
3402 tree rounded_size;
3403 rtx addr;
3404
3405 /* Compute the rounded size of the type. */
3406 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3407 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3408 if (type == error_mark_node
3409 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3410 || TREE_OVERFLOW (type_size))
3411 rounded_size = size_zero_node;
3412 else
3413 rounded_size = fold (build (MULT_EXPR, sizetype,
3414 fold (build (TRUNC_DIV_EXPR, sizetype,
3415 fold (build (PLUS_EXPR, sizetype,
3416 type_size, alignm1)),
3417 align)),
3418 align));
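  /* That is, rounded_size = ((type_size + align - 1) / align) * align,
     the argument size rounded up to a multiple of the parameter alignment.  */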
3419
3420 /* Get AP. */
3421 addr_tree = valist;
3422 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3423 {
3424 /* Small args are padded downward. */
3425 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3426 fold (build (COND_EXPR, sizetype,
3427 fold (build (GT_EXPR, sizetype,
3428 rounded_size,
3429 align)),
3430 size_zero_node,
3431 fold (build (MINUS_EXPR, sizetype,
3432 rounded_size,
3433 type_size))))));
3434 }
3435
3436 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3437 addr = copy_to_reg (addr);
3438
3439 /* Compute new value for AP. */
3440 if (! integer_zerop (rounded_size))
3441 {
3442 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3443 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3444 rounded_size));
3445 TREE_SIDE_EFFECTS (t) = 1;
3446 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3447 }
3448
3449 return addr;
3450 }
3451
3452 /* Expand __builtin_va_arg, which is not really a builtin function, but
3453 a very special sort of operator. */
3454
3455 rtx
3456 expand_builtin_va_arg (valist, type)
3457 tree valist, type;
3458 {
3459 rtx addr, result;
3460 tree promoted_type, want_va_type, have_va_type;
3461
3462 /* Verify that valist is of the proper type. */
3463
3464 want_va_type = va_list_type_node;
3465 have_va_type = TREE_TYPE (valist);
3466 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3467 {
3468 /* If va_list is an array type, the argument may have decayed
3469 to a pointer type, e.g. by being passed to another function.
3470 In that case, unwrap both types so that we can compare the
3471 underlying records. */
3472 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3473 || TREE_CODE (have_va_type) == POINTER_TYPE)
3474 {
3475 want_va_type = TREE_TYPE (want_va_type);
3476 have_va_type = TREE_TYPE (have_va_type);
3477 }
3478 }
3479 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3480 {
3481 error ("first argument to `va_arg' not of type `va_list'");
3482 addr = const0_rtx;
3483 }
3484
3485 /* Generate a diagnostic for requesting data of a type that cannot
3486 be passed through `...' due to type promotion at the call site. */
3487 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3488 != type)
3489 {
3490 const char *name = "<anonymous type>", *pname = 0;
3491 static bool gave_help;
3492
3493 if (TYPE_NAME (type))
3494 {
3495 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3496 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3497 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3498 && DECL_NAME (TYPE_NAME (type)))
3499 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3500 }
3501 if (TYPE_NAME (promoted_type))
3502 {
3503 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3504 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3505 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3506 && DECL_NAME (TYPE_NAME (promoted_type)))
3507 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3508 }
3509
3510 /* Unfortunately, this is merely undefined, rather than a constraint
3511 violation, so we cannot make this an error. If this call is never
3512 executed, the program is still strictly conforming. */
3513 warning ("`%s' is promoted to `%s' when passed through `...'",
3514 name, pname);
3515 if (! gave_help)
3516 {
3517 gave_help = true;
3518 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3519 pname, name);
3520 }
3521
3522 /* We can, however, treat "undefined" any way we please.
3523 Call abort to encourage the user to fix the program. */
3524 expand_builtin_trap ();
3525
3526 /* This is dead code, but go ahead and finish so that the
3527 mode of the result comes out right. */
3528 addr = const0_rtx;
3529 }
3530 else
3531 {
3532 /* Make it easier for the backends by protecting the valist argument
3533 from multiple evaluations. */
3534 valist = stabilize_va_list (valist, 0);
3535
3536 #ifdef EXPAND_BUILTIN_VA_ARG
3537 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3538 #else
3539 addr = std_expand_builtin_va_arg (valist, type);
3540 #endif
3541 }
3542
3543 #ifdef POINTERS_EXTEND_UNSIGNED
3544 if (GET_MODE (addr) != Pmode)
3545 addr = convert_memory_address (Pmode, addr);
3546 #endif
3547
3548 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3549 set_mem_alias_set (result, get_varargs_alias_set ());
3550
3551 return result;
3552 }
3553
3554 /* Expand ARGLIST, from a call to __builtin_va_end. */
3555
3556 static rtx
3557 expand_builtin_va_end (arglist)
3558 tree arglist;
3559 {
3560 tree valist = TREE_VALUE (arglist);
3561
3562 #ifdef EXPAND_BUILTIN_VA_END
3563 valist = stabilize_va_list (valist, 0);
3564 EXPAND_BUILTIN_VA_END (arglist);
3565 #else
3566 /* Evaluate for side effects, if needed. I hate macros that don't
3567 do that. */
3568 if (TREE_SIDE_EFFECTS (valist))
3569 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3570 #endif
3571
3572 return const0_rtx;
3573 }
3574
3575 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3576 builtin rather than just as an assignment in stdarg.h because of the
3577 nastiness of array-type va_list types. */
3578
3579 static rtx
3580 expand_builtin_va_copy (arglist)
3581 tree arglist;
3582 {
3583 tree dst, src, t;
3584
3585 dst = TREE_VALUE (arglist);
3586 src = TREE_VALUE (TREE_CHAIN (arglist));
3587
3588 dst = stabilize_va_list (dst, 1);
3589 src = stabilize_va_list (src, 0);
3590
3591 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3592 {
3593 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3594 TREE_SIDE_EFFECTS (t) = 1;
3595 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3596 }
3597 else
3598 {
3599 rtx dstb, srcb, size;
3600
3601 /* Evaluate to pointers. */
3602 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3603 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3604 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3605 VOIDmode, EXPAND_NORMAL);
3606
3607 #ifdef POINTERS_EXTEND_UNSIGNED
3608 if (GET_MODE (dstb) != Pmode)
3609 dstb = convert_memory_address (Pmode, dstb);
3610
3611 if (GET_MODE (srcb) != Pmode)
3612 srcb = convert_memory_address (Pmode, srcb);
3613 #endif
3614
3615 /* "Dereference" to BLKmode memories. */
3616 dstb = gen_rtx_MEM (BLKmode, dstb);
3617 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3618 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3619 srcb = gen_rtx_MEM (BLKmode, srcb);
3620 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3621 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3622
3623 /* Copy. */
3624 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3625 }
3626
3627 return const0_rtx;
3628 }
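
/* Illustrative sketch, not part of the original source: the two branches
   above correspond roughly to the following user-level forms.

       va_copy (dst, src);
       // scalar or record va_list:  dst = src;
       // array-type va_list:        memcpy (dst, src, sizeof (va_list));

   Hiding this choice behind a builtin keeps stdarg.h free of the
   array-versus-scalar distinction.  */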
3629
3630 /* Expand a call to one of the builtin functions __builtin_frame_address or
3631 __builtin_return_address. */
3632
3633 static rtx
3634 expand_builtin_frame_address (exp)
3635 tree exp;
3636 {
3637 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3638 tree arglist = TREE_OPERAND (exp, 1);
3639
3640 /* The argument must be a nonnegative integer constant.
3641 It counts the number of frames to scan up the stack.
3642 The value is the return address saved in that frame. */
3643 if (arglist == 0)
3644 /* Warning about missing arg was already issued. */
3645 return const0_rtx;
3646 else if (! host_integerp (TREE_VALUE (arglist), 1))
3647 {
3648 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3649 error ("invalid arg to `__builtin_frame_address'");
3650 else
3651 error ("invalid arg to `__builtin_return_address'");
3652 return const0_rtx;
3653 }
3654 else
3655 {
3656 rtx tem
3657 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3658 tree_low_cst (TREE_VALUE (arglist), 1),
3659 hard_frame_pointer_rtx);
3660
3661 /* Some ports cannot access arbitrary stack frames. */
3662 if (tem == NULL)
3663 {
3664 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3665 warning ("unsupported arg to `__builtin_frame_address'");
3666 else
3667 warning ("unsupported arg to `__builtin_return_address'");
3668 return const0_rtx;
3669 }
3670
3671 /* For __builtin_frame_address, return what we've got. */
3672 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3673 return tem;
3674
3675 if (GET_CODE (tem) != REG
3676 && ! CONSTANT_P (tem))
3677 tem = copy_to_mode_reg (Pmode, tem);
3678 return tem;
3679 }
3680 }
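
/* Illustrative sketch, not part of the original source:

       void *fp  = __builtin_frame_address (0);   // this function's frame
       void *ra  = __builtin_return_address (0);  // address we return to
       void *ra1 = __builtin_return_address (1);  // caller's return address

   The count must be a nonnegative integer constant; a count the port
   cannot reach is diagnosed with the "unsupported arg" warning above and
   the expansion falls back to 0.  */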
3681
3682 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3683 we failed and the caller should emit a normal call, otherwise try to get
3684 the result in TARGET, if convenient. */
3685
3686 static rtx
3687 expand_builtin_alloca (arglist, target)
3688 tree arglist;
3689 rtx target;
3690 {
3691 rtx op0;
3692 rtx result;
3693
3694 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3695 return 0;
3696
3697 /* Compute the argument. */
3698 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3699
3700 /* Allocate the desired space. */
3701 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3702
3703 #ifdef POINTERS_EXTEND_UNSIGNED
3704 if (GET_MODE (result) != ptr_mode)
3705 result = convert_memory_address (ptr_mode, result);
3706 #endif
3707
3708 return result;
3709 }
3710
3711 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3712 Return 0 if a normal call should be emitted rather than expanding the
3713 function in-line. If convenient, the result should be placed in TARGET.
3714 SUBTARGET may be used as the target for computing one of EXP's operands. */
3715
3716 static rtx
3717 expand_builtin_unop (arglist, target, subtarget, op_optab)
3718 tree arglist;
3719 rtx target, subtarget;
3720 optab op_optab;
3721 {
3722 rtx op0;
3723 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3724 return 0;
3725
3726 /* Compute the argument. */
3727 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3728 /* Compute op, into TARGET if possible.
3729 Set TARGET to wherever the result comes back. */
3730 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3731 op_optab, op0, target, 1);
3732 if (target == 0)
3733 abort ();
3734 return target;
3735 }
3736
3737 /* If the string passed to fputs is a constant and is one character
3738 long, we attempt to transform this call into __builtin_fputc(). */
3739
3740 static rtx
3741 expand_builtin_fputs (arglist, ignore, unlocked)
3742 tree arglist;
3743 int ignore;
3744 int unlocked;
3745 {
3746 tree len, fn;
3747 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3748 : implicit_built_in_decls[BUILT_IN_FPUTC];
3749 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3750 : implicit_built_in_decls[BUILT_IN_FWRITE];
3751
3752 /* If the return value is used, or the replacement _DECL isn't
3753 initialized, don't do the transformation. */
3754 if (!ignore || !fn_fputc || !fn_fwrite)
3755 return 0;
3756
3757 /* Verify the arguments in the original call. */
3758 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3759 return 0;
3760
3761 /* Get the length of the string passed to fputs. If the length
3762 can't be determined, punt. */
3763 if (!(len = c_strlen (TREE_VALUE (arglist)))
3764 || TREE_CODE (len) != INTEGER_CST)
3765 return 0;
3766
3767 switch (compare_tree_int (len, 1))
3768 {
3769 case -1: /* length is 0, delete the call entirely. */
3770 {
3771 /* Evaluate and ignore the argument in case it has
3772 side-effects. */
3773 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3774 VOIDmode, EXPAND_NORMAL);
3775 return const0_rtx;
3776 }
3777 case 0: /* length is 1, call fputc. */
3778 {
3779 const char *p = c_getstr (TREE_VALUE (arglist));
3780
3781 if (p != NULL)
3782 {
3783 /* New argument list transforming fputs(string, stream) to
3784 fputc(string[0], stream). */
3785 arglist =
3786 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3787 arglist =
3788 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3789 fn = fn_fputc;
3790 break;
3791 }
3792 }
3793 /* FALLTHROUGH */
3794 case 1: /* length is greater than 1, call fwrite. */
3795 {
3796 tree string_arg = TREE_VALUE (arglist);
3797
3798 /* New argument list transforming fputs(string, stream) to
3799 fwrite(string, 1, len, stream). */
3800 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3801 arglist = tree_cons (NULL_TREE, len, arglist);
3802 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3803 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3804 fn = fn_fwrite;
3805 break;
3806 }
3807 default:
3808 abort ();
3809 }
3810
3811 return expand_expr (build_function_call_expr (fn, arglist),
3812 (ignore ? const0_rtx : NULL_RTX),
3813 VOIDmode, EXPAND_NORMAL);
3814 }
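
/* Illustrative sketch, not part of the original source: with a constant
   string argument the transformation above rewrites

       fputs ("",    stream);    =>   (void) stream;                // length 0
       fputs ("x",   stream);    =>   fputc ('x', stream);          // length 1
       fputs ("abc", stream);    =>   fwrite ("abc", 1, 3, stream); // length > 1

   assuming the return value is ignored and the replacement declarations
   are available.  */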
3815
3816 /* Expand a call to __builtin_expect. We return our argument and emit a
3817 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3818 a non-jump context. */
3819
3820 static rtx
3821 expand_builtin_expect (arglist, target)
3822 tree arglist;
3823 rtx target;
3824 {
3825 tree exp, c;
3826 rtx note, rtx_c;
3827
3828 if (arglist == NULL_TREE
3829 || TREE_CHAIN (arglist) == NULL_TREE)
3830 return const0_rtx;
3831 exp = TREE_VALUE (arglist);
3832 c = TREE_VALUE (TREE_CHAIN (arglist));
3833
3834 if (TREE_CODE (c) != INTEGER_CST)
3835 {
3836 error ("second arg to `__builtin_expect' must be a constant");
3837 c = integer_zero_node;
3838 }
3839
3840 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3841
3842 /* Don't bother with expected value notes for integral constants. */
3843 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
3844 {
3845 /* We do need to force this into a register so that we can be
3846 moderately sure to be able to correctly interpret the branch
3847 condition later. */
3848 target = force_reg (GET_MODE (target), target);
3849
3850 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3851
3852 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3853 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3854 }
3855
3856 return target;
3857 }
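
/* Illustrative sketch, not part of the original source: a typical use of
   the builtin expanded above, predicting that an error path is rarely
   taken (handle_error and p are hypothetical names).

       if (__builtin_expect (p == NULL, 0))
         handle_error ();

   In this non-jump expansion the first argument is returned unchanged
   and a NOTE_INSN_EXPECTED_VALUE note records the expectation for later
   branch prediction.  */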
3858
3859 /* Like expand_builtin_expect, except do this in a jump context. This is
3860 called from do_jump if the conditional is a __builtin_expect. Return either
3861 a list of insns to emit the jump or NULL if we cannot optimize
3862 __builtin_expect. We need to optimize this at jump time so that machines
3863 like the PowerPC don't turn the test into a SCC operation, and then jump
3864 based on the test being 0/1. */
3865
3866 rtx
3867 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3868 tree exp;
3869 rtx if_false_label;
3870 rtx if_true_label;
3871 {
3872 tree arglist = TREE_OPERAND (exp, 1);
3873 tree arg0 = TREE_VALUE (arglist);
3874 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3875 rtx ret = NULL_RTX;
3876
3877 /* Only handle __builtin_expect (test, 0) and
3878 __builtin_expect (test, 1). */
3879 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3880 && (integer_zerop (arg1) || integer_onep (arg1)))
3881 {
3882 int num_jumps = 0;
3883 rtx insn;
3884
3885 /* If we fail to locate an appropriate conditional jump, we'll
3886 fall back to normal evaluation. Ensure that the expression
3887 can be re-evaluated. */
3888 switch (unsafe_for_reeval (arg0))
3889 {
3890 case 0: /* Safe. */
3891 break;
3892
3893 case 1: /* Mildly unsafe. */
3894 arg0 = unsave_expr (arg0);
3895 break;
3896
3897 case 2: /* Wildly unsafe. */
3898 return NULL_RTX;
3899 }
3900
3901 /* Expand the jump insns. */
3902 start_sequence ();
3903 do_jump (arg0, if_false_label, if_true_label);
3904 ret = get_insns ();
3905 end_sequence ();
3906
3907 /* Now that the __builtin_expect has been validated, go through and add
3908 the expected-value predictions to each of the conditional jumps. If we run into an
3909 error, just give up and generate the 'safe' code of doing a SCC
3910 operation and then doing a branch on that. */
3911 insn = ret;
3912 while (insn != NULL_RTX)
3913 {
3914 rtx next = NEXT_INSN (insn);
3915 rtx pattern;
3916
3917 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3918 && (pattern = pc_set (insn)) != NULL_RTX)
3919 {
3920 rtx ifelse = SET_SRC (pattern);
3921 rtx label;
3922 int taken;
3923
3924 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3925 goto do_next_insn;
3926
3927 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3928 {
3929 taken = 1;
3930 label = XEXP (XEXP (ifelse, 1), 0);
3931 }
3932 /* An inverted jump reverses the probabilities. */
3933 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3934 {
3935 taken = 0;
3936 label = XEXP (XEXP (ifelse, 2), 0);
3937 }
3938 /* We shouldn't have to worry about conditional returns during
3939 the expansion stage, but handle it gracefully anyway. */
3940 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3941 {
3942 taken = 1;
3943 label = NULL_RTX;
3944 }
3945 /* An inverted return reverses the probabilities. */
3946 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3947 {
3948 taken = 0;
3949 label = NULL_RTX;
3950 }
3951 else
3952 goto do_next_insn;
3953
3954 /* If the test is expected to fail, reverse the
3955 probabilities. */
3956 if (integer_zerop (arg1))
3957 taken = 1 - taken;
3958
3959 /* If we are jumping to the false label, reverse the
3960 probabilities. */
3961 if (label == NULL_RTX)
3962 ; /* conditional return */
3963 else if (label == if_false_label)
3964 taken = 1 - taken;
3965 else if (label != if_true_label)
3966 goto do_next_insn;
3967
3968 num_jumps++;
3969 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3970 }
3971
3972 do_next_insn:
3973 insn = next;
3974 }
3975
3976 /* If no jumps were modified, fail and do __builtin_expect the normal
3977 way. */
3978 if (num_jumps == 0)
3979 ret = NULL_RTX;
3980 }
3981
3982 return ret;
3983 }
3984
3985 void
3986 expand_builtin_trap ()
3987 {
3988 #ifdef HAVE_trap
3989 if (HAVE_trap)
3990 emit_insn (gen_trap ());
3991 else
3992 #endif
3993 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3994 emit_barrier ();
3995 }
3996 \f
3997 /* Expand an expression EXP that calls a built-in function,
3998 with result going to TARGET if that's convenient
3999 (and in mode MODE if that's convenient).
4000 SUBTARGET may be used as the target for computing one of EXP's operands.
4001 IGNORE is nonzero if the value is to be ignored. */
4002
4003 rtx
4004 expand_builtin (exp, target, subtarget, mode, ignore)
4005 tree exp;
4006 rtx target;
4007 rtx subtarget;
4008 enum machine_mode mode;
4009 int ignore;
4010 {
4011 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4012 tree arglist = TREE_OPERAND (exp, 1);
4013 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4014
4015 /* Perform postincrements before expanding builtin functions. */
4016 emit_queue ();
4017
4018 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4019 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4020
4021 /* When not optimizing, generate calls to library functions for a certain
4022 set of builtins. */
4023 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4024 switch (fcode)
4025 {
4026 case BUILT_IN_SQRT:
4027 case BUILT_IN_SQRTF:
4028 case BUILT_IN_SQRTL:
4029 case BUILT_IN_SIN:
4030 case BUILT_IN_SINF:
4031 case BUILT_IN_SINL:
4032 case BUILT_IN_COS:
4033 case BUILT_IN_COSF:
4034 case BUILT_IN_COSL:
4035 case BUILT_IN_EXP:
4036 case BUILT_IN_EXPF:
4037 case BUILT_IN_EXPL:
4038 case BUILT_IN_LOG:
4039 case BUILT_IN_LOGF:
4040 case BUILT_IN_LOGL:
4041 case BUILT_IN_POW:
4042 case BUILT_IN_POWF:
4043 case BUILT_IN_POWL:
4044 case BUILT_IN_ATAN2:
4045 case BUILT_IN_ATAN2F:
4046 case BUILT_IN_ATAN2L:
4047 case BUILT_IN_MEMSET:
4048 case BUILT_IN_MEMCPY:
4049 case BUILT_IN_MEMCMP:
4050 case BUILT_IN_BCMP:
4051 case BUILT_IN_BZERO:
4052 case BUILT_IN_INDEX:
4053 case BUILT_IN_RINDEX:
4054 case BUILT_IN_STRCHR:
4055 case BUILT_IN_STRRCHR:
4056 case BUILT_IN_STRLEN:
4057 case BUILT_IN_STRCPY:
4058 case BUILT_IN_STRNCPY:
4059 case BUILT_IN_STRNCMP:
4060 case BUILT_IN_STRSTR:
4061 case BUILT_IN_STRPBRK:
4062 case BUILT_IN_STRCAT:
4063 case BUILT_IN_STRNCAT:
4064 case BUILT_IN_STRSPN:
4065 case BUILT_IN_STRCSPN:
4066 case BUILT_IN_STRCMP:
4067 case BUILT_IN_FFS:
4068 case BUILT_IN_PUTCHAR:
4069 case BUILT_IN_PUTS:
4070 case BUILT_IN_PRINTF:
4071 case BUILT_IN_FPUTC:
4072 case BUILT_IN_FPUTS:
4073 case BUILT_IN_FWRITE:
4074 case BUILT_IN_PUTCHAR_UNLOCKED:
4075 case BUILT_IN_PUTS_UNLOCKED:
4076 case BUILT_IN_PRINTF_UNLOCKED:
4077 case BUILT_IN_FPUTC_UNLOCKED:
4078 case BUILT_IN_FPUTS_UNLOCKED:
4079 case BUILT_IN_FWRITE_UNLOCKED:
4080 case BUILT_IN_FLOOR:
4081 case BUILT_IN_FLOORF:
4082 case BUILT_IN_FLOORL:
4083 case BUILT_IN_CEIL:
4084 case BUILT_IN_CEILF:
4085 case BUILT_IN_CEILL:
4086 case BUILT_IN_TRUNC:
4087 case BUILT_IN_TRUNCF:
4088 case BUILT_IN_TRUNCL:
4089 case BUILT_IN_ROUND:
4090 case BUILT_IN_ROUNDF:
4091 case BUILT_IN_ROUNDL:
4092 case BUILT_IN_NEARBYINT:
4093 case BUILT_IN_NEARBYINTF:
4094 case BUILT_IN_NEARBYINTL:
4095 return expand_call (exp, target, ignore);
4096
4097 default:
4098 break;
4099 }
4100
4101 switch (fcode)
4102 {
4103 case BUILT_IN_ABS:
4104 case BUILT_IN_LABS:
4105 case BUILT_IN_LLABS:
4106 case BUILT_IN_IMAXABS:
4107 case BUILT_IN_FABS:
4108 case BUILT_IN_FABSF:
4109 case BUILT_IN_FABSL:
4110 /* build_function_call changes these into ABS_EXPR. */
4111 abort ();
4112
4113 case BUILT_IN_CONJ:
4114 case BUILT_IN_CONJF:
4115 case BUILT_IN_CONJL:
4116 case BUILT_IN_CREAL:
4117 case BUILT_IN_CREALF:
4118 case BUILT_IN_CREALL:
4119 case BUILT_IN_CIMAG:
4120 case BUILT_IN_CIMAGF:
4121 case BUILT_IN_CIMAGL:
4122 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4123 and IMAGPART_EXPR. */
4124 abort ();
4125
4126 case BUILT_IN_SIN:
4127 case BUILT_IN_SINF:
4128 case BUILT_IN_SINL:
4129 case BUILT_IN_COS:
4130 case BUILT_IN_COSF:
4131 case BUILT_IN_COSL:
4132 case BUILT_IN_EXP:
4133 case BUILT_IN_EXPF:
4134 case BUILT_IN_EXPL:
4135 case BUILT_IN_LOG:
4136 case BUILT_IN_LOGF:
4137 case BUILT_IN_LOGL:
4138 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4139 because of possible accuracy problems. */
4140 if (! flag_unsafe_math_optimizations)
4141 break;
4142 case BUILT_IN_SQRT:
4143 case BUILT_IN_SQRTF:
4144 case BUILT_IN_SQRTL:
4145 case BUILT_IN_FLOOR:
4146 case BUILT_IN_FLOORF:
4147 case BUILT_IN_FLOORL:
4148 case BUILT_IN_CEIL:
4149 case BUILT_IN_CEILF:
4150 case BUILT_IN_CEILL:
4151 case BUILT_IN_TRUNC:
4152 case BUILT_IN_TRUNCF:
4153 case BUILT_IN_TRUNCL:
4154 case BUILT_IN_ROUND:
4155 case BUILT_IN_ROUNDF:
4156 case BUILT_IN_ROUNDL:
4157 case BUILT_IN_NEARBYINT:
4158 case BUILT_IN_NEARBYINTF:
4159 case BUILT_IN_NEARBYINTL:
4160 target = expand_builtin_mathfn (exp, target, subtarget);
4161 if (target)
4162 return target;
4163 break;
4164
4165 case BUILT_IN_POW:
4166 case BUILT_IN_POWF:
4167 case BUILT_IN_POWL:
4168 case BUILT_IN_ATAN2:
4169 case BUILT_IN_ATAN2F:
4170 case BUILT_IN_ATAN2L:
4171 if (! flag_unsafe_math_optimizations)
4172 break;
4173 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4174 if (target)
4175 return target;
4176 break;
4177
4178 case BUILT_IN_APPLY_ARGS:
4179 return expand_builtin_apply_args ();
4180
4181 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4182 FUNCTION with a copy of the parameters described by
4183 ARGUMENTS, and ARGSIZE. It returns a block of memory
4184 allocated on the stack into which is stored all the registers
4185 that might possibly be used for returning the result of a
4186 function. ARGUMENTS is the value returned by
4187 __builtin_apply_args. ARGSIZE is the number of bytes of
4188 arguments that must be copied. ??? How should this value be
4189 computed? We'll also need a safe worst case value for varargs
4190 functions. */
4191 case BUILT_IN_APPLY:
4192 if (!validate_arglist (arglist, POINTER_TYPE,
4193 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4194 && !validate_arglist (arglist, REFERENCE_TYPE,
4195 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4196 return const0_rtx;
4197 else
4198 {
4199 int i;
4200 tree t;
4201 rtx ops[3];
4202
4203 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4204 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4205
4206 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4207 }
4208
4209 /* __builtin_return (RESULT) causes the function to return the
4210 value described by RESULT. RESULT is address of the block of
4211 memory returned by __builtin_apply. */
4212 case BUILT_IN_RETURN:
4213 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4214 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4215 NULL_RTX, VOIDmode, 0));
4216 return const0_rtx;
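
/* Illustrative sketch, not part of the original source: the usual way
   the builtins handled above combine to forward a call with the current
   function's incoming arguments (target_fn and SIZE_GUESS are
   hypothetical names).

       void *args   = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn,
                                       args, SIZE_GUESS);
       __builtin_return (result);

   SIZE_GUESS is the number of bytes of argument data to copy; as the
   comment above notes, choosing a safe value is the caller's problem.  */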
4217
4218 case BUILT_IN_SAVEREGS:
4219 return expand_builtin_saveregs ();
4220
4221 case BUILT_IN_ARGS_INFO:
4222 return expand_builtin_args_info (exp);
4223
4224 /* Return the address of the first anonymous stack arg. */
4225 case BUILT_IN_NEXT_ARG:
4226 return expand_builtin_next_arg (arglist);
4227
4228 case BUILT_IN_CLASSIFY_TYPE:
4229 return expand_builtin_classify_type (arglist);
4230
4231 case BUILT_IN_CONSTANT_P:
4232 return expand_builtin_constant_p (exp);
4233
4234 case BUILT_IN_FRAME_ADDRESS:
4235 case BUILT_IN_RETURN_ADDRESS:
4236 return expand_builtin_frame_address (exp);
4237
4238 /* Returns the address of the area where the structure is returned,
4239 or 0 otherwise. */
4240 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4241 if (arglist != 0
4242 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4243 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4244 return const0_rtx;
4245 else
4246 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4247
4248 case BUILT_IN_ALLOCA:
4249 target = expand_builtin_alloca (arglist, target);
4250 if (target)
4251 return target;
4252 break;
4253
4254 case BUILT_IN_FFS:
4255 case BUILT_IN_FFSL:
4256 case BUILT_IN_FFSLL:
4257 target = expand_builtin_unop (arglist, target, subtarget, ffs_optab);
4258 if (target)
4259 return target;
4260 break;
4261
4262 case BUILT_IN_CLZ:
4263 case BUILT_IN_CLZL:
4264 case BUILT_IN_CLZLL:
4265 target = expand_builtin_unop (arglist, target, subtarget, clz_optab);
4266 if (target)
4267 return target;
4268 break;
4269
4270 case BUILT_IN_CTZ:
4271 case BUILT_IN_CTZL:
4272 case BUILT_IN_CTZLL:
4273 target = expand_builtin_unop (arglist, target, subtarget, ctz_optab);
4274 if (target)
4275 return target;
4276 break;
4277
4278 case BUILT_IN_POPCOUNT:
4279 case BUILT_IN_POPCOUNTL:
4280 case BUILT_IN_POPCOUNTLL:
4281 target = expand_builtin_unop (arglist, target, subtarget,
4282 popcount_optab);
4283 if (target)
4284 return target;
4285 break;
4286
4287 case BUILT_IN_PARITY:
4288 case BUILT_IN_PARITYL:
4289 case BUILT_IN_PARITYLL:
4290 target = expand_builtin_unop (arglist, target, subtarget, parity_optab);
4291 if (target)
4292 return target;
4293 break;
4294
4295 case BUILT_IN_STRLEN:
4296 target = expand_builtin_strlen (exp, target);
4297 if (target)
4298 return target;
4299 break;
4300
4301 case BUILT_IN_STRCPY:
4302 target = expand_builtin_strcpy (exp, target, mode);
4303 if (target)
4304 return target;
4305 break;
4306
4307 case BUILT_IN_STRNCPY:
4308 target = expand_builtin_strncpy (arglist, target, mode);
4309 if (target)
4310 return target;
4311 break;
4312
4313 case BUILT_IN_STRCAT:
4314 target = expand_builtin_strcat (arglist, target, mode);
4315 if (target)
4316 return target;
4317 break;
4318
4319 case BUILT_IN_STRNCAT:
4320 target = expand_builtin_strncat (arglist, target, mode);
4321 if (target)
4322 return target;
4323 break;
4324
4325 case BUILT_IN_STRSPN:
4326 target = expand_builtin_strspn (arglist, target, mode);
4327 if (target)
4328 return target;
4329 break;
4330
4331 case BUILT_IN_STRCSPN:
4332 target = expand_builtin_strcspn (arglist, target, mode);
4333 if (target)
4334 return target;
4335 break;
4336
4337 case BUILT_IN_STRSTR:
4338 target = expand_builtin_strstr (arglist, target, mode);
4339 if (target)
4340 return target;
4341 break;
4342
4343 case BUILT_IN_STRPBRK:
4344 target = expand_builtin_strpbrk (arglist, target, mode);
4345 if (target)
4346 return target;
4347 break;
4348
4349 case BUILT_IN_INDEX:
4350 case BUILT_IN_STRCHR:
4351 target = expand_builtin_strchr (arglist, target, mode);
4352 if (target)
4353 return target;
4354 break;
4355
4356 case BUILT_IN_RINDEX:
4357 case BUILT_IN_STRRCHR:
4358 target = expand_builtin_strrchr (arglist, target, mode);
4359 if (target)
4360 return target;
4361 break;
4362
4363 case BUILT_IN_MEMCPY:
4364 target = expand_builtin_memcpy (arglist, target, mode);
4365 if (target)
4366 return target;
4367 break;
4368
4369 case BUILT_IN_MEMSET:
4370 target = expand_builtin_memset (exp, target, mode);
4371 if (target)
4372 return target;
4373 break;
4374
4375 case BUILT_IN_BZERO:
4376 target = expand_builtin_bzero (exp);
4377 if (target)
4378 return target;
4379 break;
4380
4381 case BUILT_IN_STRCMP:
4382 target = expand_builtin_strcmp (exp, target, mode);
4383 if (target)
4384 return target;
4385 break;
4386
4387 case BUILT_IN_STRNCMP:
4388 target = expand_builtin_strncmp (exp, target, mode);
4389 if (target)
4390 return target;
4391 break;
4392
4393 case BUILT_IN_BCMP:
4394 case BUILT_IN_MEMCMP:
4395 target = expand_builtin_memcmp (exp, arglist, target, mode);
4396 if (target)
4397 return target;
4398 break;
4399
4400 case BUILT_IN_SETJMP:
4401 target = expand_builtin_setjmp (arglist, target);
4402 if (target)
4403 return target;
4404 break;
4405
4406 /* __builtin_longjmp is passed a pointer to an array of five words.
4407 It's similar to the C library longjmp function but works with
4408 __builtin_setjmp above. */
4409 case BUILT_IN_LONGJMP:
4410 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4411 break;
4412 else
4413 {
4414 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4415 VOIDmode, 0);
4416 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4417 NULL_RTX, VOIDmode, 0);
4418
4419 if (value != const1_rtx)
4420 {
4421 error ("__builtin_longjmp second argument must be 1");
4422 return const0_rtx;
4423 }
4424
4425 expand_builtin_longjmp (buf_addr, value);
4426 return const0_rtx;
4427 }
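
/* Illustrative sketch, not part of the original source: the builtin pair
   handled above (buf, do_work and recover are hypothetical names).

       void *buf[5];

       if (__builtin_setjmp (buf) == 0)
         do_work ();        // normal path
       else
         recover ();        // reached via __builtin_longjmp (buf, 1)

   The second argument of __builtin_longjmp must be the constant 1, as
   enforced just above.  */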
4428
4429 case BUILT_IN_TRAP:
4430 expand_builtin_trap ();
4431 return const0_rtx;
4432
4433 case BUILT_IN_FPUTS:
4434 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
4435 if (target)
4436 return target;
4437 break;
4438 case BUILT_IN_FPUTS_UNLOCKED:
4439 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
4440 if (target)
4441 return target;
4442 break;
4443
4444 /* Various hooks for the DWARF 2 __throw routine. */
4445 case BUILT_IN_UNWIND_INIT:
4446 expand_builtin_unwind_init ();
4447 return const0_rtx;
4448 case BUILT_IN_DWARF_CFA:
4449 return virtual_cfa_rtx;
4450 #ifdef DWARF2_UNWIND_INFO
4451 case BUILT_IN_DWARF_FP_REGNUM:
4452 return expand_builtin_dwarf_fp_regnum ();
4453 case BUILT_IN_INIT_DWARF_REG_SIZES:
4454 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4455 return const0_rtx;
4456 #endif
4457 case BUILT_IN_FROB_RETURN_ADDR:
4458 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4459 case BUILT_IN_EXTRACT_RETURN_ADDR:
4460 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4461 case BUILT_IN_EH_RETURN:
4462 expand_builtin_eh_return (TREE_VALUE (arglist),
4463 TREE_VALUE (TREE_CHAIN (arglist)));
4464 return const0_rtx;
4465 #ifdef EH_RETURN_DATA_REGNO
4466 case BUILT_IN_EH_RETURN_DATA_REGNO:
4467 return expand_builtin_eh_return_data_regno (arglist);
4468 #endif
4469 case BUILT_IN_VA_START:
4470 case BUILT_IN_STDARG_START:
4471 return expand_builtin_va_start (arglist);
4472 case BUILT_IN_VA_END:
4473 return expand_builtin_va_end (arglist);
4474 case BUILT_IN_VA_COPY:
4475 return expand_builtin_va_copy (arglist);
4476 case BUILT_IN_EXPECT:
4477 return expand_builtin_expect (arglist, target);
4478 case BUILT_IN_PREFETCH:
4479 expand_builtin_prefetch (arglist);
4480 return const0_rtx;
4481
4482
4483 default: /* Just do a library call if the builtin is unknown. */
4484 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4485 error ("built-in function `%s' not currently supported",
4486 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4487 }
4488
4489 /* The switch statement above can drop through to cause the function
4490 to be called normally. */
4491 return expand_call (exp, target, ignore);
4492 }
4493
4494 /* Determine whether a tree node represents a call to a built-in
4495 math function. If the tree T is a call to a built-in function
4496 taking a single real argument, then the return value is the
4497 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4498 the return value is END_BUILTINS. */
4499
4500 enum built_in_function
4501 builtin_mathfn_code (t)
4502 tree t;
4503 {
4504 tree fndecl, arglist;
4505
4506 if (TREE_CODE (t) != CALL_EXPR
4507 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4508 return END_BUILTINS;
4509
4510 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4511 if (TREE_CODE (fndecl) != FUNCTION_DECL
4512 || ! DECL_BUILT_IN (fndecl)
4513 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4514 return END_BUILTINS;
4515
4516 arglist = TREE_OPERAND (t, 1);
4517 if (! arglist
4518 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4519 return END_BUILTINS;
4520
4521 arglist = TREE_CHAIN (arglist);
4522 switch (DECL_FUNCTION_CODE (fndecl))
4523 {
4524 case BUILT_IN_POW:
4525 case BUILT_IN_POWF:
4526 case BUILT_IN_POWL:
4527 case BUILT_IN_ATAN2:
4528 case BUILT_IN_ATAN2F:
4529 case BUILT_IN_ATAN2L:
4530 if (! arglist
4531 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4532 || TREE_CHAIN (arglist))
4533 return END_BUILTINS;
4534 break;
4535
4536 default:
4537 if (arglist)
4538 return END_BUILTINS;
4539 break;
4540 }
4541
4542 return DECL_FUNCTION_CODE (fndecl);
4543 }
4544
4545 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4546 constant. ARGLIST is the argument list of the call. */
4547
4548 static tree
4549 fold_builtin_constant_p (arglist)
4550 tree arglist;
4551 {
4552 if (arglist == 0)
4553 return 0;
4554
4555 arglist = TREE_VALUE (arglist);
4556
4557 /* We return 1 for a numeric type that's known to be a constant
4558 value at compile-time or for an aggregate type that's a
4559 literal constant. */
4560 STRIP_NOPS (arglist);
4561
4562 /* If we know this is a constant, fold it to the constant 1. */
4563 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4564 || (TREE_CODE (arglist) == CONSTRUCTOR
4565 && TREE_CONSTANT (arglist))
4566 || (TREE_CODE (arglist) == ADDR_EXPR
4567 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4568 return integer_one_node;
4569
4570 /* If we aren't going to be running CSE or this expression
4571 has side effects, show we don't know it to be a constant.
4572 Likewise if it's a pointer or aggregate type since in those
4573 cases we only want literals, since those are only optimized
4574 when generating RTL, not later.
4575 And finally, if we are compiling an initializer, not code, we
4576 need to return a definite result now; there's not going to be any
4577 more optimization done. */
4578 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4579 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4580 || POINTER_TYPE_P (TREE_TYPE (arglist))
4581 || cfun == 0)
4582 return integer_zero_node;
4583
4584 return 0;
4585 }
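
/* Illustrative sketch, not part of the original source: what the folder
   above can decide at this stage (global_var is a hypothetical name).

       __builtin_constant_p (42)          -> 1, folded immediately
       __builtin_constant_p ("abc")       -> 1, address of a string literal
       __builtin_constant_p (global_var)  -> 0 only once no further CSE is
                                             expected; otherwise left for
                                             a later pass to decide.  */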
4586
4587 /* Fold a call to __builtin_classify_type. */
4588
4589 static tree
4590 fold_builtin_classify_type (arglist)
4591 tree arglist;
4592 {
4593 if (arglist == 0)
4594 return build_int_2 (no_type_class, 0);
4595
4596 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4597 }
4598
4599 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4600
4601 static tree
4602 fold_builtin_inf (type, warn)
4603 tree type;
4604 int warn;
4605 {
4606 REAL_VALUE_TYPE real;
4607
4608 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4609 warning ("target format does not support infinity");
4610
4611 real_inf (&real);
4612 return build_real (type, real);
4613 }
4614
4615 /* Fold a call to __builtin_nan or __builtin_nans. */
4616
4617 static tree
4618 fold_builtin_nan (arglist, type, quiet)
4619 tree arglist, type;
4620 int quiet;
4621 {
4622 REAL_VALUE_TYPE real;
4623 const char *str;
4624
4625 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4626 return 0;
4627 str = c_getstr (TREE_VALUE (arglist));
4628 if (!str)
4629 return 0;
4630
4631 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4632 return 0;
4633
4634 return build_real (type, real);
4635 }
4636
4637 /* EXP is assumed to be a builtin call where truncation can be propagated
4638 across (for instance floor((double)f) == (double)floorf (f)).
4639 Do the transformation. */
4640 static tree
4641 fold_trunc_transparent_mathfn (exp)
4642 tree exp;
4643 {
4644 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4645 tree arglist = TREE_OPERAND (exp, 1);
4646 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4647
4648 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4649 {
4650 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
4651 tree ftype = TREE_TYPE (exp);
4652 tree newtype = TREE_TYPE (arg0);
4653 tree decl;
4654
4655 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
4656 && (decl = mathfn_built_in (newtype, fcode)))
4657 {
4658 arglist =
4659 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
4660 return convert (ftype,
4661 build_function_call_expr (decl, arglist));
4662 }
4663 }
4664 return 0;
4665 }
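
/* Illustrative sketch, not part of the original source: the effect of the
   transformation above on user code, assuming float is narrower than
   double and f is a float variable.

       double d = floor ((double) f);
   becomes
       double d = (double) floorf (f);

   which is safe because the result of floor on a float value is itself
   exactly representable as a float.  */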
4666
4667 /* Used by constant folding to eliminate some builtin calls early. EXP is
4668 the CALL_EXPR of a call to a builtin function. */
4669
4670 tree
4671 fold_builtin (exp)
4672 tree exp;
4673 {
4674 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4675 tree arglist = TREE_OPERAND (exp, 1);
4676 tree type = TREE_TYPE (TREE_TYPE (fndecl));
4677 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4678
4679 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4680 return 0;
4681
4682 switch (fcode)
4683 {
4684 case BUILT_IN_CONSTANT_P:
4685 return fold_builtin_constant_p (arglist);
4686
4687 case BUILT_IN_CLASSIFY_TYPE:
4688 return fold_builtin_classify_type (arglist);
4689
4690 case BUILT_IN_STRLEN:
4691 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4692 {
4693 tree len = c_strlen (TREE_VALUE (arglist));
4694 if (len)
4695 {
4696 /* Convert from the internal "sizetype" type to "size_t". */
4697 if (size_type_node)
4698 len = convert (size_type_node, len);
4699 return len;
4700 }
4701 }
4702 break;
4703
4704 case BUILT_IN_SQRT:
4705 case BUILT_IN_SQRTF:
4706 case BUILT_IN_SQRTL:
4707 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4708 {
4709 enum built_in_function fcode;
4710 tree arg = TREE_VALUE (arglist);
4711
4712 /* Optimize sqrt of constant value. */
4713 if (TREE_CODE (arg) == REAL_CST
4714 && ! TREE_CONSTANT_OVERFLOW (arg))
4715 {
4716 enum machine_mode mode;
4717 REAL_VALUE_TYPE r, x;
4718
4719 x = TREE_REAL_CST (arg);
4720 mode = TYPE_MODE (type);
4721 if (real_sqrt (&r, mode, &x)
4722 || (!flag_trapping_math && !flag_errno_math))
4723 return build_real (type, r);
4724 }
4725
4726 /* Optimize sqrt(exp(x)) = exp(x/2.0). */
4727 fcode = builtin_mathfn_code (arg);
4728 if (flag_unsafe_math_optimizations
4729 && (fcode == BUILT_IN_EXP
4730 || fcode == BUILT_IN_EXPF
4731 || fcode == BUILT_IN_EXPL))
4732 {
4733 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
4734 arg = build (RDIV_EXPR, type,
4735 TREE_VALUE (TREE_OPERAND (arg, 1)),
4736 build_real (type, dconst2));
4737 arglist = build_tree_list (NULL_TREE, arg);
4738 return build_function_call_expr (expfn, arglist);
4739 }
4740 }
4741 break;
4742
4743 case BUILT_IN_EXP:
4744 case BUILT_IN_EXPF:
4745 case BUILT_IN_EXPL:
4746 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4747 {
4748 enum built_in_function fcode;
4749 tree arg = TREE_VALUE (arglist);
4750
4751 /* Optimize exp(0.0) = 1.0. */
4752 if (real_zerop (arg))
4753 return build_real (type, dconst1);
4754
4755 /* Optimize exp(log(x)) = x. */
4756 fcode = builtin_mathfn_code (arg);
4757 if (flag_unsafe_math_optimizations
4758 && (fcode == BUILT_IN_LOG
4759 || fcode == BUILT_IN_LOGF
4760 || fcode == BUILT_IN_LOGL))
4761 return TREE_VALUE (TREE_OPERAND (arg, 1));
4762 }
4763 break;
4764
4765 case BUILT_IN_LOG:
4766 case BUILT_IN_LOGF:
4767 case BUILT_IN_LOGL:
4768 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4769 {
4770 enum built_in_function fcode;
4771 tree arg = TREE_VALUE (arglist);
4772
4773 /* Optimize log(1.0) = 0.0. */
4774 if (real_onep (arg))
4775 return build_real (type, dconst0);
4776
4777 /* Optimize log(exp(x)) = x. */
4778 fcode = builtin_mathfn_code (arg);
4779 if (flag_unsafe_math_optimizations
4780 && (fcode == BUILT_IN_EXP
4781 || fcode == BUILT_IN_EXPF
4782 || fcode == BUILT_IN_EXPL))
4783 return TREE_VALUE (TREE_OPERAND (arg, 1));
4784
4785 /* Optimize log(sqrt(x)) = log(x)/2.0. */
4786 if (flag_unsafe_math_optimizations
4787 && (fcode == BUILT_IN_SQRT
4788 || fcode == BUILT_IN_SQRTF
4789 || fcode == BUILT_IN_SQRTL))
4790 {
4791 tree logfn = build_function_call_expr (fndecl,
4792 TREE_OPERAND (arg, 1));
4793 return fold (build (RDIV_EXPR, type, logfn,
4794 build_real (type, dconst2)));
4795 }
4796 }
4797 break;
4798
4799 case BUILT_IN_POW:
4800 case BUILT_IN_POWF:
4801 case BUILT_IN_POWL:
4802 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4803 {
4804 tree arg0 = TREE_VALUE (arglist);
4805 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4806
4807 /* Optimize pow(x,0.0) = 1.0. */
4808 if (real_zerop (arg1))
4809 return omit_one_operand (type, build_real (type, dconst1), arg0);
4810
4811 /* Optimize pow(1.0,y) = 1.0. */
4812 if (real_onep (arg0))
4813 return omit_one_operand (type, build_real (type, dconst1), arg1);
4814 }
4815 break;
4816
4817 case BUILT_IN_INF:
4818 case BUILT_IN_INFF:
4819 case BUILT_IN_INFL:
4820 return fold_builtin_inf (type, true);
4821
4822 case BUILT_IN_HUGE_VAL:
4823 case BUILT_IN_HUGE_VALF:
4824 case BUILT_IN_HUGE_VALL:
4825 return fold_builtin_inf (type, false);
4826
4827 case BUILT_IN_NAN:
4828 case BUILT_IN_NANF:
4829 case BUILT_IN_NANL:
4830 return fold_builtin_nan (arglist, type, true);
4831
4832 case BUILT_IN_NANS:
4833 case BUILT_IN_NANSF:
4834 case BUILT_IN_NANSL:
4835 return fold_builtin_nan (arglist, type, false);
4836
4837 case BUILT_IN_FLOOR:
4838 case BUILT_IN_FLOORF:
4839 case BUILT_IN_FLOORL:
4840 case BUILT_IN_CEIL:
4841 case BUILT_IN_CEILF:
4842 case BUILT_IN_CEILL:
4843 case BUILT_IN_TRUNC:
4844 case BUILT_IN_TRUNCF:
4845 case BUILT_IN_TRUNCL:
4846 case BUILT_IN_ROUND:
4847 case BUILT_IN_ROUNDF:
4848 case BUILT_IN_ROUNDL:
4849 case BUILT_IN_NEARBYINT:
4850 case BUILT_IN_NEARBYINTF:
4851 case BUILT_IN_NEARBYINTL:
4852 return fold_trunc_transparent_mathfn (exp);
4853
4854 default:
4855 break;
4856 }
4857
4858 return 0;
4859 }
4860
4861 /* Conveniently construct a function call expression. */
4862
4863 tree
4864 build_function_call_expr (fn, arglist)
4865 tree fn, arglist;
4866 {
4867 tree call_expr;
4868
4869 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4870 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4871 call_expr, arglist);
4872 TREE_SIDE_EFFECTS (call_expr) = 1;
4873 return fold (call_expr);
4874 }
4875
4876 /* This function validates the types of a function call argument list
4877 represented as a tree chain of parameters against a specified list
4878 of tree_codes. If the last specifier is a 0, that represents an
4879 ellipsis, otherwise the last specifier must be a VOID_TYPE. */
4880
4881 static int
4882 validate_arglist VPARAMS ((tree arglist, ...))
4883 {
4884 enum tree_code code;
4885 int res = 0;
4886
4887 VA_OPEN (ap, arglist);
4888 VA_FIXEDARG (ap, tree, arglist);
4889
4890 do
4891 {
4892 code = va_arg (ap, enum tree_code);
4893 switch (code)
4894 {
4895 case 0:
4896 /* This signifies an ellipsis; any further arguments are all ok. */
4897 res = 1;
4898 goto end;
4899 case VOID_TYPE:
4900 /* This signifies an endlink: if no arguments remain, return
4901 true; otherwise return false. */
4902 res = arglist == 0;
4903 goto end;
4904 default:
4905 /* If no parameters remain or the parameter's code does not
4906 match the specified code, return false. Otherwise continue
4907 checking any remaining arguments. */
4908 if (arglist == 0
4909 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4910 goto end;
4911 break;
4912 }
4913 arglist = TREE_CHAIN (arglist);
4914 }
4915 while (1);
4916
4917 /* We need gotos here since we can only have one VA_CLOSE in a
4918 function. */
4919 end: ;
4920 VA_CLOSE (ap);
4921
4922 return res;
4923 }
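
/* Illustrative sketch, not part of the original source: call patterns
   for validate_arglist.  The first form appears at many call sites
   above; the second illustrates the ellipsis specifier and is
   hypothetical.

       validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
           accepts exactly (pointer, integer);

       validate_arglist (arglist, POINTER_TYPE, 0)
           accepts a pointer followed by any further arguments.  */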
4924
4925 /* Default version of target-specific builtin setup that does nothing. */
4926
4927 void
4928 default_init_builtins ()
4929 {
4930 }
4931
4932 /* Default target-specific builtin expander that does nothing. */
4933
4934 rtx
4935 default_expand_builtin (exp, target, subtarget, mode, ignore)
4936 tree exp ATTRIBUTE_UNUSED;
4937 rtx target ATTRIBUTE_UNUSED;
4938 rtx subtarget ATTRIBUTE_UNUSED;
4939 enum machine_mode mode ATTRIBUTE_UNUSED;
4940 int ignore ATTRIBUTE_UNUSED;
4941 {
4942 return NULL_RTX;
4943 }
4944
4945 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
4946
4947 void
4948 purge_builtin_constant_p ()
4949 {
4950 rtx insn, set, arg, new, note;
4951
4952 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4953 if (INSN_P (insn)
4954 && (set = single_set (insn)) != NULL_RTX
4955 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
4956 || (GET_CODE (arg) == SUBREG
4957 && (GET_CODE (arg = SUBREG_REG (arg))
4958 == CONSTANT_P_RTX))))
4959 {
4960 arg = XEXP (arg, 0);
4961 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
4962 validate_change (insn, &SET_SRC (set), new, 0);
4963
4964 /* Remove the REG_EQUAL note from the insn. */
4965 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4966 remove_note (insn, note);
4967 }
4968 }
4969