Typo fix.
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
/* Nonzero if NODE's name begins with the "__builtin_" prefix, i.e. the
   user invoked it under the reserved builtin name rather than the plain
   library name.  */
#define CALLED_AS_BUILT_IN(NODE) \
  (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

/* Register mappings for target machines without register windows.
   On such targets the incoming and outgoing register numbers are the
   same, so the mappings default to the identity.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Direction in which to pad varargs arguments; defaults to downward
   exactly when the target is big-endian.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
62
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringify each DEF_BUILTIN enumerator from builtins.def so the table
   below parallels enum built_in_function.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
const char *const built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance the runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
81
/* Trigonometric and mathematical constants used in builtin folding.
   Lazily initialized by init_builtin_dconsts; BUILTIN_DCONSTS_INIT
   records whether that has happened yet.  */
static bool builtin_dconsts_init = 0;
static REAL_VALUE_TYPE dconstpi;
static REAL_VALUE_TYPE dconste;
86
87 static int get_pointer_alignment (tree, unsigned int);
88 static tree c_strlen (tree, int);
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static rtx expand_builtin_setjmp (tree, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_constant_p (tree, enum machine_mode);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (tree);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_bcopy (tree);
127 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, int, int);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_classify_type (tree);
149 static tree fold_builtin_inf (tree, int);
150 static tree fold_builtin_nan (tree, tree, int);
151 static int validate_arglist (tree, ...);
152 static tree fold_trunc_transparent_mathfn (tree);
153 static bool readonly_data_expr (tree);
154 static rtx expand_builtin_fabs (tree, rtx, rtx);
155 static rtx expand_builtin_cabs (tree, rtx);
156 static void init_builtin_dconsts (void);
157 static tree fold_builtin_cabs (tree, tree, tree);
158
159 /* Initialize mathematical constants for constant folding builtins.
160 These constants need to be given to at least 160 bits precision. */
161
162 static void
163 init_builtin_dconsts (void)
164 {
165 real_from_string (&dconstpi,
166 "3.1415926535897932384626433832795028841971693993751058209749445923078");
167 real_from_string (&dconste,
168 "2.7182818284590452353602874713526624977572470936999595749669676277241");
169
170 builtin_dconsts_init = true;
171 }
172
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* Only pointers carry alignment information here.  */
  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  /* Start with the alignment of the pointed-to type, capped at MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Walk down through the expression looking for a tighter bound.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* Conversions do not change the address; look through them,
	     stopping if the operand is no longer a pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant byte offset is a multiple
	     of it, since the offset may spoil any stronger guarantee.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
240
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be non-zero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is non-zero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree, and
     only if the condition is side-effect free (or we are allowed to skip
     evaluating it, per ONLY_VALUE).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (e1, e2) has the length of e2, again provided e1's side effects can
     be disregarded.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  /* MAX is the array size minus the terminating slot.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
334
335 /* Return a char pointer for a C string if it is a string constant
336 or sum of string constant and integer constant. */
337
338 static const char *
339 c_getstr (tree src)
340 {
341 tree offset_node;
342
343 src = string_constant (src, &offset_node);
344 if (src == 0)
345 return 0;
346
347 if (offset_node == 0)
348 return TREE_STRING_POINTER (src);
349 else if (!host_integerp (offset_node, 1)
350 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
351 return 0;
352
353 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
354 }
355
356 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
357 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
358
359 static rtx
360 c_readstr (const char *str, enum machine_mode mode)
361 {
362 HOST_WIDE_INT c[2];
363 HOST_WIDE_INT ch;
364 unsigned int i, j;
365
366 if (GET_MODE_CLASS (mode) != MODE_INT)
367 abort ();
368 c[0] = 0;
369 c[1] = 0;
370 ch = 1;
371 for (i = 0; i < GET_MODE_SIZE (mode); i++)
372 {
373 j = i;
374 if (WORDS_BIG_ENDIAN)
375 j = GET_MODE_SIZE (mode) - i - 1;
376 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
377 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
378 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
379 j *= BITS_PER_UNIT;
380 if (j > 2 * HOST_BITS_PER_WIDE_INT)
381 abort ();
382 if (ch)
383 ch = (unsigned char) str[i];
384 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
385 }
386 return immed_double_const (c[0], c[1], mode);
387 }
388
389 /* Cast a target constant CST to target CHAR and if that value fits into
390 host char type, return zero and put that value into variable pointed by
391 P. */
392
393 static int
394 target_char_cast (tree cst, char *p)
395 {
396 unsigned HOST_WIDE_INT val, hostval;
397
398 if (!host_integerp (cst, 1)
399 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
400 return 1;
401
402 val = tree_low_cst (cst, 1);
403 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
404 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
405
406 hostval = val;
407 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
408 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
409
410 if (val != hostval)
411 return 1;
412
413 *p = hostval;
414 return 0;
415 }
416
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
			    rtx tem)
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      /* Load the previous frame pointer out of the current frame.  */
      tem = memory_address (Pmode, tem);
      tem = gen_rtx_MEM (Pmode, tem);
      set_mem_alias_set (tem, get_frame_alias_set ());
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default the return address is stored in the word just past the
     frame pointer.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
  set_mem_alias_set (tem, get_frame_alias_set ());
#endif
  return tem;
}
474
/* Alias set used for setjmp buffer.  Allocated lazily; -1 means not
   yet created.  */
static HOST_WIDE_INT setjmp_alias_set = -1;
477
478 /* Construct the leading half of a __builtin_setjmp call. Control will
479 return to RECEIVER_LABEL. This is used directly by sjlj exception
480 handling code. */
481
482 void
483 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
484 {
485 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
486 rtx stack_save;
487 rtx mem;
488
489 if (setjmp_alias_set == -1)
490 setjmp_alias_set = new_alias_set ();
491
492 #ifdef POINTERS_EXTEND_UNSIGNED
493 if (GET_MODE (buf_addr) != Pmode)
494 buf_addr = convert_memory_address (Pmode, buf_addr);
495 #endif
496
497 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
498
499 emit_queue ();
500
501 /* We store the frame pointer and the address of receiver_label in
502 the buffer and use the rest of it for the stack save area, which
503 is machine-dependent. */
504
505 #ifndef BUILTIN_SETJMP_FRAME_VALUE
506 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
507 #endif
508
509 mem = gen_rtx_MEM (Pmode, buf_addr);
510 set_mem_alias_set (mem, setjmp_alias_set);
511 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
512
513 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
514 set_mem_alias_set (mem, setjmp_alias_set);
515
516 emit_move_insn (validize_mem (mem),
517 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
518
519 stack_save = gen_rtx_MEM (sa_mode,
520 plus_constant (buf_addr,
521 2 * GET_MODE_SIZE (Pmode)));
522 set_mem_alias_set (stack_save, setjmp_alias_set);
523 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
524
525 /* If there is further processing to do, do it. */
526 #ifdef HAVE_builtin_setjmp_setup
527 if (HAVE_builtin_setjmp_setup)
528 emit_insn (gen_builtin_setjmp_setup (buf_addr));
529 #endif
530
531 /* Tell optimize_save_area_alloca that extra work is going to
532 need to go on during alloca. */
533 current_function_calls_setjmp = 1;
534
535 /* Set this so all the registers get saved in our frame; we need to be
536 able to copy the saved values for any registers from frames we unwind. */
537 current_function_has_nonlocal_label = 1;
538 }
539
/* Construct the trailing part of a __builtin_setjmp call.
   This is used directly by sjlj exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

  /* Give the target a chance to emit extra receiver code, preferring
     the setjmp-specific pattern over the generic nonlocal-goto one.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
604
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

static rtx
expand_builtin_setjmp (tree arglist, rtx target)
{
  rtx buf_addr, next_lab, cont_lab;

  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* The result must live in a hard-register-free pseudo, since the
     receiver path stores to it after registers are restored.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

  next_lab = gen_label_rtx ();
  cont_lab = gen_label_rtx ();

  expand_builtin_setjmp_setup (buf_addr, next_lab);

  /* Set TARGET to zero and branch to the continue label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (cont_lab));
  emit_barrier ();
  /* NEXT_LAB is where a longjmp re-enters this function.  */
  emit_label (next_lab);

  expand_builtin_setjmp_receiver (next_lab);

  /* Set TARGET to one.  */
  emit_move_insn (target, const1_rtx);
  emit_label (cont_lab);

  /* Tell flow about the strange goings on.  Putting `next_lab' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels
    = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);

  return target;
}
656
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (buf_addr) != Pmode)
    buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

  current_function_calls_longjmp = 1;

  /* Remember where we were, so the loop below can tell whether any
     jump insn was actually emitted.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Reload the saved FP, receiver label and stack pointer from the
	 buffer laid out by expand_builtin_setjmp_setup.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (insn == last)
	abort ();
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (GET_CODE (insn) == CALL_INSN)
	break;
    }
}
750
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree arglist)
{
  tree arg0, arg1, arg2;
  rtx op0, op1, op2;

  if (!validate_arglist (arglist, POINTER_TYPE, 0))
    return;

  arg0 = TREE_VALUE (arglist);
  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  if (TREE_CHAIN (arglist))
    {
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      if (TREE_CHAIN (TREE_CHAIN (arglist)))
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      else
	arg2 = build_int_2 (3, 0);
    }
  else
    {
      arg1 = integer_zero_node;
      arg2 = build_int_2 (3, 0);
    }

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second arg to `__builtin_prefetch' must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning ("invalid second arg to __builtin_prefetch; using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third arg to `__builtin_prefetch' must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning ("invalid third arg to __builtin_prefetch; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* If the address operand doesn't satisfy the prefetch pattern's
	 predicate or mode, force it into a Pmode register.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	   (op0,
	    insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE(op0) != Pmode))
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE(op0) != Pmode)
	    op0 = convert_memory_address (Pmode, op0);
#endif
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
  else
#endif
    op0 = protect_from_queue (op0, 0);
  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (GET_CODE (op0) != MEM && side_effects_p (op0))
    emit_insn (op0);
}
837
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movstrsi, ..).  */

static rtx
get_memory_rtx (tree exp)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
  rtx mem;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      /* &object: take the attributes of the object itself.  */
      exp = TREE_OPERAND (exp, 0);
      set_mem_attributes (mem, exp, 0);
    }
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
      /* memcpy, memset and other builtin stringops can alias with anything.  */
      set_mem_alias_set (mem, 0);
    }

  return mem;
}
876 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Filled in by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
898
899 /* Return the offset of register REGNO into the block returned by
900 __builtin_apply_args. This is not declared static, since it is
901 needed in objc-act.c. */
902
903 int
904 apply_args_register_offset (int regno)
905 {
906 apply_args_size ();
907
908 /* Arguments are always put in outgoing registers (in the argument
909 block) if such make sense. */
910 #ifdef OUTGOING_REGNO
911 regno = OUTGOING_REGNO (regno);
912 #endif
913 return apply_args_reg_offset[regno];
914 }
915
/* Return the size in bytes of the block returned by __builtin_apply_args,
   and initialize apply_args_mode and apply_args_reg_offset as a side
   effect.  The result is computed once and cached.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* First choice: the widest integer mode that fits in a
	       single hard register.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Fall back to scalar float modes for registers that do not
	       accept any integer mode.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    /* ... then vector float modes ...  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    /* ... and finally vector integer modes.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    mode = best_mode;
	    /* No usable mode at all indicates an inconsistent target
	       description.  */
	    if (mode == VOIDmode)
	      abort ();

	    /* Pad the running size up to this mode's alignment, record
	       the register's offset and mode, then account for its size.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
995
/* Return the size in bytes of the block returned by __builtin_apply,
   and initialize apply_result_mode as a side effect.  The result is
   computed once and cached.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* First choice: the widest integer mode below TImode that
	       the register accepts.  (NOTE(review): unlike the loop in
	       apply_args_size this one stops at TImode rather than
	       VOIDmode -- presumably to avoid multi-word integer modes;
	       confirm before changing.)  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Fall back to scalar float modes ...  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    /* ... then vector float modes ...  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    /* ... and finally vector integer modes.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Pad to this mode's alignment, then account for its size.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1069
1070 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1071 /* Create a vector describing the result block RESULT. If SAVEP is true,
1072 the result block is used to save the values; otherwise it is used to
1073 restore the values. */
1074
1075 static rtx
1076 result_vector (int savep, rtx result)
1077 {
1078 int regno, size, align, nelts;
1079 enum machine_mode mode;
1080 rtx reg, mem;
1081 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1082
1083 size = nelts = 0;
1084 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1085 if ((mode = apply_result_mode[regno]) != VOIDmode)
1086 {
1087 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1088 if (size % align != 0)
1089 size = CEIL (size, align) * align;
1090 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1091 mem = adjust_address (result, mode, size);
1092 savevec[nelts++] = (savep
1093 ? gen_rtx_SET (VOIDmode, mem, reg)
1094 : gen_rtx_SET (VOIDmode, reg, mem));
1095 size += GET_MODE_SIZE (mode);
1096 }
1097 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1098 }
1099 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1100
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of the block that was filled in.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	/* Keep SIZE aligned for this mode, matching the layout
	   computed by apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	/* Use the inbound register number; on register-window machines
	   the incoming and outgoing numbers differ.  */
	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (adjust_address (registers, Pmode, 0),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1153
1154 /* __builtin_apply_args returns block of memory allocated on
1155 the stack into which is stored the arg pointer, structure
1156 value address, static chain, and all the registers that might
1157 possibly be used in performing a function call. The code is
1158 moved to the start of the function so the incoming values are
1159 saved. */
1160
1161 static rtx
1162 expand_builtin_apply_args (void)
1163 {
1164 /* Don't do __builtin_apply_args more than once in a function.
1165 Save the result of the first call and reuse it. */
1166 if (apply_args_value != 0)
1167 return apply_args_value;
1168 {
1169 /* When this function is called, it means that registers must be
1170 saved on entry to this function. So we migrate the
1171 call to the first insn of this function. */
1172 rtx temp;
1173 rtx seq;
1174
1175 start_sequence ();
1176 temp = expand_builtin_apply_args_1 ();
1177 seq = get_insns ();
1178 end_sequence ();
1179
1180 apply_args_value = temp;
1181
1182 /* Put the insns after the NOTE that starts the function.
1183 If this is inside a start_sequence, make the outer-level insn
1184 chain current, so the code is placed at the start of the
1185 function. */
1186 push_topmost_sequence ();
1187 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1188 pop_topmost_sequence ();
1189 return temp;
1190 }
1191 }
1192
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee's address, ARGUMENTS the address of a block
   laid out by __builtin_apply_args, and ARGSIZE the byte count of
   stack arguments to copy.  Returns a pseudo holding the address of
   the saved-result block.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (arguments) != Pmode)
    arguments = convert_memory_address (Pmode, arguments);
#endif

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks the saved arg pointer is past the
     arguments; step back over them.  */
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned for this mode, matching the layout the
	   block was saved with.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1348
/* Perform an untyped return.  RESULT is the address of a block saved
   earlier by expand_builtin_apply; the values in it are loaded back
   into the return registers before returning.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (result) != Pmode)
    result = convert_memory_address (Pmode, result);
#endif

  /* Make sure apply_result_mode is initialized before reading it.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned for this mode, matching the layout the
	   block was saved with.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate USE insns so the registers are not seen as dead
	   before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1401
1402 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1403
1404 static enum type_class
1405 type_to_class (tree type)
1406 {
1407 switch (TREE_CODE (type))
1408 {
1409 case VOID_TYPE: return void_type_class;
1410 case INTEGER_TYPE: return integer_type_class;
1411 case CHAR_TYPE: return char_type_class;
1412 case ENUMERAL_TYPE: return enumeral_type_class;
1413 case BOOLEAN_TYPE: return boolean_type_class;
1414 case POINTER_TYPE: return pointer_type_class;
1415 case REFERENCE_TYPE: return reference_type_class;
1416 case OFFSET_TYPE: return offset_type_class;
1417 case REAL_TYPE: return real_type_class;
1418 case COMPLEX_TYPE: return complex_type_class;
1419 case FUNCTION_TYPE: return function_type_class;
1420 case METHOD_TYPE: return method_type_class;
1421 case RECORD_TYPE: return record_type_class;
1422 case UNION_TYPE:
1423 case QUAL_UNION_TYPE: return union_type_class;
1424 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1425 ? string_type_class : array_type_class);
1426 case SET_TYPE: return set_type_class;
1427 case FILE_TYPE: return file_type_class;
1428 case LANG_TYPE: return lang_type_class;
1429 default: return no_type_class;
1430 }
1431 }
1432
1433 /* Expand a call to __builtin_classify_type with arguments found in
1434 ARGLIST. */
1435
1436 static rtx
1437 expand_builtin_classify_type (tree arglist)
1438 {
1439 if (arglist != 0)
1440 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1441 return GEN_INT (no_type_class);
1442 }
1443
1444 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1445
1446 static rtx
1447 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1448 {
1449 rtx tmp;
1450
1451 if (arglist == 0)
1452 return const0_rtx;
1453 arglist = TREE_VALUE (arglist);
1454
1455 /* We have taken care of the easy cases during constant folding. This
1456 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1457 get a chance to see if it can deduce whether ARGLIST is constant. */
1458
1459 current_function_calls_constant_p = 1;
1460
1461 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1462 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1463 return tmp;
1464 }
1465
1466 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1467 if available. */
1468 tree
1469 mathfn_built_in (tree type, enum built_in_function fn)
1470 {
1471 enum built_in_function fcode = NOT_BUILT_IN;
1472 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1473 switch (fn)
1474 {
1475 case BUILT_IN_SQRT:
1476 case BUILT_IN_SQRTF:
1477 case BUILT_IN_SQRTL:
1478 fcode = BUILT_IN_SQRT;
1479 break;
1480 case BUILT_IN_SIN:
1481 case BUILT_IN_SINF:
1482 case BUILT_IN_SINL:
1483 fcode = BUILT_IN_SIN;
1484 break;
1485 case BUILT_IN_COS:
1486 case BUILT_IN_COSF:
1487 case BUILT_IN_COSL:
1488 fcode = BUILT_IN_COS;
1489 break;
1490 case BUILT_IN_EXP:
1491 case BUILT_IN_EXPF:
1492 case BUILT_IN_EXPL:
1493 fcode = BUILT_IN_EXP;
1494 break;
1495 case BUILT_IN_LOG:
1496 case BUILT_IN_LOGF:
1497 case BUILT_IN_LOGL:
1498 fcode = BUILT_IN_LOG;
1499 break;
1500 case BUILT_IN_TAN:
1501 case BUILT_IN_TANF:
1502 case BUILT_IN_TANL:
1503 fcode = BUILT_IN_TAN;
1504 break;
1505 case BUILT_IN_ATAN:
1506 case BUILT_IN_ATANF:
1507 case BUILT_IN_ATANL:
1508 fcode = BUILT_IN_ATAN;
1509 break;
1510 case BUILT_IN_FLOOR:
1511 case BUILT_IN_FLOORF:
1512 case BUILT_IN_FLOORL:
1513 fcode = BUILT_IN_FLOOR;
1514 break;
1515 case BUILT_IN_CEIL:
1516 case BUILT_IN_CEILF:
1517 case BUILT_IN_CEILL:
1518 fcode = BUILT_IN_CEIL;
1519 break;
1520 case BUILT_IN_TRUNC:
1521 case BUILT_IN_TRUNCF:
1522 case BUILT_IN_TRUNCL:
1523 fcode = BUILT_IN_TRUNC;
1524 break;
1525 case BUILT_IN_ROUND:
1526 case BUILT_IN_ROUNDF:
1527 case BUILT_IN_ROUNDL:
1528 fcode = BUILT_IN_ROUND;
1529 break;
1530 case BUILT_IN_NEARBYINT:
1531 case BUILT_IN_NEARBYINTF:
1532 case BUILT_IN_NEARBYINTL:
1533 fcode = BUILT_IN_NEARBYINT;
1534 break;
1535 default:
1536 abort ();
1537 }
1538 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1539 switch (fn)
1540 {
1541 case BUILT_IN_SQRT:
1542 case BUILT_IN_SQRTF:
1543 case BUILT_IN_SQRTL:
1544 fcode = BUILT_IN_SQRTF;
1545 break;
1546 case BUILT_IN_SIN:
1547 case BUILT_IN_SINF:
1548 case BUILT_IN_SINL:
1549 fcode = BUILT_IN_SINF;
1550 break;
1551 case BUILT_IN_COS:
1552 case BUILT_IN_COSF:
1553 case BUILT_IN_COSL:
1554 fcode = BUILT_IN_COSF;
1555 break;
1556 case BUILT_IN_EXP:
1557 case BUILT_IN_EXPF:
1558 case BUILT_IN_EXPL:
1559 fcode = BUILT_IN_EXPF;
1560 break;
1561 case BUILT_IN_LOG:
1562 case BUILT_IN_LOGF:
1563 case BUILT_IN_LOGL:
1564 fcode = BUILT_IN_LOGF;
1565 break;
1566 case BUILT_IN_TAN:
1567 case BUILT_IN_TANF:
1568 case BUILT_IN_TANL:
1569 fcode = BUILT_IN_TANF;
1570 break;
1571 case BUILT_IN_ATAN:
1572 case BUILT_IN_ATANF:
1573 case BUILT_IN_ATANL:
1574 fcode = BUILT_IN_ATANF;
1575 break;
1576 case BUILT_IN_FLOOR:
1577 case BUILT_IN_FLOORF:
1578 case BUILT_IN_FLOORL:
1579 fcode = BUILT_IN_FLOORF;
1580 break;
1581 case BUILT_IN_CEIL:
1582 case BUILT_IN_CEILF:
1583 case BUILT_IN_CEILL:
1584 fcode = BUILT_IN_CEILF;
1585 break;
1586 case BUILT_IN_TRUNC:
1587 case BUILT_IN_TRUNCF:
1588 case BUILT_IN_TRUNCL:
1589 fcode = BUILT_IN_TRUNCF;
1590 break;
1591 case BUILT_IN_ROUND:
1592 case BUILT_IN_ROUNDF:
1593 case BUILT_IN_ROUNDL:
1594 fcode = BUILT_IN_ROUNDF;
1595 break;
1596 case BUILT_IN_NEARBYINT:
1597 case BUILT_IN_NEARBYINTF:
1598 case BUILT_IN_NEARBYINTL:
1599 fcode = BUILT_IN_NEARBYINTF;
1600 break;
1601 default:
1602 abort ();
1603 }
1604 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1605 switch (fn)
1606 {
1607 case BUILT_IN_SQRT:
1608 case BUILT_IN_SQRTF:
1609 case BUILT_IN_SQRTL:
1610 fcode = BUILT_IN_SQRTL;
1611 break;
1612 case BUILT_IN_SIN:
1613 case BUILT_IN_SINF:
1614 case BUILT_IN_SINL:
1615 fcode = BUILT_IN_SINL;
1616 break;
1617 case BUILT_IN_COS:
1618 case BUILT_IN_COSF:
1619 case BUILT_IN_COSL:
1620 fcode = BUILT_IN_COSL;
1621 break;
1622 case BUILT_IN_EXP:
1623 case BUILT_IN_EXPF:
1624 case BUILT_IN_EXPL:
1625 fcode = BUILT_IN_EXPL;
1626 break;
1627 case BUILT_IN_LOG:
1628 case BUILT_IN_LOGF:
1629 case BUILT_IN_LOGL:
1630 fcode = BUILT_IN_LOGL;
1631 break;
1632 case BUILT_IN_TAN:
1633 case BUILT_IN_TANF:
1634 case BUILT_IN_TANL:
1635 fcode = BUILT_IN_TANL;
1636 break;
1637 case BUILT_IN_ATAN:
1638 case BUILT_IN_ATANF:
1639 case BUILT_IN_ATANL:
1640 fcode = BUILT_IN_ATANL;
1641 break;
1642 case BUILT_IN_FLOOR:
1643 case BUILT_IN_FLOORF:
1644 case BUILT_IN_FLOORL:
1645 fcode = BUILT_IN_FLOORL;
1646 break;
1647 case BUILT_IN_CEIL:
1648 case BUILT_IN_CEILF:
1649 case BUILT_IN_CEILL:
1650 fcode = BUILT_IN_CEILL;
1651 break;
1652 case BUILT_IN_TRUNC:
1653 case BUILT_IN_TRUNCF:
1654 case BUILT_IN_TRUNCL:
1655 fcode = BUILT_IN_TRUNCL;
1656 break;
1657 case BUILT_IN_ROUND:
1658 case BUILT_IN_ROUNDF:
1659 case BUILT_IN_ROUNDL:
1660 fcode = BUILT_IN_ROUNDL;
1661 break;
1662 case BUILT_IN_NEARBYINT:
1663 case BUILT_IN_NEARBYINTF:
1664 case BUILT_IN_NEARBYINTL:
1665 fcode = BUILT_IN_NEARBYINTL;
1666 break;
1667 default:
1668 abort ();
1669 }
1670 return implicit_built_in_decls[fcode];
1671 }
1672
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  TARGET is compared with
     itself: only a NaN compares unequal to itself, so the EQ branch
     to LAB skips the errno code for every non-NaN result.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1710
1711
1712 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1713 Return 0 if a normal call should be emitted rather than expanding the
1714 function in-line. EXP is the expression that is a call to the builtin
1715 function; if convenient, the result should be placed in TARGET.
1716 SUBTARGET may be used as the target for computing one of EXP's operands. */
1717
1718 static rtx
1719 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1720 {
1721 optab builtin_optab;
1722 rtx op0, insns;
1723 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1724 tree arglist = TREE_OPERAND (exp, 1);
1725 enum machine_mode mode;
1726 bool errno_set = false;
1727 tree arg, narg;
1728
1729 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1730 return 0;
1731
1732 arg = TREE_VALUE (arglist);
1733
1734 switch (DECL_FUNCTION_CODE (fndecl))
1735 {
1736 case BUILT_IN_SIN:
1737 case BUILT_IN_SINF:
1738 case BUILT_IN_SINL:
1739 builtin_optab = sin_optab; break;
1740 case BUILT_IN_COS:
1741 case BUILT_IN_COSF:
1742 case BUILT_IN_COSL:
1743 builtin_optab = cos_optab; break;
1744 case BUILT_IN_SQRT:
1745 case BUILT_IN_SQRTF:
1746 case BUILT_IN_SQRTL:
1747 errno_set = ! tree_expr_nonnegative_p (arg);
1748 builtin_optab = sqrt_optab;
1749 break;
1750 case BUILT_IN_EXP:
1751 case BUILT_IN_EXPF:
1752 case BUILT_IN_EXPL:
1753 errno_set = true; builtin_optab = exp_optab; break;
1754 case BUILT_IN_LOG:
1755 case BUILT_IN_LOGF:
1756 case BUILT_IN_LOGL:
1757 errno_set = true; builtin_optab = log_optab; break;
1758 case BUILT_IN_TAN:
1759 case BUILT_IN_TANF:
1760 case BUILT_IN_TANL:
1761 builtin_optab = tan_optab; break;
1762 case BUILT_IN_ATAN:
1763 case BUILT_IN_ATANF:
1764 case BUILT_IN_ATANL:
1765 builtin_optab = atan_optab; break;
1766 case BUILT_IN_FLOOR:
1767 case BUILT_IN_FLOORF:
1768 case BUILT_IN_FLOORL:
1769 builtin_optab = floor_optab; break;
1770 case BUILT_IN_CEIL:
1771 case BUILT_IN_CEILF:
1772 case BUILT_IN_CEILL:
1773 builtin_optab = ceil_optab; break;
1774 case BUILT_IN_TRUNC:
1775 case BUILT_IN_TRUNCF:
1776 case BUILT_IN_TRUNCL:
1777 builtin_optab = trunc_optab; break;
1778 case BUILT_IN_ROUND:
1779 case BUILT_IN_ROUNDF:
1780 case BUILT_IN_ROUNDL:
1781 builtin_optab = round_optab; break;
1782 case BUILT_IN_NEARBYINT:
1783 case BUILT_IN_NEARBYINTF:
1784 case BUILT_IN_NEARBYINTL:
1785 builtin_optab = nearbyint_optab; break;
1786 default:
1787 abort ();
1788 }
1789
1790 /* Make a suitable register to place result in. */
1791 mode = TYPE_MODE (TREE_TYPE (exp));
1792 target = gen_reg_rtx (mode);
1793
1794 if (! flag_errno_math || ! HONOR_NANS (mode))
1795 errno_set = false;
1796
1797 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1798 need to expand the argument again. This way, we will not perform
1799 side-effects more the once. */
1800 narg = save_expr (arg);
1801 if (narg != arg)
1802 {
1803 arglist = build_tree_list (NULL_TREE, arg);
1804 exp = build_function_call_expr (fndecl, arglist);
1805 }
1806
1807 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1808
1809 emit_queue ();
1810 start_sequence ();
1811
1812 /* Compute into TARGET.
1813 Set TARGET to wherever the result comes back. */
1814 target = expand_unop (mode, builtin_optab, op0, target, 0);
1815
1816 /* If we were unable to expand via the builtin, stop the sequence
1817 (without outputting the insns) and call to the library function
1818 with the stabilized argument list. */
1819 if (target == 0)
1820 {
1821 end_sequence ();
1822 return expand_call (exp, target, target == const0_rtx);
1823 }
1824
1825 if (errno_set)
1826 expand_errno_check (exp, target);
1827
1828 /* Output the entire sequence. */
1829 insns = get_insns ();
1830 end_sequence ();
1831 emit_insn (insns);
1832
1833 return target;
1834 }
1835
1836 /* Expand a call to the builtin binary math functions (pow and atan2).
1837 Return 0 if a normal call should be emitted rather than expanding the
1838 function in-line. EXP is the expression that is a call to the builtin
1839 function; if convenient, the result should be placed in TARGET.
1840 SUBTARGET may be used as the target for computing one of EXP's
1841 operands. */
1842
1843 static rtx
1844 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1845 {
1846 optab builtin_optab;
1847 rtx op0, op1, insns;
1848 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1849 tree arglist = TREE_OPERAND (exp, 1);
1850 tree arg0, arg1, temp, narg;
1851 enum machine_mode mode;
1852 bool errno_set = true;
1853 bool stable = true;
1854
1855 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1856 return 0;
1857
1858 arg0 = TREE_VALUE (arglist);
1859 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1860
1861 switch (DECL_FUNCTION_CODE (fndecl))
1862 {
1863 case BUILT_IN_POW:
1864 case BUILT_IN_POWF:
1865 case BUILT_IN_POWL:
1866 builtin_optab = pow_optab; break;
1867 case BUILT_IN_ATAN2:
1868 case BUILT_IN_ATAN2F:
1869 case BUILT_IN_ATAN2L:
1870 builtin_optab = atan2_optab; break;
1871 default:
1872 abort ();
1873 }
1874
1875 /* Make a suitable register to place result in. */
1876 mode = TYPE_MODE (TREE_TYPE (exp));
1877 target = gen_reg_rtx (mode);
1878
1879 if (! flag_errno_math || ! HONOR_NANS (mode))
1880 errno_set = false;
1881
1882 /* Alway stabilize the argument list. */
1883 narg = save_expr (arg1);
1884 if (narg != arg1)
1885 {
1886 temp = build_tree_list (NULL_TREE, narg);
1887 stable = false;
1888 }
1889 else
1890 temp = TREE_CHAIN (arglist);
1891
1892 narg = save_expr (arg0);
1893 if (narg != arg0)
1894 {
1895 arglist = tree_cons (NULL_TREE, narg, temp);
1896 stable = false;
1897 }
1898 else if (! stable)
1899 arglist = tree_cons (NULL_TREE, arg0, temp);
1900
1901 if (! stable)
1902 exp = build_function_call_expr (fndecl, arglist);
1903
1904 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1905 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1906
1907 emit_queue ();
1908 start_sequence ();
1909
1910 /* Compute into TARGET.
1911 Set TARGET to wherever the result comes back. */
1912 target = expand_binop (mode, builtin_optab, op0, op1,
1913 target, 0, OPTAB_DIRECT);
1914
1915 /* If we were unable to expand via the builtin, stop the sequence
1916 (without outputting the insns) and call to the library function
1917 with the stabilized argument list. */
1918 if (target == 0)
1919 {
1920 end_sequence ();
1921 return expand_call (exp, target, target == const0_rtx);
1922 }
1923
1924 if (errno_set)
1925 expand_errno_check (exp, target);
1926
1927 /* Output the entire sequence. */
1928 insns = get_insns ();
1929 end_sequence ();
1930 emit_insn (insns);
1931
1932 return target;
1933 }
1934
1935 /* To evaluate powi(x,n), the floating point value x raised to the
1936 constant integer exponent n, we use a hybrid algorithm that
1937 combines the "window method" with look-up tables. For an
1938 introduction to exponentiation algorithms and "addition chains",
1939 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1940 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1941 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1942 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1943
1944 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1945 multiplications to inline before calling the system library's pow
1946 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1947 so this default never requires calling pow, powf or powl. */
1948
1949 #ifndef POWI_MAX_MULTS
1950 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1951 #endif
1952
1953 /* The size of the "optimal power tree" lookup table. All
1954 exponents less than this value are simply looked up in the
1955 powi_table below. This threshold is also used to size the
1956 cache of pseudo registers that hold intermediate results. */
1957 #define POWI_TABLE_SIZE 256
1958
1959 /* The size, in bits of the window, used in the "window method"
1960 exponentiation algorithm. This is equivalent to a radix of
1961 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1962 #define POWI_WINDOW_SIZE 3
1963
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".
   Consulted by powi_lookup_cost and expand_powi_1 below, which
   must therefore agree on this decomposition.  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2006
2007
2008 /* Return the number of multiplications required to calculate
2009 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2010 subroutine of powi_cost. CACHE is an array indicating
2011 which exponents have already been calculated. */
2012
2013 static int
2014 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2015 {
2016 /* If we've already calculated this exponent, then this evaluation
2017 doesn't require any additional multiplications. */
2018 if (cache[n])
2019 return 0;
2020
2021 cache[n] = true;
2022 return powi_lookup_cost (n - powi_table[n], cache)
2023 + powi_lookup_cost (powi_table[n], cache) + 1;
2024 }
2025
/* Return the number of multiplications required to calculate
   powi(x,n) for an arbitrary x, given the exponent N.  This
   function needs to be kept in sync with expand_powi below.
   Combines the "window method" (for the high bits of N) with
   powi_table lookups (for the final residue below
   POWI_TABLE_SIZE).  */

static int
powi_cost (HOST_WIDE_INT n)
{
  bool cache[POWI_TABLE_SIZE];
  unsigned HOST_WIDE_INT digit;
  unsigned HOST_WIDE_INT val;
  int result;

  if (n == 0)
    return 0;

  /* Ignore the reciprocal when calculating the cost.  */
  val = (n < 0) ? -n : n;

  /* Initialize the exponent cache.  */
  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
  cache[1] = true;

  result = 0;

  /* Peel off POWI_WINDOW_SIZE bits at a time when the low bit is set,
     or a single zero bit (one squaring) otherwise, until the residual
     exponent is small enough for a table lookup.  */
  while (val >= POWI_TABLE_SIZE)
    {
      if (val & 1)
	{
	  digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
	  /* Cost of materializing the window digit, plus one squaring
	     per window bit and one multiply to fold the digit in.  */
	  result += powi_lookup_cost (digit, cache)
		    + POWI_WINDOW_SIZE + 1;
	  val >>= POWI_WINDOW_SIZE;
	}
      else
	{
	  /* Even exponent: a single squaring halves it.  */
	  val >>= 1;
	  result++;
	}
    }

  return result + powi_lookup_cost (val, cache);
}
2068
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.
   The decomposition of N mirrors powi_cost above: table-driven below
   POWI_TABLE_SIZE, window method above it.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Reuse a previously computed power when available.  */
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      /* Record the register before recursing so shared subexponents
	 hit the cache.  */
      cache[n] = target;

      /* Split per the optimal power tree: x**n = x**(n-j) * x**j.  */
      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd exponent: peel off a POWI_WINDOW_SIZE-bit digit.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even exponent: x**n = (x**(n/2))**2, i.e. one squaring.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2110
/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
   floating point operand in mode MODE, and N is the exponent.  This
   function needs to be kept in sync with powi_cost above.  A negative
   N is handled by computing x**|n| and taking the reciprocal.  */

static rtx
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
{
  unsigned HOST_WIDE_INT val;
  rtx cache[POWI_TABLE_SIZE];
  rtx result;

  /* x**0 is 1.0 regardless of x.  */
  if (n == 0)
    return CONST1_RTX (mode);

  val = (n < 0) ? -n : n;

  /* Seed the cache with x**1 = x; expand_powi_1 fills in the rest.  */
  memset (cache, 0, sizeof(cache));
  cache[1] = x;

  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);

  /* If the original exponent was negative, reciprocate the result.  */
  if (n < 0)
    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
			   result, NULL_RTX, 0, OPTAB_LIB_WIDEN);

  return result;
}
2139
/* Expand a call to the pow built-in mathematical function.  Return 0 if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   When the exponent is a small integral constant and the relaxed
   floating-point flags permit, emit an inline multiply sequence via
   expand_powi; otherwise defer to expand_builtin_mathfn_2.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;

  if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return 0;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  /* The inline expansion loses errno setting and may reassociate,
     so it is gated on -funsafe-math-optimizations/-fno-math-errno,
     and skipped at -Os since it can grow code.  */
  if (flag_unsafe_math_optimizations
      && ! flag_errno_math
      && ! optimize_size
      && TREE_CODE (arg1) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      /* Only exactly-integral exponents qualify: round-trip the REAL
	 constant through an integer and compare.  */
      c = TREE_REAL_CST (arg1);
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint)
	  && powi_cost (n) <= POWI_MAX_MULTS)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
	  rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
	  op = force_reg (mode, op);
	  return expand_powi (op, mode, n);
	}
    }
  return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
}
2181
/* Expand expression EXP which is a call to the strlen builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  Tries, in order:
   a compile-time constant length, a constant length with side-effects
   in the argument, and finally the target's strlenM insn pattern.  */

static rtx
expand_builtin_strlen (tree arglist, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      rtx pat;
      tree len, src = TREE_VALUE (arglist);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return 0;

      /* Bail out if we can't compute strlen in the right mode.
	 Walk widening modes until one has a strlen insn pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the search character, 0;
	 force it into a register if the predicate rejects const0_rtx.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return 0;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = memory_address (BLKmode,
			    expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Insert the address computation just before the strlen insn
	 emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
2290
/* Expand a call to the strstr builtin.  Return 0 if we failed the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).
   Folds constant-string searches at compile time and rewrites a
   one-character needle into a strchr call.  */

static rtx
expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      tree fn;
      const char *p1, *p2;

      /* The needle must be a constant string for any transformation.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: do the search at compile time.  */
	  const char *r = strstr (p1, p2);

	  if (r == NULL)
	    return const0_rtx;

	  /* Return an offset into the constant string argument.  */
	  return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
					   s1, ssize_int (r - p1))),
			      target, mode, EXPAND_NORMAL);
	}

      /* strstr(s1, "") == s1.  */
      if (p2[0] == '\0')
	return expand_expr (s1, target, mode, EXPAND_NORMAL);

      /* Needles longer than one character get a real strstr call.  */
      if (p2[1] != '\0')
	return 0;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return 0;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      arglist =
	build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
      arglist = tree_cons (NULL_TREE, s1, arglist);
      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
}
2343
/* Expand a call to the strchr builtin.  Return 0 if we failed the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).
   Only the fully-constant case (constant string, constant character)
   is folded; everything else falls back to a library call.  */

static rtx
expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;

	  /* Convert the search character to the target's character
	     representation; punt if it doesn't fit.  */
	  if (target_char_cast (s2, &c))
	    return 0;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return const0_rtx;

	  /* Return an offset into the constant string argument.  */
	  return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
					   s1, ssize_int (r - p1))),
			      target, mode, EXPAND_NORMAL);
	}

      /* FIXME: Should use here strchrM optab so that ports can optimize
	 this.  */
      return 0;
    }
}
2386
/* Expand a call to the strrchr builtin.  Return 0 if we failed the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).
   Folds the fully-constant case, and rewrites a search for '\0' as a
   strchr call (both return a pointer to the terminating NUL).  */

static rtx
expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;

	  /* Convert the search character to the target's character
	     representation; punt if it doesn't fit.  */
	  if (target_char_cast (s2, &c))
	    return 0;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return const0_rtx;

	  /* Return an offset into the constant string argument.  */
	  return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
					   s1, ssize_int (r - p1))),
			      target, mode, EXPAND_NORMAL);
	}

      /* Only the '\0' search can be mapped onto strchr.  */
      if (! integer_zerop (s2))
	return 0;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return 0;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
}
2437
/* Expand a call to the strpbrk builtin.  Return 0 if we failed the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).
   Folds constant-string searches, the empty accept-set, and rewrites
   a one-character accept-set into a strchr call.  */

static rtx
expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      tree fn;
      const char *p1, *p2;

      /* The accept-set must be a constant string to do anything.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: do the search at compile time.  */
	  const char *r = strpbrk (p1, p2);

	  if (r == NULL)
	    return const0_rtx;

	  /* Return an offset into the constant string argument.  */
	  return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
					   s1, ssize_int (r - p1))),
			      target, mode, EXPAND_NORMAL);
	}

      if (p2[0] == '\0')
	{
	  /* strpbrk(x, "") == NULL.
	     Evaluate and ignore the arguments in case they had
	     side-effects.  */
	  expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}

      if (p2[1] != '\0')
	return 0;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return 0;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      arglist =
	build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
      arglist = tree_cons (NULL_TREE, s1, arglist);
      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
}
2496
2497 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2498 bytes from constant string DATA + OFFSET and return it as target
2499 constant. */
2500
2501 static rtx
2502 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2503 enum machine_mode mode)
2504 {
2505 const char *str = (const char *) data;
2506
2507 if (offset < 0
2508 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2509 > strlen (str) + 1))
2510 abort (); /* Attempt to read past the end of constant string. */
2511
2512 return c_readstr (str + offset, mode);
2513 }
2514
/* Expand a call to the memcpy builtin, with arguments in ARGLIST.
   Return 0 if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  Handles zero-length copies,
   stores of a constant source string by pieces, and general block
   moves; returns the destination address as the result.  */
static rtx
expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore SRC in case it has side-effects.  */
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return 0;

      dest_mem = get_memory_rtx (dest);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, 0);
	  /* Return the destination pointer, converted to ptr_mode if
	     pointers are extended in a wider mode.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
				   BLOCK_OP_NORMAL);

      /* emit_block_move may not return the destination address, in
	 which case compute it from DEST_MEM ourselves.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_addr) != ptr_mode)
	    dest_addr = convert_memory_address (ptr_mode, dest_addr);
#endif
	}
      return dest_addr;
    }
}
2596
/* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
   Return 0 if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
			int endp)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  /* If return value is ignored, transform mempcpy into memcpy.  */
  else if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
	return 0;

      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If DEST is not a pointer type or LEN is not constant,
	 call the normal function.  */
      if (dest_align == 0 || !host_integerp (len, 1))
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore SRC in case it has side-effects.  */
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return 0;

      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  ENDP selects which
	 pointer store_by_pieces leaves in DEST_MEM.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = get_memory_rtx (dest);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      /* Otherwise try a piecewise register move when the constant
	 length and alignment permit it.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      return 0;
    }
}
2698
/* Expand expression EXP, which is a call to the memmove builtin.  Return 0
   if we failed the caller should emit a normal call.  memmove cannot be
   expanded as an inline block move (the regions may overlap), so the
   only transformations are the zero-length case and rewriting to
   memcpy when the source is known read-only (hence non-overlapping
   with any writable destination).  */

static rtx
expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore SRC in case it has side-effects.  */
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return 0;

      /* If src is categorized for a readonly section we can use
	 normal memcpy.  */
      if (readonly_data_expr (src))
	{
	  tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	  if (!fn)
	    return 0;
	  return expand_expr (build_function_call_expr (fn, arglist),
			      target, mode, EXPAND_NORMAL);
	}

      /* Otherwise, call the normal function.  */
      return 0;
    }
}
2750
2751 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2752 if we failed the caller should emit a normal call. */
2753
2754 static rtx
2755 expand_builtin_bcopy (tree arglist)
2756 {
2757 tree src, dest, size, newarglist;
2758
2759 if (!validate_arglist (arglist,
2760 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2761 return NULL_RTX;
2762
2763 src = TREE_VALUE (arglist);
2764 dest = TREE_VALUE (TREE_CHAIN (arglist));
2765 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2766
2767 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2768 memmove(ptr y, ptr x, size_t z). This is done this way
2769 so that if it isn't expanded inline, we fallback to
2770 calling bcopy instead of memmove. */
2771
2772 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2773 newarglist = tree_cons (NULL_TREE, src, newarglist);
2774 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2775
2776 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2777 }
2778
/* Expand expression EXP, which is a call to the strcpy builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  When the source length is known at compile time the
   call is rewritten as memcpy (dst, src, strlen (src) + 1).  */

static rtx
expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
{
  tree fn, len, src, dst;

  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return 0;

  /* The length must be computable and free of side-effects, since it
     will be evaluated as part of the rewritten call.  */
  src = TREE_VALUE (TREE_CHAIN (arglist));
  len = c_strlen (src, 1);
  if (len == 0 || TREE_SIDE_EFFECTS (len))
    return 0;

  dst = TREE_VALUE (arglist);
  /* Copy the terminating NUL too.  */
  len = size_binop (PLUS_EXPR, len, ssize_int (1));
  arglist = build_tree_list (NULL_TREE, len);
  arglist = tree_cons (NULL_TREE, src, arglist);
  arglist = tree_cons (NULL_TREE, dst, arglist);
  return expand_expr (build_function_call_expr (fn, arglist),
		      target, mode, EXPAND_NORMAL);
}
2809
/* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
   Return 0 if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  An ignored result degrades to
   strcpy; otherwise a constant source is rewritten through the
   mempcpy expander with ENDP == 2 (end pointer minus one).  */

static rtx
expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dst, src, len;

      /* If return value is ignored, transform stpcpy into strcpy.  */
      if (target == const0_rtx)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    return 0;

	  return expand_expr (build_function_call_expr (fn, arglist),
			      target, mode, EXPAND_NORMAL);
	}

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      src = TREE_VALUE (TREE_CHAIN (arglist));
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return 0;

      dst = TREE_VALUE (arglist);
      /* Copy the terminating NUL too.  */
      len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
      arglist = build_tree_list (NULL_TREE, len);
      arglist = tree_cons (NULL_TREE, src, arglist);
      arglist = tree_cons (NULL_TREE, dst, arglist);
      return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
    }
}
2851
2852 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2853 bytes from constant string DATA + OFFSET and return it as target
2854 constant. */
2855
2856 static rtx
2857 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2858 enum machine_mode mode)
2859 {
2860 const char *str = (const char *) data;
2861
2862 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2863 return const0_rtx;
2864
2865 return c_readstr (str + offset, mode);
2866 }
2867
/* Expand expression EXP, which is a call to the strncpy builtin.  Return 0
   if we failed the caller should emit a normal call.  Requires a
   constant length; handles the zero-length case, the padded case
   (len > strlen (src) + 1) via store_by_pieces, and otherwise rewrites
   to memcpy.  */

static rtx
expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      tree fn;

      /* We must be passed a constant len parameter.  */
      if (TREE_CODE (len) != INTEGER_CST)
	return 0;

      /* If the len parameter is zero, return the dst parameter.  */
      if (integer_zerop (len))
	{
	  /* Evaluate and ignore the src argument in case it has
	     side-effects.  */
	  expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
		       VOIDmode, EXPAND_NORMAL);
	  /* Return the dst parameter.  */
	  return expand_expr (TREE_VALUE (arglist), target, mode,
			      EXPAND_NORMAL);
	}

      /* Now, we must be passed a constant src ptr parameter.  */
      if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
	return 0;

      /* SLEN becomes strlen (src) + 1, the number of meaningful
	 source bytes including the terminating NUL.  */
      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  tree dest = TREE_VALUE (arglist);
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       (void *) p, dest_align))
	    return 0;

	  /* builtin_strncpy_read_str supplies the zero padding past
	     the end of the source string.  */
	  dest_mem = get_memory_rtx (dest);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   (void *) p, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      /* OK transform into builtin memcpy.  */
      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      if (!fn)
	return 0;
      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
}
2942
2943 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2944 bytes from constant string DATA + OFFSET and return it as target
2945 constant. */
2946
2947 static rtx
2948 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2949 enum machine_mode mode)
2950 {
2951 const char *c = (const char *) data;
2952 char *p = alloca (GET_MODE_SIZE (mode));
2953
2954 memset (p, *c, GET_MODE_SIZE (mode));
2955
2956 return c_readstr (p, mode);
2957 }
2958
2959 /* Callback routine for store_by_pieces. Return the RTL of a register
2960 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2961 char value given in the RTL register data. For example, if mode is
2962 4 bytes wide, return the RTL for 0x01010101*data. */
2963
2964 static rtx
2965 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2966 enum machine_mode mode)
2967 {
2968 rtx target, coeff;
2969 size_t size;
2970 char *p;
2971
2972 size = GET_MODE_SIZE (mode);
2973 if (size == 1)
2974 return (rtx) data;
2975
2976 p = alloca (size);
2977 memset (p, 1, size);
2978 coeff = c_readstr (p, mode);
2979
2980 target = convert_to_mode (mode, (rtx) data, 1);
2981 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2982 return force_reg (mode, target);
2983 }
2984
/* Expand expression EXP, which is a call to the memset builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree val = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      char c;

      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0)
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore VAL in case it has side-effects.  */
	  expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* The fill value is not a compile-time constant.  */
      if (TREE_CODE (val) != INTEGER_CST)
	{
	  rtx val_rtx;

	  /* Inline expansion needs a constant length.  */
	  if (!host_integerp (len, 1))
	    return 0;

	  /* When optimizing for size only expand single-byte sets.  */
	  if (optimize_size && tree_low_cst (len, 1) > 1)
	    return 0;

	  /* Assume that we can memset by pieces if we can store the
	   * coefficients by pieces (in the required modes).
	   * We can't pass builtin_memset_gen_str as that emits RTL.  */
	  c = 1;
	  if (!can_store_by_pieces (tree_low_cst (len, 1),
				    builtin_memset_read_str,
				    &c, dest_align))
	    return 0;

	  val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
	  val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  dest_mem = get_memory_rtx (dest);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str,
			   val_rtx, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      if (target_char_cast (val, &c))
	return 0;

      /* Nonzero constant fill byte: try store_by_pieces.  */
      if (c)
	{
	  if (!host_integerp (len, 1))
	    return 0;
	  if (!can_store_by_pieces (tree_low_cst (len, 1),
				    builtin_memset_read_str, &c,
				    dest_align))
	    return 0;

	  dest_mem = get_memory_rtx (dest);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_read_str,
			   &c, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      /* Fill byte is zero: use the block-clear expander, which may
	 emit a clrstr insn or a library call.  */
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

      dest_mem = get_memory_rtx (dest);
      set_mem_align (dest_mem, dest_align);
      dest_addr = clear_storage (dest_mem, len_rtx);

      /* clear_storage returns 0 when it doesn't supply the result
	 address; compute it from DEST_MEM ourselves.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_addr) != ptr_mode)
	    dest_addr = convert_memory_address (ptr_mode, dest_addr);
#endif
	}

      return dest_addr;
    }
}
3097
3098 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3099 if we failed the caller should emit a normal call. */
3100
3101 static rtx
3102 expand_builtin_bzero (tree arglist)
3103 {
3104 tree dest, size, newarglist;
3105
3106 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3107 return NULL_RTX;
3108
3109 dest = TREE_VALUE (arglist);
3110 size = TREE_VALUE (TREE_CHAIN (arglist));
3111
3112 /* New argument list transforming bzero(ptr x, int y) to
3113 memset(ptr x, int 0, size_t y). This is done this way
3114 so that if it isn't expanded inline, we fallback to
3115 calling bzero instead of memset. */
3116
3117 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3118 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3119 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3120
3121 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3122 }
3123
/* Expand expression EXP, which is a call to the memcmp built-in function.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
		       enum machine_mode mode)
{
  tree arg1, arg2, len;
  const char *p1, *p2;

  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the len parameter is zero, return zero.  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
    {
      /* Evaluate and ignore arg1 and arg2 in case they have
	 side-effects.  */
      expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return const0_rtx;
    }

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree ind1 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
      tree ind2 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
      tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#ifdef HAVE_cmpstrsi
  /* Try the cmpstrsi insn; if the target doesn't provide one, fall
     back to a library call to memcmp.  */
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1);
    arg2_rtx = get_memory_rtx (arg2);
    arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
    if (!HAVE_cmpstrsi)
      insn = NULL_RTX;
    else
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TREE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return 0;
}
3246
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg2;
  const char *p1, *p2;

  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If both strings are constant, evaluate at compile-time.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      /* Normalize the host strcmp result to -1/0/1.  */
      return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
    }

  /* If either arg is "", return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree ind1 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
      tree ind2 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
      tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#ifdef HAVE_cmpstrsi
  if (HAVE_cmpstrsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in each known length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return 0;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      arg1_rtx = get_memory_rtx (arg1);
      arg2_rtx = get_memory_rtx (arg2);
      arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
      if (!insn)
	return 0;

      emit_insn (insn);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }
#endif
  return 0;
}
3374
/* Expand expression EXP, which is a call to the strncmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg2, arg3;
  const char *p1, *p2;

  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));
  arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the len parameter is zero, return zero.  */
  if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
    {
      /* Evaluate and ignore arg1 and arg2 in case they have
	 side-effects.  */
      expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return const0_rtx;
    }

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, evaluate at compile-time.  */
  if (host_integerp (arg3, 1) && p1 && p2)
    {
      const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
      /* Normalize the host strncmp result to -1/0/1.  */
      return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
    }

  /* If len == 1 or (either string parameter is "" and (len >= 1)),
      return (*(const u_char*)arg1 - *(const u_char*)arg2).  */
  if (host_integerp (arg3, 1)
      && (tree_low_cst (arg3, 1) == 1
	  || (tree_low_cst (arg3, 1) > 1
	      && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree ind1 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
      tree ind2 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
      tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrsi
  if (HAVE_cmpstrsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating NUL in each known length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return 0;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      arg1_rtx = get_memory_rtx (arg1);
      arg2_rtx = get_memory_rtx (arg2);
      arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
      if (!insn)
	return 0;

      emit_insn (insn);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }
#endif
  return 0;
}
3524
3525 /* Expand expression EXP, which is a call to the strcat builtin.
3526 Return 0 if we failed the caller should emit a normal call,
3527 otherwise try to get the result in TARGET, if convenient. */
3528
3529 static rtx
3530 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3531 {
3532 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3533 return 0;
3534 else
3535 {
3536 tree dst = TREE_VALUE (arglist),
3537 src = TREE_VALUE (TREE_CHAIN (arglist));
3538 const char *p = c_getstr (src);
3539
3540 /* If the string length is zero, return the dst parameter. */
3541 if (p && *p == '\0')
3542 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3543
3544 return 0;
3545 }
3546 }
3547
3548 /* Expand expression EXP, which is a call to the strncat builtin.
3549 Return 0 if we failed the caller should emit a normal call,
3550 otherwise try to get the result in TARGET, if convenient. */
3551
3552 static rtx
3553 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3554 {
3555 if (!validate_arglist (arglist,
3556 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3557 return 0;
3558 else
3559 {
3560 tree dst = TREE_VALUE (arglist),
3561 src = TREE_VALUE (TREE_CHAIN (arglist)),
3562 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3563 const char *p = c_getstr (src);
3564
3565 /* If the requested length is zero, or the src parameter string
3566 length is zero, return the dst parameter. */
3567 if (integer_zerop (len) || (p && *p == '\0'))
3568 {
3569 /* Evaluate and ignore the src and len parameters in case
3570 they have side-effects. */
3571 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3572 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3573 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3574 }
3575
3576 /* If the requested len is greater than or equal to the string
3577 length, call strcat. */
3578 if (TREE_CODE (len) == INTEGER_CST && p
3579 && compare_tree_int (len, strlen (p)) >= 0)
3580 {
3581 tree newarglist
3582 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3583 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3584
3585 /* If the replacement _DECL isn't initialized, don't do the
3586 transformation. */
3587 if (!fn)
3588 return 0;
3589
3590 return expand_expr (build_function_call_expr (fn, newarglist),
3591 target, mode, EXPAND_NORMAL);
3592 }
3593 return 0;
3594 }
3595 }
3596
3597 /* Expand expression EXP, which is a call to the strspn builtin.
3598 Return 0 if we failed the caller should emit a normal call,
3599 otherwise try to get the result in TARGET, if convenient. */
3600
3601 static rtx
3602 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3603 {
3604 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3605 return 0;
3606 else
3607 {
3608 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3609 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3610
3611 /* If both arguments are constants, evaluate at compile-time. */
3612 if (p1 && p2)
3613 {
3614 const size_t r = strspn (p1, p2);
3615 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3616 }
3617
3618 /* If either argument is "", return 0. */
3619 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3620 {
3621 /* Evaluate and ignore both arguments in case either one has
3622 side-effects. */
3623 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3624 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3625 return const0_rtx;
3626 }
3627 return 0;
3628 }
3629 }
3630
3631 /* Expand expression EXP, which is a call to the strcspn builtin.
3632 Return 0 if we failed the caller should emit a normal call,
3633 otherwise try to get the result in TARGET, if convenient. */
3634
3635 static rtx
3636 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3637 {
3638 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3639 return 0;
3640 else
3641 {
3642 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3643 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3644
3645 /* If both arguments are constants, evaluate at compile-time. */
3646 if (p1 && p2)
3647 {
3648 const size_t r = strcspn (p1, p2);
3649 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3650 }
3651
3652 /* If the first argument is "", return 0. */
3653 if (p1 && *p1 == '\0')
3654 {
3655 /* Evaluate and ignore argument s2 in case it has
3656 side-effects. */
3657 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3658 return const0_rtx;
3659 }
3660
3661 /* If the second argument is "", return __builtin_strlen(s1). */
3662 if (p2 && *p2 == '\0')
3663 {
3664 tree newarglist = build_tree_list (NULL_TREE, s1),
3665 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3666
3667 /* If the replacement _DECL isn't initialized, don't do the
3668 transformation. */
3669 if (!fn)
3670 return 0;
3671
3672 return expand_expr (build_function_call_expr (fn, newarglist),
3673 target, mode, EXPAND_NORMAL);
3674 }
3675 return 0;
3676 }
3677 }
3678
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  /* Detach the insns emitted above so they can be re-emitted at the
     start of the function below.  */
  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
3728
3729 /* __builtin_args_info (N) returns word N of the arg space info
3730 for the current function. The number and meanings of words
3731 is controlled by the definition of CUMULATIVE_ARGS. */
3732
3733 static rtx
3734 expand_builtin_args_info (tree arglist)
3735 {
3736 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3737 int *word_ptr = (int *) &current_function_args_info;
3738
3739 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3740 abort ();
3741
3742 if (arglist != 0)
3743 {
3744 if (!host_integerp (TREE_VALUE (arglist), 0))
3745 error ("argument of `__builtin_args_info' must be constant");
3746 else
3747 {
3748 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3749
3750 if (wordnum < 0 || wordnum >= nwords)
3751 error ("argument of `__builtin_args_info' out of range");
3752 else
3753 return GEN_INT (word_ptr[wordnum]);
3754 }
3755 }
3756 else
3757 error ("missing argument in `__builtin_args_info'");
3758
3759 return const0_rtx;
3760 }
3761
3762 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3763
3764 static rtx
3765 expand_builtin_next_arg (tree arglist)
3766 {
3767 tree fntype = TREE_TYPE (current_function_decl);
3768
3769 if (TYPE_ARG_TYPES (fntype) == 0
3770 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3771 == void_type_node))
3772 {
3773 error ("`va_start' used in function with fixed args");
3774 return const0_rtx;
3775 }
3776
3777 if (arglist)
3778 {
3779 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3780 tree arg = TREE_VALUE (arglist);
3781
3782 /* Strip off all nops for the sake of the comparison. This
3783 is not quite the same as STRIP_NOPS. It does more.
3784 We must also strip off INDIRECT_EXPR for C++ reference
3785 parameters. */
3786 while (TREE_CODE (arg) == NOP_EXPR
3787 || TREE_CODE (arg) == CONVERT_EXPR
3788 || TREE_CODE (arg) == NON_LVALUE_EXPR
3789 || TREE_CODE (arg) == INDIRECT_REF)
3790 arg = TREE_OPERAND (arg, 0);
3791 if (arg != last_parm)
3792 warning ("second parameter of `va_start' not last named argument");
3793 }
3794 else
3795 /* Evidently an out of date version of <stdarg.h>; can't validate
3796 va_start's second argument, but can still work as intended. */
3797 warning ("`__builtin_next_arg' called without an argument");
3798
3799 return expand_binop (Pmode, add_optab,
3800 current_function_internal_arg_pointer,
3801 current_function_arg_offset_rtx,
3802 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3803 }
3804
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  NEEDS_LVALUE nonzero requests a form
   that remains usable as an lvalue.  */

static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  tree p2 = build_pointer_type (va_list_type_node);

	  valist = build1 (ADDR_EXPR, p2, valist);
	  valist = fold (build1 (NOP_EXPR, p1, valist));
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (va_list_type_node);
	  valist = fold (build1 (ADDR_EXPR, pt, valist));
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Stabilize the (address of the) va_list so it can be evaluated
	 repeatedly, then dereference it again.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
			     valist));
    }

  return valist;
}
3851
3852 /* The "standard" implementation of va_start: just assign `nextarg' to
3853 the variable. */
3854
3855 void
3856 std_expand_builtin_va_start (tree valist, rtx nextarg)
3857 {
3858 tree t;
3859
3860 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3861 make_tree (ptr_type_node, nextarg));
3862 TREE_SIDE_EFFECTS (t) = 1;
3863
3864 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3865 }
3866
3867 /* Expand ARGLIST, from a call to __builtin_va_start. */
3868
3869 static rtx
3870 expand_builtin_va_start (tree arglist)
3871 {
3872 rtx nextarg;
3873 tree chain, valist;
3874
3875 chain = TREE_CHAIN (arglist);
3876
3877 if (TREE_CHAIN (chain))
3878 error ("too many arguments to function `va_start'");
3879
3880 nextarg = expand_builtin_next_arg (chain);
3881 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3882
3883 #ifdef EXPAND_BUILTIN_VA_START
3884 EXPAND_BUILTIN_VA_START (valist, nextarg);
3885 #else
3886 std_expand_builtin_va_start (valist, nextarg);
3887 #endif
3888
3889 return const0_rtx;
3890 }
3891
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

rtx
std_expand_builtin_va_arg (tree valist, tree type)
{
  tree addr_tree, t, type_size = NULL;
  tree align, alignm1;
  tree rounded_size;
  rtx addr;

  /* Compute the rounded size of the type: TYPE_SIZE_UNIT rounded up
     to a multiple of the parameter-passing boundary.  */
  align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
  alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
  if (type == error_mark_node
      || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
      || TREE_OVERFLOW (type_size))
    rounded_size = size_zero_node;
  else
    rounded_size = fold (build (MULT_EXPR, sizetype,
				fold (build (TRUNC_DIV_EXPR, sizetype,
					     fold (build (PLUS_EXPR, sizetype,
							  type_size, alignm1)),
					     align)),
				align));

  /* Get AP.  */
  addr_tree = valist;
  if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
    {
      /* Small args are padded downward: skip the padding so the value
	 is read from AP + (rounded_size - type_size), but only when
	 the arg occupies no more than one parameter-passing slot.  */
      addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			       fold (build (COND_EXPR, sizetype,
					    fold (build (GT_EXPR, sizetype,
							 rounded_size,
							 align)),
					    size_zero_node,
					    fold (build (MINUS_EXPR, sizetype,
							 rounded_size,
							 type_size))))));
    }

  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  if (! integer_zerop (rounded_size))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
		 build (PLUS_EXPR, TREE_TYPE (valist), valist,
			rounded_size));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  return addr;
}
3949
/* Expand __builtin_va_arg, which is not really a builtin function, but
   a very special sort of operator.  */

rtx
expand_builtin_va_arg (tree valist, tree type)
{
  rtx addr, result;
  tree promoted_type, want_va_type, have_va_type;

  /* Verify that valist is of the proper type.  */

  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);
  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || TREE_CODE (have_va_type) == POINTER_TYPE)
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }
  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to `va_arg' not of type `va_list'");
      addr = const0_rtx;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
	   != type)
    {
      /* Dig out printable names for the requested type and its promoted
	 form, for use in the diagnostics below.  */
      const char *name = "<anonymous type>", *pname = 0;
      static bool gave_help;

      if (TYPE_NAME (type))
	{
	  if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
	    name = IDENTIFIER_POINTER (TYPE_NAME (type));
	  else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (type)))
	    name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
	}
      if (TYPE_NAME (promoted_type))
	{
	  if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
	    pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
	  else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (promoted_type)))
	    pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
	}

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warning ("`%s' is promoted to `%s' when passed through `...'",
	       name, pname);
      /* Give the follow-up hint only once per compilation.  */
      if (! gave_help)
	{
	  gave_help = true;
	  warning ("(so you should pass `%s' not `%s' to `va_arg')",
		   pname, name);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      expand_builtin_trap ();

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      addr = const0_rtx;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      valist = stabilize_va_list (valist, 0);

#ifdef EXPAND_BUILTIN_VA_ARG
      addr = EXPAND_BUILTIN_VA_ARG (valist, type);
#else
      addr = std_expand_builtin_va_arg (valist, type);
#endif
    }

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

  /* The result of va_arg is a load through the computed address, placed
     in the varargs alias set.  */
  result = gen_rtx_MEM (TYPE_MODE (type), addr);
  set_mem_alias_set (result, get_varargs_alias_set ());

  return result;
}
4050
/* Expand ARGLIST, from a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree arglist)
{
  tree valist = TREE_VALUE (arglist);

#ifdef EXPAND_BUILTIN_VA_END
  valist = stabilize_va_list (valist, 0);
  /* NOTE: the target macro receives the raw ARGLIST here, not the
     stabilized VALIST.  */
  EXPAND_BUILTIN_VA_END (arglist);
#else
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif

  /* va_end produces no value.  */
  return const0_rtx;
}
4070
/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree arglist)
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* The destination must be usable as an lvalue (second argument 1);
     the source only needs to be readable.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: a simple assignment does the copy.  */
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (dstb) != Pmode)
	dstb = convert_memory_address (Pmode, dstb);

      if (GET_MODE (srcb) != Pmode)
	srcb = convert_memory_address (Pmode, srcb);
#endif

      /* "Dereference" to BLKmode memories, recording alias set and
	 alignment so the block move can be optimized correctly.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  /* va_copy produces no value.  */
  return const0_rtx;
}
4124
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree arglist)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (arglist == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (TREE_VALUE (arglist), 1))
    {
      /* Not a nonnegative constant: report which builtin was misused.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid arg to `__builtin_frame_address'");
      else
	error ("invalid arg to `__builtin_return_address'");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (TREE_VALUE (arglist), 1),
				      hard_frame_pointer_rtx);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning ("unsupported arg to `__builtin_frame_address'");
	  else
	    warning ("unsupported arg to `__builtin_return_address'");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy anything that is neither a
	 register nor a constant into a Pmode register.  */
      if (GET_CODE (tem) != REG
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4172
4173 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4174 we failed and the caller should emit a normal call, otherwise try to get
4175 the result in TARGET, if convenient. */
4176
4177 static rtx
4178 expand_builtin_alloca (tree arglist, rtx target)
4179 {
4180 rtx op0;
4181 rtx result;
4182
4183 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4184 return 0;
4185
4186 /* Compute the argument. */
4187 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4188
4189 /* Allocate the desired space. */
4190 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4191
4192 #ifdef POINTERS_EXTEND_UNSIGNED
4193 if (GET_MODE (result) != ptr_mode)
4194 result = convert_memory_address (ptr_mode, result);
4195 #endif
4196
4197 return result;
4198 }
4199
4200 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4201 Return 0 if a normal call should be emitted rather than expanding the
4202 function in-line. If convenient, the result should be placed in TARGET.
4203 SUBTARGET may be used as the target for computing one of EXP's operands. */
4204
4205 static rtx
4206 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4207 rtx subtarget, optab op_optab)
4208 {
4209 rtx op0;
4210 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4211 return 0;
4212
4213 /* Compute the argument. */
4214 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4215 /* Compute op, into TARGET if possible.
4216 Set TARGET to wherever the result comes back. */
4217 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4218 op_optab, op0, target, 1);
4219 if (target == 0)
4220 abort ();
4221
4222 return convert_to_mode (target_mode, target, 0);
4223 }
4224
/* If the string passed to fputs is a constant and is one character
   long, we attempt to transform this call into __builtin_fputc().  */

static rtx
expand_builtin_fputs (tree arglist, int ignore, int unlocked)
{
  tree len, fn;
  /* Decls of the replacement functions; when UNLOCKED is set, use the
     *_unlocked stdio variants instead.  */
  tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FWRITE];

  /* If the return value is used, or the replacement _DECL isn't
     initialized, don't do the transformation.  */
  if (!ignore || !fn_fputc || !fn_fwrite)
    return 0;

  /* Verify the arguments in the original call.  */
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  if (!(len = c_strlen (TREE_VALUE (arglist), 1))
      || TREE_CODE (len) != INTEGER_CST)
    return 0;

  /* Dispatch on whether the known length is 0, 1, or greater.  */
  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      {
	/* Evaluate and ignore the argument in case it has
	   side-effects.  */
	expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
		     VOIDmode, EXPAND_NORMAL);
	return const0_rtx;
      }
    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (TREE_VALUE (arglist));

	if (p != NULL)
	  {
	    /* New argument list transforming fputs(string, stream) to
	       fputc(string[0], stream).  */
	    arglist =
	      build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
	    arglist =
	      tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
	    fn = fn_fputc;
	    break;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	tree string_arg;

	/* If optimizing for size keep fputs.  */
	if (optimize_size)
	  return 0;
	string_arg = TREE_VALUE (arglist);
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
	arglist = tree_cons (NULL_TREE, len, arglist);
	arglist = tree_cons (NULL_TREE, size_one_node, arglist);
	arglist = tree_cons (NULL_TREE, string_arg, arglist);
	fn = fn_fwrite;
	break;
      }
    default:
      abort ();
    }

  /* Emit the call to the chosen replacement function.  */
  return expand_expr (build_function_call_expr (fn, arglist),
		      (ignore ? const0_rtx : NULL_RTX),
		      VOIDmode, EXPAND_NORMAL);
}
4304
4305 /* Expand a call to __builtin_expect. We return our argument and emit a
4306 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4307 a non-jump context. */
4308
4309 static rtx
4310 expand_builtin_expect (tree arglist, rtx target)
4311 {
4312 tree exp, c;
4313 rtx note, rtx_c;
4314
4315 if (arglist == NULL_TREE
4316 || TREE_CHAIN (arglist) == NULL_TREE)
4317 return const0_rtx;
4318 exp = TREE_VALUE (arglist);
4319 c = TREE_VALUE (TREE_CHAIN (arglist));
4320
4321 if (TREE_CODE (c) != INTEGER_CST)
4322 {
4323 error ("second arg to `__builtin_expect' must be a constant");
4324 c = integer_zero_node;
4325 }
4326
4327 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4328
4329 /* Don't bother with expected value notes for integral constants. */
4330 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4331 {
4332 /* We do need to force this into a register so that we can be
4333 moderately sure to be able to correctly interpret the branch
4334 condition later. */
4335 target = force_reg (GET_MODE (target), target);
4336
4337 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4338
4339 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4340 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4341 }
4342
4343 return target;
4344 }
4345
/* Like expand_builtin_expect, except do this in a jump context.  This is
   called from do_jump if the conditional is a __builtin_expect.  Return either
   a list of insns to emit the jump or NULL if we cannot optimize
   __builtin_expect.  We need to optimize this at jump time so that machines
   like the PowerPC don't turn the test into a SCC operation, and then jump
   based on the test being 0/1.  */

rtx
expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx ret = NULL_RTX;

  /* Only handle __builtin_expect (test, 0) and
     __builtin_expect (test, 1).  */
  if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
      && (integer_zerop (arg1) || integer_onep (arg1)))
    {
      int num_jumps = 0;
      rtx insn;

      /* If we fail to locate an appropriate conditional jump, we'll
	 fall back to normal evaluation.  Ensure that the expression
	 can be re-evaluated.  */
      switch (unsafe_for_reeval (arg0))
	{
	case 0: /* Safe.  */
	  break;

	case 1: /* Mildly unsafe.  */
	  arg0 = unsave_expr (arg0);
	  break;

	case 2: /* Wildly unsafe.  */
	  return NULL_RTX;
	}

      /* Expand the jump insns.  */
      start_sequence ();
      do_jump (arg0, if_false_label, if_true_label);
      ret = get_insns ();
      end_sequence ();

      /* Now that the __builtin_expect has been validated, go through and add
	 the expect's to each of the conditional jumps.  If we run into an
	 error, just give up and generate the 'safe' code of doing a SCC
	 operation and then doing a branch on that.  */
      insn = ret;
      while (insn != NULL_RTX)
	{
	  rtx next = NEXT_INSN (insn);

	  if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
	    {
	      rtx ifelse = SET_SRC (pc_set (insn));
	      rtx label;
	      int taken;

	      /* Work out which arm of the if_then_else is the branch
		 target, and hence whether this jump is "taken" when the
		 condition holds.  */
	      if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
		{
		  taken = 1;
		  label = XEXP (XEXP (ifelse, 1), 0);
		}
	      /* An inverted jump reverses the probabilities.  */
	      else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
		{
		  taken = 0;
		  label = XEXP (XEXP (ifelse, 2), 0);
		}
	      /* We shouldn't have to worry about conditional returns during
		 the expansion stage, but handle it gracefully anyway.  */
	      else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
		{
		  taken = 1;
		  label = NULL_RTX;
		}
	      /* An inverted return reverses the probabilities.  */
	      else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
		{
		  taken = 0;
		  label = NULL_RTX;
		}
	      else
		goto do_next_insn;

	      /* If the test is expected to fail, reverse the
		 probabilities.  */
	      if (integer_zerop (arg1))
		taken = 1 - taken;

	      /* If we are jumping to the false label, reverse the
		 probabilities.  */
	      if (label == NULL_RTX)
		; /* conditional return */
	      else if (label == if_false_label)
		taken = 1 - taken;
	      else if (label != if_true_label)
		goto do_next_insn;

	      num_jumps++;
	      predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
	    }

	do_next_insn:
	  insn = next;
	}

      /* If no jumps were modified, fail and do __builtin_expect the normal
	 way.  */
      if (num_jumps == 0)
	ret = NULL_RTX;
    }

  return ret;
}
4463
/* Expand a call to __builtin_trap: emit the machine's trap insn when
   the target provides one, otherwise emit a libcall to abort.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  /* Control never continues past the trap.  */
  emit_barrier ();
}
4475
4476 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4477 Return 0 if a normal call should be emitted rather than expanding
4478 the function inline. If convenient, the result should be placed
4479 in TARGET. SUBTARGET may be used as the target for computing
4480 the operand. */
4481
4482 static rtx
4483 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4484 {
4485 enum machine_mode mode;
4486 tree arg;
4487 rtx op0;
4488
4489 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4490 return 0;
4491
4492 arg = TREE_VALUE (arglist);
4493 mode = TYPE_MODE (TREE_TYPE (arg));
4494 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4495 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4496 }
4497
4498 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4499 Return 0 if a normal call should be emitted rather than expanding
4500 the function inline. If convenient, the result should be placed
4501 in target. */
4502
4503 static rtx
4504 expand_builtin_cabs (tree arglist, rtx target)
4505 {
4506 enum machine_mode mode;
4507 tree arg;
4508 rtx op0;
4509
4510 if (arglist == 0 || TREE_CHAIN (arglist))
4511 return 0;
4512 arg = TREE_VALUE (arglist);
4513 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4514 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4515 return 0;
4516
4517 mode = TYPE_MODE (TREE_TYPE (arg));
4518 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4519 return expand_complex_abs (mode, op0, target, 0);
4520 }
4521
4522 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4523 a normal call should be emitted rather than expanding the function
4524 inline. If convenient, the result should be placed in TARGET with
4525 mode MODE. */
4526
4527 static rtx
4528 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4529 {
4530 tree orig_arglist, dest, fmt;
4531 const char *fmt_str;
4532
4533 orig_arglist = arglist;
4534
4535 /* Verify the required arguments in the original call. */
4536 if (! arglist)
4537 return 0;
4538 dest = TREE_VALUE (arglist);
4539 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4540 return 0;
4541 arglist = TREE_CHAIN (arglist);
4542 if (! arglist)
4543 return 0;
4544 fmt = TREE_VALUE (arglist);
4545 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4546 return 0;
4547 arglist = TREE_CHAIN (arglist);
4548
4549 /* Check whether the format is a literal string constant. */
4550 fmt_str = c_getstr (fmt);
4551 if (fmt_str == NULL)
4552 return 0;
4553
4554 /* If the format doesn't contain % args or %%, use strcpy. */
4555 if (strchr (fmt_str, '%') == 0)
4556 {
4557 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4558 tree exp;
4559
4560 if (arglist || ! fn)
4561 return 0;
4562 expand_expr (build_function_call_expr (fn, orig_arglist),
4563 const0_rtx, VOIDmode, EXPAND_NORMAL);
4564 if (target == const0_rtx)
4565 return const0_rtx;
4566 exp = build_int_2 (strlen (fmt_str), 0);
4567 exp = fold (build1 (NOP_EXPR, integer_type_node, exp));
4568 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4569 }
4570 /* If the format is "%s", use strcpy if the result isn't used. */
4571 else if (strcmp (fmt_str, "%s") == 0)
4572 {
4573 tree fn, arg, len;
4574 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4575
4576 if (! fn)
4577 return 0;
4578
4579 if (! arglist || TREE_CHAIN (arglist))
4580 return 0;
4581 arg = TREE_VALUE (arglist);
4582 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
4583 return 0;
4584
4585 if (target != const0_rtx)
4586 {
4587 len = c_strlen (arg, 1);
4588 if (! len || TREE_CODE (len) != INTEGER_CST)
4589 return 0;
4590 }
4591 else
4592 len = NULL_TREE;
4593
4594 arglist = build_tree_list (NULL_TREE, arg);
4595 arglist = tree_cons (NULL_TREE, dest, arglist);
4596 expand_expr (build_function_call_expr (fn, arglist),
4597 const0_rtx, VOIDmode, EXPAND_NORMAL);
4598
4599 if (target == const0_rtx)
4600 return const0_rtx;
4601 return expand_expr (len, target, mode, EXPAND_NORMAL);
4602 }
4603
4604 return 0;
4605 }
4606 \f
4607 /* Expand an expression EXP that calls a built-in function,
4608 with result going to TARGET if that's convenient
4609 (and in mode MODE if that's convenient).
4610 SUBTARGET may be used as the target for computing one of EXP's operands.
4611 IGNORE is nonzero if the value is to be ignored. */
4612
4613 rtx
4614 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
4615 int ignore)
4616 {
4617 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4618 tree arglist = TREE_OPERAND (exp, 1);
4619 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4620 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4621
4622 /* Perform postincrements before expanding builtin functions.  */
4623 emit_queue ();
4624
4625 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4626 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4627
4628 /* When not optimizing, generate calls to library functions for a certain
4629 set of builtins. */
4630 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4631 switch (fcode)
4632 {
4633 case BUILT_IN_SQRT:
4634 case BUILT_IN_SQRTF:
4635 case BUILT_IN_SQRTL:
4636 case BUILT_IN_SIN:
4637 case BUILT_IN_SINF:
4638 case BUILT_IN_SINL:
4639 case BUILT_IN_COS:
4640 case BUILT_IN_COSF:
4641 case BUILT_IN_COSL:
4642 case BUILT_IN_EXP:
4643 case BUILT_IN_EXPF:
4644 case BUILT_IN_EXPL:
4645 case BUILT_IN_LOG:
4646 case BUILT_IN_LOGF:
4647 case BUILT_IN_LOGL:
4648 case BUILT_IN_TAN:
4649 case BUILT_IN_TANF:
4650 case BUILT_IN_TANL:
4651 case BUILT_IN_ATAN:
4652 case BUILT_IN_ATANF:
4653 case BUILT_IN_ATANL:
4654 case BUILT_IN_POW:
4655 case BUILT_IN_POWF:
4656 case BUILT_IN_POWL:
4657 case BUILT_IN_ATAN2:
4658 case BUILT_IN_ATAN2F:
4659 case BUILT_IN_ATAN2L:
4660 case BUILT_IN_MEMSET:
4661 case BUILT_IN_MEMCPY:
4662 case BUILT_IN_MEMCMP:
4663 case BUILT_IN_MEMPCPY:
4664 case BUILT_IN_MEMMOVE:
4665 case BUILT_IN_BCMP:
4666 case BUILT_IN_BZERO:
4667 case BUILT_IN_BCOPY:
4668 case BUILT_IN_INDEX:
4669 case BUILT_IN_RINDEX:
4670 case BUILT_IN_SPRINTF:
4671 case BUILT_IN_STPCPY:
4672 case BUILT_IN_STRCHR:
4673 case BUILT_IN_STRRCHR:
4674 case BUILT_IN_STRLEN:
4675 case BUILT_IN_STRCPY:
4676 case BUILT_IN_STRNCPY:
4677 case BUILT_IN_STRNCMP:
4678 case BUILT_IN_STRSTR:
4679 case BUILT_IN_STRPBRK:
4680 case BUILT_IN_STRCAT:
4681 case BUILT_IN_STRNCAT:
4682 case BUILT_IN_STRSPN:
4683 case BUILT_IN_STRCSPN:
4684 case BUILT_IN_STRCMP:
4685 case BUILT_IN_FFS:
4686 case BUILT_IN_PUTCHAR:
4687 case BUILT_IN_PUTS:
4688 case BUILT_IN_PRINTF:
4689 case BUILT_IN_FPUTC:
4690 case BUILT_IN_FPUTS:
4691 case BUILT_IN_FWRITE:
4692 case BUILT_IN_PUTCHAR_UNLOCKED:
4693 case BUILT_IN_PUTS_UNLOCKED:
4694 case BUILT_IN_PRINTF_UNLOCKED:
4695 case BUILT_IN_FPUTC_UNLOCKED:
4696 case BUILT_IN_FPUTS_UNLOCKED:
4697 case BUILT_IN_FWRITE_UNLOCKED:
4698 case BUILT_IN_FLOOR:
4699 case BUILT_IN_FLOORF:
4700 case BUILT_IN_FLOORL:
4701 case BUILT_IN_CEIL:
4702 case BUILT_IN_CEILF:
4703 case BUILT_IN_CEILL:
4704 case BUILT_IN_TRUNC:
4705 case BUILT_IN_TRUNCF:
4706 case BUILT_IN_TRUNCL:
4707 case BUILT_IN_ROUND:
4708 case BUILT_IN_ROUNDF:
4709 case BUILT_IN_ROUNDL:
4710 case BUILT_IN_NEARBYINT:
4711 case BUILT_IN_NEARBYINTF:
4712 case BUILT_IN_NEARBYINTL:
4713 return expand_call (exp, target, ignore);
4714
4715 default:
4716 break;
4717 }
4718
4719 /* The built-in function expanders test for target == const0_rtx
4720 to determine whether the function's result will be ignored. */
4721 if (ignore)
4722 target = const0_rtx;
4723
4724 /* If the result of a pure or const built-in function is ignored, and
4725 none of its arguments are volatile, we can avoid expanding the
4726 built-in call and just evaluate the arguments for side-effects. */
4727 if (target == const0_rtx
4728 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4729 {
4730 bool volatilep = false;
4731 tree arg;
4732
4733 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4734 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4735 {
4736 volatilep = true;
4737 break;
4738 }
4739
4740 if (! volatilep)
4741 {
4742 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4743 expand_expr (TREE_VALUE (arg), const0_rtx,
4744 VOIDmode, EXPAND_NORMAL);
4745 return const0_rtx;
4746 }
4747 }
4748
4749 switch (fcode)
4750 {
4751 case BUILT_IN_ABS:
4752 case BUILT_IN_LABS:
4753 case BUILT_IN_LLABS:
4754 case BUILT_IN_IMAXABS:
4755 /* build_function_call changes these into ABS_EXPR. */
4756 abort ();
4757
4758 case BUILT_IN_FABS:
4759 case BUILT_IN_FABSF:
4760 case BUILT_IN_FABSL:
4761 target = expand_builtin_fabs (arglist, target, subtarget);
4762 if (target)
4763 return target;
4764 break;
4765
4766 case BUILT_IN_CABS:
4767 case BUILT_IN_CABSF:
4768 case BUILT_IN_CABSL:
4769 if (flag_unsafe_math_optimizations)
4770 {
4771 target = expand_builtin_cabs (arglist, target);
4772 if (target)
4773 return target;
4774 }
4775 break;
4776
4777 case BUILT_IN_CONJ:
4778 case BUILT_IN_CONJF:
4779 case BUILT_IN_CONJL:
4780 case BUILT_IN_CREAL:
4781 case BUILT_IN_CREALF:
4782 case BUILT_IN_CREALL:
4783 case BUILT_IN_CIMAG:
4784 case BUILT_IN_CIMAGF:
4785 case BUILT_IN_CIMAGL:
4786 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4787 and IMAGPART_EXPR. */
4788 abort ();
4789
4790 case BUILT_IN_SIN:
4791 case BUILT_IN_SINF:
4792 case BUILT_IN_SINL:
4793 case BUILT_IN_COS:
4794 case BUILT_IN_COSF:
4795 case BUILT_IN_COSL:
4796 case BUILT_IN_EXP:
4797 case BUILT_IN_EXPF:
4798 case BUILT_IN_EXPL:
4799 case BUILT_IN_LOG:
4800 case BUILT_IN_LOGF:
4801 case BUILT_IN_LOGL:
4802 case BUILT_IN_TAN:
4803 case BUILT_IN_TANF:
4804 case BUILT_IN_TANL:
4805 case BUILT_IN_ATAN:
4806 case BUILT_IN_ATANF:
4807 case BUILT_IN_ATANL:
4808 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4809 because of possible accuracy problems. */
4810 if (! flag_unsafe_math_optimizations)
4811 break;
4812 case BUILT_IN_SQRT:
4813 case BUILT_IN_SQRTF:
4814 case BUILT_IN_SQRTL:
4815 case BUILT_IN_FLOOR:
4816 case BUILT_IN_FLOORF:
4817 case BUILT_IN_FLOORL:
4818 case BUILT_IN_CEIL:
4819 case BUILT_IN_CEILF:
4820 case BUILT_IN_CEILL:
4821 case BUILT_IN_TRUNC:
4822 case BUILT_IN_TRUNCF:
4823 case BUILT_IN_TRUNCL:
4824 case BUILT_IN_ROUND:
4825 case BUILT_IN_ROUNDF:
4826 case BUILT_IN_ROUNDL:
4827 case BUILT_IN_NEARBYINT:
4828 case BUILT_IN_NEARBYINTF:
4829 case BUILT_IN_NEARBYINTL:
4830 target = expand_builtin_mathfn (exp, target, subtarget);
4831 if (target)
4832 return target;
4833 break;
4834
4835 case BUILT_IN_POW:
4836 case BUILT_IN_POWF:
4837 case BUILT_IN_POWL:
4838 if (! flag_unsafe_math_optimizations)
4839 break;
4840 target = expand_builtin_pow (exp, target, subtarget);
4841 if (target)
4842 return target;
4843 break;
4844
4845 case BUILT_IN_ATAN2:
4846 case BUILT_IN_ATAN2F:
4847 case BUILT_IN_ATAN2L:
4848 if (! flag_unsafe_math_optimizations)
4849 break;
4850 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4851 if (target)
4852 return target;
4853 break;
4854
4855 case BUILT_IN_APPLY_ARGS:
4856 return expand_builtin_apply_args ();
4857
4858 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4859 FUNCTION with a copy of the parameters described by
4860 ARGUMENTS, and ARGSIZE. It returns a block of memory
4861 allocated on the stack into which is stored all the registers
4862 that might possibly be used for returning the result of a
4863 function. ARGUMENTS is the value returned by
4864 __builtin_apply_args. ARGSIZE is the number of bytes of
4865 arguments that must be copied. ??? How should this value be
4866 computed? We'll also need a safe worst case value for varargs
4867 functions. */
4868 case BUILT_IN_APPLY:
4869 if (!validate_arglist (arglist, POINTER_TYPE,
4870 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4871 && !validate_arglist (arglist, REFERENCE_TYPE,
4872 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4873 return const0_rtx;
4874 else
4875 {
4876 int i;
4877 tree t;
4878 rtx ops[3];
4879
4880 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4881 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4882
4883 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4884 }
4885
4886 /* __builtin_return (RESULT) causes the function to return the
4887 value described by RESULT. RESULT is address of the block of
4888 memory returned by __builtin_apply. */
4889 case BUILT_IN_RETURN:
4890 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4891 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4892 NULL_RTX, VOIDmode, 0));
4893 return const0_rtx;
4894
4895 case BUILT_IN_SAVEREGS:
4896 return expand_builtin_saveregs ();
4897
4898 case BUILT_IN_ARGS_INFO:
4899 return expand_builtin_args_info (arglist);
4900
4901 /* Return the address of the first anonymous stack arg. */
4902 case BUILT_IN_NEXT_ARG:
4903 return expand_builtin_next_arg (arglist);
4904
4905 case BUILT_IN_CLASSIFY_TYPE:
4906 return expand_builtin_classify_type (arglist);
4907
4908 case BUILT_IN_CONSTANT_P:
4909 return expand_builtin_constant_p (arglist, target_mode);
4910
4911 case BUILT_IN_FRAME_ADDRESS:
4912 case BUILT_IN_RETURN_ADDRESS:
4913 return expand_builtin_frame_address (fndecl, arglist);
4914
4915 /* Returns the address of the area where the structure is returned.
4916 0 otherwise. */
4917 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4918 if (arglist != 0
4919 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4920 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4921 return const0_rtx;
4922 else
4923 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4924
4925 case BUILT_IN_ALLOCA:
4926 target = expand_builtin_alloca (arglist, target);
4927 if (target)
4928 return target;
4929 break;
4930
4931 case BUILT_IN_FFS:
4932 case BUILT_IN_FFSL:
4933 case BUILT_IN_FFSLL:
4934 target = expand_builtin_unop (target_mode, arglist, target,
4935 subtarget, ffs_optab);
4936 if (target)
4937 return target;
4938 break;
4939
4940 case BUILT_IN_CLZ:
4941 case BUILT_IN_CLZL:
4942 case BUILT_IN_CLZLL:
4943 target = expand_builtin_unop (target_mode, arglist, target,
4944 subtarget, clz_optab);
4945 if (target)
4946 return target;
4947 break;
4948
4949 case BUILT_IN_CTZ:
4950 case BUILT_IN_CTZL:
4951 case BUILT_IN_CTZLL:
4952 target = expand_builtin_unop (target_mode, arglist, target,
4953 subtarget, ctz_optab);
4954 if (target)
4955 return target;
4956 break;
4957
4958 case BUILT_IN_POPCOUNT:
4959 case BUILT_IN_POPCOUNTL:
4960 case BUILT_IN_POPCOUNTLL:
4961 target = expand_builtin_unop (target_mode, arglist, target,
4962 subtarget, popcount_optab);
4963 if (target)
4964 return target;
4965 break;
4966
4967 case BUILT_IN_PARITY:
4968 case BUILT_IN_PARITYL:
4969 case BUILT_IN_PARITYLL:
4970 target = expand_builtin_unop (target_mode, arglist, target,
4971 subtarget, parity_optab);
4972 if (target)
4973 return target;
4974 break;
4975
4976 case BUILT_IN_STRLEN:
4977 target = expand_builtin_strlen (arglist, target, target_mode);
4978 if (target)
4979 return target;
4980 break;
4981
4982 case BUILT_IN_STRCPY:
4983 target = expand_builtin_strcpy (arglist, target, mode);
4984 if (target)
4985 return target;
4986 break;
4987
4988 case BUILT_IN_STRNCPY:
4989 target = expand_builtin_strncpy (arglist, target, mode);
4990 if (target)
4991 return target;
4992 break;
4993
4994 case BUILT_IN_STPCPY:
4995 target = expand_builtin_stpcpy (arglist, target, mode);
4996 if (target)
4997 return target;
4998 break;
4999
5000 case BUILT_IN_STRCAT:
5001 target = expand_builtin_strcat (arglist, target, mode);
5002 if (target)
5003 return target;
5004 break;
5005
5006 case BUILT_IN_STRNCAT:
5007 target = expand_builtin_strncat (arglist, target, mode);
5008 if (target)
5009 return target;
5010 break;
5011
5012 case BUILT_IN_STRSPN:
5013 target = expand_builtin_strspn (arglist, target, mode);
5014 if (target)
5015 return target;
5016 break;
5017
5018 case BUILT_IN_STRCSPN:
5019 target = expand_builtin_strcspn (arglist, target, mode);
5020 if (target)
5021 return target;
5022 break;
5023
5024 case BUILT_IN_STRSTR:
5025 target = expand_builtin_strstr (arglist, target, mode);
5026 if (target)
5027 return target;
5028 break;
5029
5030 case BUILT_IN_STRPBRK:
5031 target = expand_builtin_strpbrk (arglist, target, mode);
5032 if (target)
5033 return target;
5034 break;
5035
5036 case BUILT_IN_INDEX:
5037 case BUILT_IN_STRCHR:
5038 target = expand_builtin_strchr (arglist, target, mode);
5039 if (target)
5040 return target;
5041 break;
5042
5043 case BUILT_IN_RINDEX:
5044 case BUILT_IN_STRRCHR:
5045 target = expand_builtin_strrchr (arglist, target, mode);
5046 if (target)
5047 return target;
5048 break;
5049
5050 case BUILT_IN_MEMCPY:
5051 target = expand_builtin_memcpy (arglist, target, mode);
5052 if (target)
5053 return target;
5054 break;
5055
5056 case BUILT_IN_MEMPCPY:
5057 target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
5058 if (target)
5059 return target;
5060 break;
5061
5062 case BUILT_IN_MEMMOVE:
5063 target = expand_builtin_memmove (arglist, target, mode);
5064 if (target)
5065 return target;
5066 break;
5067
5068 case BUILT_IN_BCOPY:
5069 target = expand_builtin_bcopy (arglist);
5070 if (target)
5071 return target;
5072 break;
5073
5074 case BUILT_IN_MEMSET:
5075 target = expand_builtin_memset (arglist, target, mode);
5076 if (target)
5077 return target;
5078 break;
5079
5080 case BUILT_IN_BZERO:
5081 target = expand_builtin_bzero (arglist);
5082 if (target)
5083 return target;
5084 break;
5085
5086 case BUILT_IN_STRCMP:
5087 target = expand_builtin_strcmp (exp, target, mode);
5088 if (target)
5089 return target;
5090 break;
5091
5092 case BUILT_IN_STRNCMP:
5093 target = expand_builtin_strncmp (exp, target, mode);
5094 if (target)
5095 return target;
5096 break;
5097
5098 case BUILT_IN_BCMP:
5099 case BUILT_IN_MEMCMP:
5100 target = expand_builtin_memcmp (exp, arglist, target, mode);
5101 if (target)
5102 return target;
5103 break;
5104
5105 case BUILT_IN_SETJMP:
5106 target = expand_builtin_setjmp (arglist, target);
5107 if (target)
5108 return target;
5109 break;
5110
5111 /* __builtin_longjmp is passed a pointer to an array of five words.
5112 It's similar to the C library longjmp function but works with
5113 __builtin_setjmp above. */
5114 case BUILT_IN_LONGJMP:
5115 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5116 break;
5117 else
5118 {
5119 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5120 VOIDmode, 0);
5121 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5122 NULL_RTX, VOIDmode, 0);
5123
5124 if (value != const1_rtx)
5125 {
5126 error ("__builtin_longjmp second argument must be 1");
5127 return const0_rtx;
5128 }
5129
5130 expand_builtin_longjmp (buf_addr, value);
5131 return const0_rtx;
5132 }
5133
5134 case BUILT_IN_TRAP:
5135 expand_builtin_trap ();
5136 return const0_rtx;
5137
5138 case BUILT_IN_FPUTS:
5139 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
5140 if (target)
5141 return target;
5142 break;
5143 case BUILT_IN_FPUTS_UNLOCKED:
5144 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
5145 if (target)
5146 return target;
5147 break;
5148
5149 case BUILT_IN_SPRINTF:
5150 target = expand_builtin_sprintf (arglist, target, mode);
5151 if (target)
5152 return target;
5153 break;
5154
5155 /* Various hooks for the DWARF 2 __throw routine. */
5156 case BUILT_IN_UNWIND_INIT:
5157 expand_builtin_unwind_init ();
5158 return const0_rtx;
5159 case BUILT_IN_DWARF_CFA:
5160 return virtual_cfa_rtx;
5161 #ifdef DWARF2_UNWIND_INFO
5162 case BUILT_IN_DWARF_SP_COLUMN:
5163 return expand_builtin_dwarf_sp_column ();
5164 case BUILT_IN_INIT_DWARF_REG_SIZES:
5165 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
5166 return const0_rtx;
5167 #endif
5168 case BUILT_IN_FROB_RETURN_ADDR:
5169 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
5170 case BUILT_IN_EXTRACT_RETURN_ADDR:
5171 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
5172 case BUILT_IN_EH_RETURN:
5173 expand_builtin_eh_return (TREE_VALUE (arglist),
5174 TREE_VALUE (TREE_CHAIN (arglist)));
5175 return const0_rtx;
5176 #ifdef EH_RETURN_DATA_REGNO
5177 case BUILT_IN_EH_RETURN_DATA_REGNO:
5178 return expand_builtin_eh_return_data_regno (arglist);
5179 #endif
5180 case BUILT_IN_VA_START:
5181 case BUILT_IN_STDARG_START:
5182 return expand_builtin_va_start (arglist);
5183 case BUILT_IN_VA_END:
5184 return expand_builtin_va_end (arglist);
5185 case BUILT_IN_VA_COPY:
5186 return expand_builtin_va_copy (arglist);
5187 case BUILT_IN_EXPECT:
5188 return expand_builtin_expect (arglist, target);
5189 case BUILT_IN_PREFETCH:
5190 expand_builtin_prefetch (arglist);
5191 return const0_rtx;
5192
5193
5194 default: /* just do library call, if unknown builtin */
5195 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
5196 error ("built-in function `%s' not currently supported",
5197 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5198 }
5199
5200 /* The switch statement above can drop through to cause the function
5201 to be called normally. */
5202 return expand_call (exp, target, ignore);
5203 }
5204
5205 /* Determine whether a tree node represents a call to a built-in
5206 math function. If the tree T is a call to a built-in function
5207 taking a single real argument, then the return value is the
5208 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
5209 the return value is END_BUILTINS. */
5210
5211 enum built_in_function
5212 builtin_mathfn_code (tree t)
5213 {
5214 tree fndecl, arglist;
5215
5216 if (TREE_CODE (t) != CALL_EXPR
5217 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
5218 return END_BUILTINS;
5219
5220 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5221 if (TREE_CODE (fndecl) != FUNCTION_DECL
5222 || ! DECL_BUILT_IN (fndecl)
5223 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5224 return END_BUILTINS;
5225
5226 arglist = TREE_OPERAND (t, 1);
5227 if (! arglist
5228 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5229 return END_BUILTINS;
5230
5231 arglist = TREE_CHAIN (arglist);
5232 switch (DECL_FUNCTION_CODE (fndecl))
5233 {
5234 case BUILT_IN_POW:
5235 case BUILT_IN_POWF:
5236 case BUILT_IN_POWL:
5237 case BUILT_IN_ATAN2:
5238 case BUILT_IN_ATAN2F:
5239 case BUILT_IN_ATAN2L:
5240 if (! arglist
5241 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
5242 || TREE_CHAIN (arglist))
5243 return END_BUILTINS;
5244 break;
5245
5246 default:
5247 if (arglist)
5248 return END_BUILTINS;
5249 break;
5250 }
5251
5252 return DECL_FUNCTION_CODE (fndecl);
5253 }
5254
5255 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5256 constant. ARGLIST is the argument list of the call. */
5257
5258 static tree
5259 fold_builtin_constant_p (tree arglist)
5260 {
5261 if (arglist == 0)
5262 return 0;
5263
5264 arglist = TREE_VALUE (arglist);
5265
5266 /* We return 1 for a numeric type that's known to be a constant
5267 value at compile-time or for an aggregate type that's a
5268 literal constant. */
5269 STRIP_NOPS (arglist);
5270
5271 /* If we know this is a constant, emit the constant of one. */
5272 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
5273 || (TREE_CODE (arglist) == CONSTRUCTOR
5274 && TREE_CONSTANT (arglist))
5275 || (TREE_CODE (arglist) == ADDR_EXPR
5276 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5277 return integer_one_node;
5278
5279 /* If we aren't going to be running CSE or this expression
5280 has side effects, show we don't know it to be a constant.
5281 Likewise if it's a pointer or aggregate type since in those
5282 case we only want literals, since those are only optimized
5283 when generating RTL, not later.
5284 And finally, if we are compiling an initializer, not code, we
5285 need to return a definite result now; there's not going to be any
5286 more optimization done. */
5287 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
5288 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5289 || POINTER_TYPE_P (TREE_TYPE (arglist))
5290 || cfun == 0)
5291 return integer_zero_node;
5292
5293 return 0;
5294 }
5295
5296 /* Fold a call to __builtin_classify_type. */
5297
5298 static tree
5299 fold_builtin_classify_type (tree arglist)
5300 {
5301 if (arglist == 0)
5302 return build_int_2 (no_type_class, 0);
5303
5304 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
5305 }
5306
5307 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5308
5309 static tree
5310 fold_builtin_inf (tree type, int warn)
5311 {
5312 REAL_VALUE_TYPE real;
5313
5314 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5315 warning ("target format does not support infinity");
5316
5317 real_inf (&real);
5318 return build_real (type, real);
5319 }
5320
5321 /* Fold a call to __builtin_nan or __builtin_nans. */
5322
5323 static tree
5324 fold_builtin_nan (tree arglist, tree type, int quiet)
5325 {
5326 REAL_VALUE_TYPE real;
5327 const char *str;
5328
5329 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5330 return 0;
5331 str = c_getstr (TREE_VALUE (arglist));
5332 if (!str)
5333 return 0;
5334
5335 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5336 return 0;
5337
5338 return build_real (type, real);
5339 }
5340
5341 /* EXP is assumed to me builtin call where truncation can be propagated
5342 across (for instance floor((double)f) == (double)floorf (f).
5343 Do the transformation. */
5344 static tree
5345 fold_trunc_transparent_mathfn (tree exp)
5346 {
5347 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5348 tree arglist = TREE_OPERAND (exp, 1);
5349 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5350
5351 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5352 {
5353 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
5354 tree ftype = TREE_TYPE (exp);
5355 tree newtype = TREE_TYPE (arg0);
5356 tree decl;
5357
5358 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5359 && (decl = mathfn_built_in (newtype, fcode)))
5360 {
5361 arglist =
5362 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5363 return convert (ftype,
5364 build_function_call_expr (decl, arglist));
5365 }
5366 }
5367 return 0;
5368 }
5369
/* Fold function call to builtin cabs, cabsf or cabsl.  FNDECL is the
   function's DECL, ARGLIST is the argument list and TYPE is the return
   type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (tree fndecl, tree arglist, tree type)
{
  tree arg;

  /* Require exactly one argument.  */
  if (!arglist || TREE_CHAIN (arglist))
    return NULL_TREE;

  /* The argument must be a complex value with a real component type.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Evaluate cabs of a constant at compile-time.  */
  if (flag_unsafe_math_optimizations
      && TREE_CODE (arg) == COMPLEX_CST
      && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
      && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
      && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
    {
      REAL_VALUE_TYPE r, i;

      r = TREE_REAL_CST (TREE_REALPART (arg));
      i = TREE_REAL_CST (TREE_IMAGPART (arg));

      /* Compute sqrt (r*r + i*i) with REAL_VALUE arithmetic; the
	 result accumulates in R.  */
      real_arithmetic (&r, MULT_EXPR, &r, &r);
      real_arithmetic (&i, MULT_EXPR, &i, &i);
      real_arithmetic (&r, PLUS_EXPR, &r, &i);
      /* Accept an inexact result only when trapping math is off.  */
      if (real_sqrt (&r, TYPE_MODE (type), &r)
	  || ! flag_trapping_math)
	return build_real (type, r);
    }

  /* If either part is zero, cabs is fabs of the other.  */
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 0)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 1)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));

  /* With -funsafe-math-optimizations, rewrite cabs(z) as
     sqrt (re*re + im*im) using the implicit sqrt builtin whose
     precision matches this cabs variant.  */
  if (flag_unsafe_math_optimizations)
    {
      enum built_in_function fcode;
      tree sqrtfn;

      fcode = DECL_FUNCTION_CODE (fndecl);
      if (fcode == BUILT_IN_CABS)
	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
      else if (fcode == BUILT_IN_CABSF)
	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
      else if (fcode == BUILT_IN_CABSL)
	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
      else
	sqrtfn = NULL_TREE;

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result, arglist;

	  rpart = fold (build1 (REALPART_EXPR, type, arg));
	  ipart = fold (build1 (IMAGPART_EXPR, type, arg));

	  /* Each part is used twice below; save_expr avoids
	     re-evaluating a side-effecting argument.  */
	  rpart = save_expr (rpart);
	  ipart = save_expr (ipart);

	  result = fold (build (PLUS_EXPR, type,
				fold (build (MULT_EXPR, type,
					     rpart, rpart)),
				fold (build (MULT_EXPR, type,
					     ipart, ipart))));

	  arglist = build_tree_list (NULL_TREE, result);
	  return build_function_call_expr (sqrtfn, arglist);
	}
    }

  return NULL_TREE;
}
5454
/* Used by constant folding to eliminate some builtin calls early.  EXP is
   the CALL_EXPR of a call to a builtin function.  Returns the folded
   tree, or 0 when no simplification applies.  */

tree
fold_builtin (tree exp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Machine-dependent builtins are never folded here.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return 0;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_CONSTANT_P:
      return fold_builtin_constant_p (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arglist);

    case BUILT_IN_STRLEN:
      /* Fold strlen of a string literal to its known length.  */
      if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
	{
	  tree len = c_strlen (TREE_VALUE (arglist), 0);
	  if (len)
	    {
	      /* Convert from the internal "sizetype" type to "size_t".  */
	      if (size_type_node)
		len = convert (size_type_node, len);
	      return len;
	    }
	}
      break;

    case BUILT_IN_FABS:
    case BUILT_IN_FABSF:
    case BUILT_IN_FABSL:
      /* fabs folds directly to ABS_EXPR.  */
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
      break;

    case BUILT_IN_CABS:
    case BUILT_IN_CABSF:
    case BUILT_IN_CABSL:
      return fold_builtin_cabs (fndecl, arglist, type);

    case BUILT_IN_SQRT:
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize sqrt of constant value.  Accept an inexact result
	     only when neither trapping math nor errno is required.  */
	  if (TREE_CODE (arg) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (arg))
	    {
	      REAL_VALUE_TYPE r, x;

	      x = TREE_REAL_CST (arg);
	      if (real_sqrt (&r, TYPE_MODE (type), &x)
		  || (!flag_trapping_math && !flag_errno_math))
		return build_real (type, r);
	    }

	  /* Optimize sqrt(exp(x)) = exp(x*0.5).  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_EXP
		  || fcode == BUILT_IN_EXPF
		  || fcode == BUILT_IN_EXPL))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	      arg = fold (build (MULT_EXPR, type,
				 TREE_VALUE (TREE_OPERAND (arg, 1)),
				 build_real (type, dconsthalf)));
	      arglist = build_tree_list (NULL_TREE, arg);
	      return build_function_call_expr (expfn, arglist);
	    }

	  /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_POW
		  || fcode == BUILT_IN_POWF
		  || fcode == BUILT_IN_POWL))
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	      tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
	      tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
	      tree narg1 = fold (build (MULT_EXPR, type, arg1,
					build_real (type, dconsthalf)));
	      arglist = tree_cons (NULL_TREE, arg0,
				   build_tree_list (NULL_TREE, narg1));
	      return build_function_call_expr (powfn, arglist);
	    }
	}
      break;

    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize sin(0.0) = 0.0.  */
	  if (real_zerop (arg))
	    return arg;
	}
      break;

    case BUILT_IN_COS:
    case BUILT_IN_COSF:
    case BUILT_IN_COSL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize cos(0.0) = 1.0.  */
	  if (real_zerop (arg))
	    return build_real (type, dconst1);

	  /* Optimize cos(-x) into cos(x).  */
	  if (TREE_CODE (arg) == NEGATE_EXPR)
	    {
	      tree arglist = build_tree_list (NULL_TREE,
					      TREE_OPERAND (arg, 0));
	      return build_function_call_expr (fndecl, arglist);
	    }
	}
      break;

    case BUILT_IN_EXP:
    case BUILT_IN_EXPF:
    case BUILT_IN_EXPL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize exp(0.0) = 1.0.  */
	  if (real_zerop (arg))
	    return build_real (type, dconst1);

	  /* Optimize exp(1.0) = e.  */
	  if (real_onep (arg))
	    {
	      REAL_VALUE_TYPE cst;

	      /* dconste and friends are lazily initialized.  */
	      if (! builtin_dconsts_init)
		init_builtin_dconsts ();
	      real_convert (&cst, TYPE_MODE (type), &dconste);
	      return build_real (type, cst);
	    }

	  /* Attempt to evaluate exp at compile-time, but only for
	     integral exponents, where real_powi can compute e**n.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (arg))
	    {
	      REAL_VALUE_TYPE cint;
	      REAL_VALUE_TYPE c;
	      HOST_WIDE_INT n;

	      c = TREE_REAL_CST (arg);
	      n = real_to_integer (&c);
	      real_from_integer (&cint, VOIDmode, n,
				 n < 0 ? -1 : 0, 0);
	      /* Proceed only if the argument is exactly an integer.  */
	      if (real_identical (&c, &cint))
		{
		  REAL_VALUE_TYPE x;

		  if (! builtin_dconsts_init)
		    init_builtin_dconsts ();
		  real_powi (&x, TYPE_MODE (type), &dconste, n);
		  return build_real (type, x);
		}
	    }

	  /* Optimize exp(log(x)) = x.  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_LOG
		  || fcode == BUILT_IN_LOGF
		  || fcode == BUILT_IN_LOGL))
	    return TREE_VALUE (TREE_OPERAND (arg, 1));
	}
      break;

    case BUILT_IN_LOG:
    case BUILT_IN_LOGF:
    case BUILT_IN_LOGL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize log(1.0) = 0.0.  */
	  if (real_onep (arg))
	    return build_real (type, dconst0);

	  /* Optimize log(exp(x)) = x.  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_EXP
		  || fcode == BUILT_IN_EXPF
		  || fcode == BUILT_IN_EXPL))
	    return TREE_VALUE (TREE_OPERAND (arg, 1));

	  /* Optimize log(sqrt(x)) = log(x)*0.5.  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_SQRT
		  || fcode == BUILT_IN_SQRTF
		  || fcode == BUILT_IN_SQRTL))
	    {
	      tree logfn = build_function_call_expr (fndecl,
						     TREE_OPERAND (arg, 1));
	      return fold (build (MULT_EXPR, type, logfn,
				  build_real (type, dconsthalf)));
	    }

	  /* Optimize log(pow(x,y)) = y*log(x).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_POW
		  || fcode == BUILT_IN_POWF
		  || fcode == BUILT_IN_POWL))
	    {
	      tree arg0, arg1, logfn;

	      arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
	      arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
	      arglist = build_tree_list (NULL_TREE, arg0);
	      logfn = build_function_call_expr (fndecl, arglist);
	      return fold (build (MULT_EXPR, type, arg1, logfn));
	    }
	}
      break;

    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize tan(0.0) = 0.0.  */
	  if (real_zerop (arg))
	    return arg;

	  /* Optimize tan(atan(x)) = x.  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_ATAN
		  || fcode == BUILT_IN_ATANF
		  || fcode == BUILT_IN_ATANL))
	    return TREE_VALUE (TREE_OPERAND (arg, 1));
	}
      break;

    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize atan(0.0) = 0.0.  */
	  if (real_zerop (arg))
	    return arg;

	  /* Optimize atan(1.0) = pi/4.  */
	  if (real_onep (arg))
	    {
	      REAL_VALUE_TYPE cst;

	      if (! builtin_dconsts_init)
		init_builtin_dconsts ();
	      real_convert (&cst, TYPE_MODE (type), &dconstpi);
	      /* Divide pi by 4 by decrementing the binary exponent.  */
	      cst.exp -= 2;
	      return build_real (type, cst);
	    }
	}
      break;

    case BUILT_IN_POW:
    case BUILT_IN_POWF:
    case BUILT_IN_POWL:
      if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg0 = TREE_VALUE (arglist);
	  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));

	  /* Optimize pow(1.0,y) = 1.0.  */
	  if (real_onep (arg0))
	    return omit_one_operand (type, build_real (type, dconst1), arg1);

	  if (TREE_CODE (arg1) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (arg1))
	    {
	      REAL_VALUE_TYPE c;
	      c = TREE_REAL_CST (arg1);

	      /* Optimize pow(x,0.0) = 1.0.  */
	      if (REAL_VALUES_EQUAL (c, dconst0))
		return omit_one_operand (type, build_real (type, dconst1),
					 arg0);

	      /* Optimize pow(x,1.0) = x.  */
	      if (REAL_VALUES_EQUAL (c, dconst1))
		return arg0;

	      /* Optimize pow(x,-1.0) = 1.0/x.  */
	      if (REAL_VALUES_EQUAL (c, dconstm1))
		return fold (build (RDIV_EXPR, type,
				    build_real (type, dconst1),
				    arg0));

	      /* Optimize pow(x,2.0) = x*x.  save_expr so X is only
		 evaluated once.  */
	      if (REAL_VALUES_EQUAL (c, dconst2)
		  && (*lang_hooks.decls.global_bindings_p) () == 0
		  && ! CONTAINS_PLACEHOLDER_P (arg0))
		{
		  arg0 = save_expr (arg0);
		  return fold (build (MULT_EXPR, type, arg0, arg0));
		}

	      /* Optimize pow(x,-2.0) = 1.0/(x*x).  */
	      if (flag_unsafe_math_optimizations
		  && REAL_VALUES_EQUAL (c, dconstm2)
		  && (*lang_hooks.decls.global_bindings_p) () == 0
		  && ! CONTAINS_PLACEHOLDER_P (arg0))
		{
		  arg0 = save_expr (arg0);
		  return fold (build (RDIV_EXPR, type,
				      build_real (type, dconst1),
				      fold (build (MULT_EXPR, type,
						   arg0, arg0))));
		}

	      /* Optimize pow(x,0.5) = sqrt(x).  */
	      if (flag_unsafe_math_optimizations
		  && REAL_VALUES_EQUAL (c, dconsthalf))
		{
		  tree sqrtfn;

		  /* Pick the sqrt variant matching this pow's
		     precision.  */
		  fcode = DECL_FUNCTION_CODE (fndecl);
		  if (fcode == BUILT_IN_POW)
		    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
		  else if (fcode == BUILT_IN_POWF)
		    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
		  else if (fcode == BUILT_IN_POWL)
		    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
		  else
		    sqrtfn = NULL_TREE;

		  if (sqrtfn != NULL_TREE)
		    {
		      tree arglist = build_tree_list (NULL_TREE, arg0);
		      return build_function_call_expr (sqrtfn, arglist);
		    }
		}

	      /* Attempt to evaluate pow at compile-time, when the
		 exponent is exactly an integer.  */
	      if (TREE_CODE (arg0) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg0))
		{
		  REAL_VALUE_TYPE cint;
		  HOST_WIDE_INT n;

		  n = real_to_integer (&c);
		  real_from_integer (&cint, VOIDmode, n,
				     n < 0 ? -1 : 0, 0);
		  if (real_identical (&c, &cint))
		    {
		      REAL_VALUE_TYPE x;
		      bool inexact;

		      x = TREE_REAL_CST (arg0);
		      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
		      /* An inexact value is acceptable only with
			 -funsafe-math-optimizations.  */
		      if (flag_unsafe_math_optimizations || !inexact)
			return build_real (type, x);
		    }
		}
	    }

	  /* Optimize pow(exp(x),y) = exp(x*y).  */
	  fcode = builtin_mathfn_code (arg0);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_EXP
		  || fcode == BUILT_IN_EXPF
		  || fcode == BUILT_IN_EXPL))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      arg = fold (build (MULT_EXPR, type, arg, arg1));
	      arglist = build_tree_list (NULL_TREE, arg);
	      return build_function_call_expr (expfn, arglist);
	    }

	  /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_SQRT
		  || fcode == BUILT_IN_SQRTF
		  || fcode == BUILT_IN_SQRTL))
	    {
	      tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree narg1 = fold (build (MULT_EXPR, type, arg1,
					build_real (type, dconsthalf)));

	      arglist = tree_cons (NULL_TREE, narg0,
				   build_tree_list (NULL_TREE, narg1));
	      return build_function_call_expr (fndecl, arglist);
	    }

	  /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_POW
		  || fcode == BUILT_IN_POWF
		  || fcode == BUILT_IN_POWL))
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
	      arglist = tree_cons (NULL_TREE, arg00,
				   build_tree_list (NULL_TREE, narg1));
	      return build_function_call_expr (fndecl, arglist);
	    }
	}
      break;

    case BUILT_IN_INF:
    case BUILT_IN_INFF:
    case BUILT_IN_INFL:
      /* __builtin_inf warns if the target lacks infinities ...  */
      return fold_builtin_inf (type, true);

    case BUILT_IN_HUGE_VAL:
    case BUILT_IN_HUGE_VALF:
    case BUILT_IN_HUGE_VALL:
      /* ... __builtin_huge_val does not.  */
      return fold_builtin_inf (type, false);

    case BUILT_IN_NAN:
    case BUILT_IN_NANF:
    case BUILT_IN_NANL:
      /* Quiet NaN.  */
      return fold_builtin_nan (arglist, type, true);

    case BUILT_IN_NANS:
    case BUILT_IN_NANSF:
    case BUILT_IN_NANSL:
      /* Signaling NaN.  */
      return fold_builtin_nan (arglist, type, false);

    /* Rounding functions commute with float truncation; try to
       narrow them to the argument's real precision.  */
    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
    case BUILT_IN_CEIL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
      return fold_trunc_transparent_mathfn (exp);

    default:
      break;
    }

  return 0;
}
5932
5933 /* Conveniently construct a function call expression. */
5934
5935 tree
5936 build_function_call_expr (tree fn, tree arglist)
5937 {
5938 tree call_expr;
5939
5940 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5941 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5942 call_expr, arglist);
5943 TREE_SIDE_EFFECTS (call_expr) = 1;
5944 return fold (call_expr);
5945 }
5946
5947 /* This function validates the types of a function call argument list
5948 represented as a tree chain of parameters against a specified list
5949 of tree_codes. If the last specifier is a 0, that represents an
5950 ellipses, otherwise the last specifier must be a VOID_TYPE. */
5951
5952 static int
5953 validate_arglist (tree arglist, ...)
5954 {
5955 enum tree_code code;
5956 int res = 0;
5957 va_list ap;
5958
5959 va_start (ap, arglist);
5960
5961 do
5962 {
5963 code = va_arg (ap, enum tree_code);
5964 switch (code)
5965 {
5966 case 0:
5967 /* This signifies an ellipses, any further arguments are all ok. */
5968 res = 1;
5969 goto end;
5970 case VOID_TYPE:
5971 /* This signifies an endlink, if no arguments remain, return
5972 true, otherwise return false. */
5973 res = arglist == 0;
5974 goto end;
5975 default:
5976 /* If no parameters remain or the parameter's code does not
5977 match the specified code, return false. Otherwise continue
5978 checking any remaining arguments. */
5979 if (arglist == 0
5980 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5981 goto end;
5982 break;
5983 }
5984 arglist = TREE_CHAIN (arglist);
5985 }
5986 while (1);
5987
5988 /* We need gotos here since we can only have one VA_CLOSE in a
5989 function. */
5990 end: ;
5991 va_end (ap);
5992
5993 return res;
5994 }
5995
/* Default version of target-specific builtin setup that does nothing.
   Targets without machine-dependent builtins use this hook as-is.  */

void
default_init_builtins (void)
{
}
6002
/* Default target-specific builtin expander that does nothing.
   Always yields NULL_RTX, i.e. expands no builtin; all parameters
   are accepted but unused.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
6014
/* Instantiate all remaining CONSTANT_P_RTX nodes: any __builtin_constant_p
   not yet resolved is now decided by whether its operand is a CONSTANT_P
   rtx, and the set source is replaced by const1_rtx or const0_rtx.  */

void
purge_builtin_constant_p (void)
{
  rtx insn, set, arg, new, note;

  /* Scan every insn for a single set whose source is a CONSTANT_P_RTX,
     possibly wrapped in a SUBREG.  Note the condition assigns SET and
     ARG as side effects.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (set = single_set (insn)) != NULL_RTX
	&& (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
	    || (GET_CODE (arg) == SUBREG
		&& (GET_CODE (arg = SUBREG_REG (arg))
		    == CONSTANT_P_RTX))))
      {
	/* Decide based on the CONSTANT_P_RTX's operand.  */
	arg = XEXP (arg, 0);
	new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
	validate_change (insn, &SET_SRC (set), new, 0);

	/* Remove the REG_EQUAL note from the insn.  */
	if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
	  remove_note (insn, note);
      }
}
6039
6040 /* Returns true is EXP represents data that would potentially reside
6041 in a readonly section. */
6042
6043 static bool
6044 readonly_data_expr (tree exp)
6045 {
6046 STRIP_NOPS (exp);
6047
6048 if (TREE_CODE (exp) == ADDR_EXPR)
6049 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
6050 else
6051 return false;
6052 }