1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.  */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
66 #define STACK_PUSH_CODE PRE_INC
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
75 /* Hook called by safe_from_p for language-specific tree codes. It is
76 up to the language front-end to install a hook if it has any such
77 codes that safe_from_p needs to know about.  Since safe_from_p will
78 recursively explore the TREE_OPERANDs of an expression, this hook
79 should not reexamine those pieces. This routine may recursively
80 call safe_from_p; it should always pass `0' as the TOP_P parameter.  */
82 int (*lang_safe_from_p
) PARAMS ((rtx
, tree
));
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
92 /* Don't check memory usage, since code is being emitted to check memory
93 usage.  Used when current_function_check_memory_usage is true, to avoid
94 infinite recursion. */
95 static int in_check_memory_usage
;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list
= 0;
100 /* This structure is used by move_by_pieces to describe the move to be performed.  */
102 struct move_by_pieces
111 int explicit_inc_from
;
112 unsigned HOST_WIDE_INT len
;
113 HOST_WIDE_INT offset
;
117 /* This structure is used by store_by_pieces to describe the clear to be performed.  */
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len
;
127 HOST_WIDE_INT offset
;
128 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
133 extern struct obstack permanent_obstack
;
135 static rtx get_push_address
PARAMS ((int));
137 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
138 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
139 PARAMS ((unsigned HOST_WIDE_INT
,
141 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
142 struct move_by_pieces
*));
143 static rtx clear_by_pieces_1
PARAMS ((PTR
, HOST_WIDE_INT
,
145 static void clear_by_pieces
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
147 static void store_by_pieces_1
PARAMS ((struct store_by_pieces
*,
149 static void store_by_pieces_2
PARAMS ((rtx (*) (rtx
, ...),
151 struct store_by_pieces
*));
152 static rtx get_subtarget
PARAMS ((rtx
));
153 static int is_zeros_p
PARAMS ((tree
));
154 static int mostly_zeros_p
PARAMS ((tree
));
155 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
156 HOST_WIDE_INT
, enum machine_mode
,
157 tree
, tree
, int, int));
158 static void store_constructor
PARAMS ((tree
, rtx
, int, HOST_WIDE_INT
));
159 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
160 HOST_WIDE_INT
, enum machine_mode
,
161 tree
, enum machine_mode
, int,
162 HOST_WIDE_INT
, int));
163 static enum memory_use_mode
164 get_memory_usage_from_modifier
PARAMS ((enum expand_modifier
));
165 static rtx var_rtx
PARAMS ((tree
));
166 static HOST_WIDE_INT highest_pow2_factor
PARAMS ((tree
));
167 static rtx expand_expr_unaligned
PARAMS ((tree
, unsigned int *));
168 static rtx expand_increment
PARAMS ((tree
, int, int));
169 static void do_jump_by_parts_greater
PARAMS ((tree
, int, rtx
, rtx
));
170 static void do_jump_by_parts_equality
PARAMS ((tree
, rtx
, rtx
));
171 static void do_compare_and_jump
PARAMS ((tree
, enum rtx_code
, enum rtx_code
,
173 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
175 static void emit_single_push_insn
PARAMS ((enum machine_mode
, rtx
, tree
));
177 static void do_tablejump
PARAMS ((rtx
, enum machine_mode
, rtx
, rtx
, rtx
));
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
183 static char direct_load
[NUM_MACHINE_MODES
];
184 static char direct_store
[NUM_MACHINE_MODES
];
186 /* If a memory-to-memory move would take MOVE_RATIO or more simple
187 move-instruction sequences, we will do a movstr or libcall instead. */
190 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
193 /* If we are optimizing for space (-Os), cut down the default move ratio. */
194 #define MOVE_RATIO (optimize_size ? 3 : 15)
198 /* This macro is used to determine whether move_by_pieces should be called
199 to perform a structure copy. */
200 #ifndef MOVE_BY_PIECES_P
201 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
205 /* This array records the insn_code of insns to perform block moves. */
206 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
208 /* This array records the insn_code of insns to perform block clears. */
209 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
211 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
213 #ifndef SLOW_UNALIGNED_ACCESS
214 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
217 /* This is run once per compilation to set up which modes can be used
218 directly in memory and to initialize the block move optab.  */
224 enum machine_mode mode
;
230 /* Try indexing by frame ptr and try by stack ptr.
231 It is known that on the Convex the stack ptr isn't a valid index.
232 With luck, one or the other is valid on any machine. */
233 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
234 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
236 insn
= emit_insn (gen_rtx_SET (0, NULL_RTX
, NULL_RTX
));
237 pat
= PATTERN (insn
);
239 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
240 mode
= (enum machine_mode
) ((int) mode
+ 1))
245 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
246 PUT_MODE (mem
, mode
);
247 PUT_MODE (mem1
, mode
);
249 /* See if there is some register that can be used in this mode and
250 directly loaded or stored from memory. */
252 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
253 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
254 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
257 if (! HARD_REGNO_MODE_OK (regno
, mode
))
260 reg
= gen_rtx_REG (mode
, regno
);
263 SET_DEST (pat
) = reg
;
264 if (recog (pat
, insn
, &num_clobbers
) >= 0)
265 direct_load
[(int) mode
] = 1;
267 SET_SRC (pat
) = mem1
;
268 SET_DEST (pat
) = reg
;
269 if (recog (pat
, insn
, &num_clobbers
) >= 0)
270 direct_load
[(int) mode
] = 1;
273 SET_DEST (pat
) = mem
;
274 if (recog (pat
, insn
, &num_clobbers
) >= 0)
275 direct_store
[(int) mode
] = 1;
278 SET_DEST (pat
) = mem1
;
279 if (recog (pat
, insn
, &num_clobbers
) >= 0)
280 direct_store
[(int) mode
] = 1;
287 /* This is run at the start of compiling a function. */
292 cfun
->expr
= (struct expr_status
*) xmalloc (sizeof (struct expr_status
));
295 pending_stack_adjust
= 0;
296 stack_pointer_delta
= 0;
297 inhibit_defer_pop
= 0;
299 apply_args_value
= 0;
305 struct expr_status
*p
;
310 ggc_mark_rtx (p
->x_saveregs_value
);
311 ggc_mark_rtx (p
->x_apply_args_value
);
312 ggc_mark_rtx (p
->x_forced_labels
);
323 /* Small sanity check that the queue is empty at the end of a function. */
326 finish_expr_for_function ()
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
343 enqueue_insn (var
, body
)
346 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
347 body
, pending_chain
);
348 return pending_chain
;
351 /* Use protect_from_queue to convert a QUEUED expression
352 into something that you can put immediately into an instruction.
353 If the queued incrementation has not happened yet,
354 protect_from_queue returns the variable itself.
355 If the incrementation has happened, protect_from_queue returns a temp
356 that contains a copy of the old value of the variable.
358 Any time an rtx which might possibly be a QUEUED is to be put
359 into an instruction, it must be passed through protect_from_queue first.
360 QUEUED expressions are not meaningful in instructions.
362 Do not pass a value through protect_from_queue and then hold
363 on to it for a while before putting it in an instruction!
364 If the queue is flushed in between, incorrect code will result. */
367 protect_from_queue (x
, modify
)
371 RTX_CODE code
= GET_CODE (x
);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain
== 0)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode.  Don't modify X in place since it might be shared.  */
386 if (code
== MEM
&& GET_MODE (x
) != BLKmode
387 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
390 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
394 rtx temp
= gen_reg_rtx (GET_MODE (x
));
396 emit_insn_before (gen_move_insn (temp
, new),
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
410 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
411 if (tem
!= XEXP (x
, 0))
417 else if (code
== PLUS
|| code
== MULT
)
419 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
420 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
421 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to emit_queue.  */
433 if (QUEUED_INSN (x
) == 0)
434 return copy_to_reg (QUEUED_VAR (x
));
435 /* If the increment has happened and a pre-increment copy exists, use that copy.  */
437 if (QUEUED_COPY (x
) != 0)
438 return QUEUED_COPY (x
);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
444 return QUEUED_COPY (x
);
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
456 enum rtx_code code
= GET_CODE (x
);
462 return queued_subexp_p (XEXP (x
, 0));
466 return (queued_subexp_p (XEXP (x
, 0))
467 || queued_subexp_p (XEXP (x
, 1)));
473 /* Perform all the pending incrementations. */
479 while ((p
= pending_chain
))
481 rtx body
= QUEUED_BODY (p
);
483 if (GET_CODE (body
) == SEQUENCE
)
485 QUEUED_INSN (p
) = XVECEXP (QUEUED_BODY (p
), 0, 0);
486 emit_insn (QUEUED_BODY (p
));
489 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
490 pending_chain
= QUEUED_NEXT (p
);
494 /* Copy data from FROM to TO, where the machine modes are not the same.
495 Both modes may be integer, or both may be floating.
496 UNSIGNEDP should be nonzero if FROM is an unsigned type.
497 This causes zero-extension instead of sign-extension. */
500 convert_move (to
, from
, unsignedp
)
504 enum machine_mode to_mode
= GET_MODE (to
);
505 enum machine_mode from_mode
= GET_MODE (from
);
506 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
507 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
511 /* rtx code for making an equivalent value. */
512 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
514 to
= protect_from_queue (to
, 1);
515 from
= protect_from_queue (from
, 0);
517 if (to_real
!= from_real
)
520 /* If FROM is a SUBREG that indicates that we have already done at least
521 the required extension, strip it.  We don't handle such SUBREGs as TO here.  */
524 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
525 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
526 >= GET_MODE_SIZE (to_mode
))
527 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
528 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
530 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
533 if (to_mode
== from_mode
534 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
536 emit_move_insn (to
, from
);
540 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
542 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
545 if (VECTOR_MODE_P (to_mode
))
546 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
548 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
550 emit_move_insn (to
, from
);
554 if (to_real
!= from_real
)
561 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
563 /* Try converting directly if the insn is supported. */
564 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
567 emit_unop_insn (code
, to
, from
, UNKNOWN
);
572 #ifdef HAVE_trunchfqf2
573 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
575 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
579 #ifdef HAVE_trunctqfqf2
580 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
582 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
586 #ifdef HAVE_truncsfqf2
587 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
589 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
593 #ifdef HAVE_truncdfqf2
594 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
596 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
600 #ifdef HAVE_truncxfqf2
601 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
603 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
607 #ifdef HAVE_trunctfqf2
608 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
610 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
615 #ifdef HAVE_trunctqfhf2
616 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
618 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
622 #ifdef HAVE_truncsfhf2
623 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
625 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
629 #ifdef HAVE_truncdfhf2
630 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
632 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
636 #ifdef HAVE_truncxfhf2
637 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
639 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
643 #ifdef HAVE_trunctfhf2
644 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
646 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
651 #ifdef HAVE_truncsftqf2
652 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
654 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
658 #ifdef HAVE_truncdftqf2
659 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
661 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
665 #ifdef HAVE_truncxftqf2
666 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
668 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
672 #ifdef HAVE_trunctftqf2
673 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
675 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
680 #ifdef HAVE_truncdfsf2
681 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
683 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
687 #ifdef HAVE_truncxfsf2
688 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
690 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
694 #ifdef HAVE_trunctfsf2
695 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
697 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
701 #ifdef HAVE_truncxfdf2
702 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
704 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
708 #ifdef HAVE_trunctfdf2
709 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
711 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
723 libcall
= extendsfdf2_libfunc
;
727 libcall
= extendsfxf2_libfunc
;
731 libcall
= extendsftf2_libfunc
;
743 libcall
= truncdfsf2_libfunc
;
747 libcall
= extenddfxf2_libfunc
;
751 libcall
= extenddftf2_libfunc
;
763 libcall
= truncxfsf2_libfunc
;
767 libcall
= truncxfdf2_libfunc
;
779 libcall
= trunctfsf2_libfunc
;
783 libcall
= trunctfdf2_libfunc
;
795 if (libcall
== (rtx
) 0)
796 /* This conversion is not implemented yet. */
800 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
802 insns
= get_insns ();
804 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
809 /* Now both modes are integers. */
811 /* Handle expanding beyond a word. */
812 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
813 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
820 enum machine_mode lowpart_mode
;
821 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
823 /* Try converting directly if the insn is supported. */
824 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
827 /* If FROM is a SUBREG, put it into a register. Do this
828 so that we always generate the same set of insns for
829 better cse'ing; if an intermediate assignment occurred,
830 we won't be doing the operation directly on the SUBREG. */
831 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
832 from
= force_reg (from_mode
, from
);
833 emit_unop_insn (code
, to
, from
, equiv_code
);
836 /* Next, try converting via full word. */
837 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
838 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
839 != CODE_FOR_nothing
))
841 if (GET_CODE (to
) == REG
)
842 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
843 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
844 emit_unop_insn (code
, to
,
845 gen_lowpart (word_mode
, to
), equiv_code
);
849 /* No special multiword conversion insn; do it by hand. */
852 /* Since we will turn this into a no conflict block, we must ensure
853 that the source does not overlap the target. */
855 if (reg_overlap_mentioned_p (to
, from
))
856 from
= force_reg (from_mode
, from
);
858 /* Get a copy of FROM widened to a word, if necessary. */
859 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
860 lowpart_mode
= word_mode
;
862 lowpart_mode
= from_mode
;
864 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
866 lowpart
= gen_lowpart (lowpart_mode
, to
);
867 emit_move_insn (lowpart
, lowfrom
);
869 /* Compute the value to put in each remaining word. */
871 fill_value
= const0_rtx
;
876 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
877 && STORE_FLAG_VALUE
== -1)
879 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
881 fill_value
= gen_reg_rtx (word_mode
);
882 emit_insn (gen_slt (fill_value
));
888 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
889 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
891 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
895 /* Fill the remaining words. */
896 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
898 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
899 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
904 if (fill_value
!= subword
)
905 emit_move_insn (subword
, fill_value
);
908 insns
= get_insns ();
911 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
912 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
916 /* Truncating multi-word to a word or less. */
917 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
918 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
920 if (!((GET_CODE (from
) == MEM
921 && ! MEM_VOLATILE_P (from
)
922 && direct_load
[(int) to_mode
]
923 && ! mode_dependent_address_p (XEXP (from
, 0)))
924 || GET_CODE (from
) == REG
925 || GET_CODE (from
) == SUBREG
))
926 from
= force_reg (from_mode
, from
);
927 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
931 /* Handle pointer conversion. */ /* SPEE 900220. */
932 if (to_mode
== PQImode
)
934 if (from_mode
!= QImode
)
935 from
= convert_to_mode (QImode
, from
, unsignedp
);
937 #ifdef HAVE_truncqipqi2
938 if (HAVE_truncqipqi2
)
940 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
943 #endif /* HAVE_truncqipqi2 */
947 if (from_mode
== PQImode
)
949 if (to_mode
!= QImode
)
951 from
= convert_to_mode (QImode
, from
, unsignedp
);
956 #ifdef HAVE_extendpqiqi2
957 if (HAVE_extendpqiqi2
)
959 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
962 #endif /* HAVE_extendpqiqi2 */
967 if (to_mode
== PSImode
)
969 if (from_mode
!= SImode
)
970 from
= convert_to_mode (SImode
, from
, unsignedp
);
972 #ifdef HAVE_truncsipsi2
973 if (HAVE_truncsipsi2
)
975 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
978 #endif /* HAVE_truncsipsi2 */
982 if (from_mode
== PSImode
)
984 if (to_mode
!= SImode
)
986 from
= convert_to_mode (SImode
, from
, unsignedp
);
991 #ifdef HAVE_extendpsisi2
992 if (! unsignedp
&& HAVE_extendpsisi2
)
994 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
997 #endif /* HAVE_extendpsisi2 */
998 #ifdef HAVE_zero_extendpsisi2
999 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1001 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1004 #endif /* HAVE_zero_extendpsisi2 */
1009 if (to_mode
== PDImode
)
1011 if (from_mode
!= DImode
)
1012 from
= convert_to_mode (DImode
, from
, unsignedp
);
1014 #ifdef HAVE_truncdipdi2
1015 if (HAVE_truncdipdi2
)
1017 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1020 #endif /* HAVE_truncdipdi2 */
1024 if (from_mode
== PDImode
)
1026 if (to_mode
!= DImode
)
1028 from
= convert_to_mode (DImode
, from
, unsignedp
);
1033 #ifdef HAVE_extendpdidi2
1034 if (HAVE_extendpdidi2
)
1036 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1039 #endif /* HAVE_extendpdidi2 */
1044 /* Now follow all the conversions between integers
1045 no more than a word long. */
1047 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1048 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1049 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1050 GET_MODE_BITSIZE (from_mode
)))
1052 if (!((GET_CODE (from
) == MEM
1053 && ! MEM_VOLATILE_P (from
)
1054 && direct_load
[(int) to_mode
]
1055 && ! mode_dependent_address_p (XEXP (from
, 0)))
1056 || GET_CODE (from
) == REG
1057 || GET_CODE (from
) == SUBREG
))
1058 from
= force_reg (from_mode
, from
);
1059 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1060 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1061 from
= copy_to_reg (from
);
1062 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1066 /* Handle extension. */
1067 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1069 /* Convert directly if that works. */
1070 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1071 != CODE_FOR_nothing
)
1073 emit_unop_insn (code
, to
, from
, equiv_code
);
1078 enum machine_mode intermediate
;
1082 /* Search for a mode to convert via. */
1083 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1084 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1085 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1086 != CODE_FOR_nothing
)
1087 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1088 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1089 GET_MODE_BITSIZE (intermediate
))))
1090 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1091 != CODE_FOR_nothing
))
1093 convert_move (to
, convert_to_mode (intermediate
, from
,
1094 unsignedp
), unsignedp
);
1098 /* No suitable intermediate mode.
1099 Generate what we need with shifts. */
1100 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1101 - GET_MODE_BITSIZE (from_mode
), 0);
1102 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1103 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1105 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1108 emit_move_insn (to
, tmp
);
1113 /* Support special truncate insns for certain modes. */
1115 if (from_mode
== DImode
&& to_mode
== SImode
)
1117 #ifdef HAVE_truncdisi2
1118 if (HAVE_truncdisi2
)
1120 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1124 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1128 if (from_mode
== DImode
&& to_mode
== HImode
)
1130 #ifdef HAVE_truncdihi2
1131 if (HAVE_truncdihi2
)
1133 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1137 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1141 if (from_mode
== DImode
&& to_mode
== QImode
)
1143 #ifdef HAVE_truncdiqi2
1144 if (HAVE_truncdiqi2
)
1146 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1150 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1154 if (from_mode
== SImode
&& to_mode
== HImode
)
1156 #ifdef HAVE_truncsihi2
1157 if (HAVE_truncsihi2
)
1159 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1163 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1167 if (from_mode
== SImode
&& to_mode
== QImode
)
1169 #ifdef HAVE_truncsiqi2
1170 if (HAVE_truncsiqi2
)
1172 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1176 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1180 if (from_mode
== HImode
&& to_mode
== QImode
)
1182 #ifdef HAVE_trunchiqi2
1183 if (HAVE_trunchiqi2
)
1185 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1189 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1193 if (from_mode
== TImode
&& to_mode
== DImode
)
1195 #ifdef HAVE_trunctidi2
1196 if (HAVE_trunctidi2
)
1198 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1202 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1206 if (from_mode
== TImode
&& to_mode
== SImode
)
1208 #ifdef HAVE_trunctisi2
1209 if (HAVE_trunctisi2
)
1211 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1215 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1219 if (from_mode
== TImode
&& to_mode
== HImode
)
1221 #ifdef HAVE_trunctihi2
1222 if (HAVE_trunctihi2
)
1224 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1228 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1232 if (from_mode
== TImode
&& to_mode
== QImode
)
1234 #ifdef HAVE_trunctiqi2
1235 if (HAVE_trunctiqi2
)
1237 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1241 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1245 /* Handle truncation of volatile memrefs, and so on;
1246 the things that couldn't be truncated directly,
1247 and for which there was no special instruction. */
1248 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1250 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1251 emit_move_insn (to
, temp
);
1255 /* Mode combination is not recognized. */
1259 /* Return an rtx for a value that would result
1260 from converting X to mode MODE.
1261 Both X and MODE may be floating, or both integer.
1262 UNSIGNEDP is nonzero if X is an unsigned value.
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1266 This function *must not* call protect_from_queue
1267 except when putting X into an insn (in which case convert_move does it). */
1270 convert_to_mode (mode
, x
, unsignedp
)
1271 enum machine_mode mode
;
1275 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1278 /* Return an rtx for a value that would result
1279 from converting X from mode OLDMODE to mode MODE.
1280 Both modes may be floating, or both integer.
1281 UNSIGNEDP is nonzero if X is an unsigned value.
1283 This can be done by referring to a part of X in place
1284 or by copying to a new temporary with conversion.
1286 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1288 This function *must not* call protect_from_queue
1289 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): this chunk is a garbled extraction -- original line numbers
   are fused into the text, statements are split across lines, and some
   lines (braces, returns) are missing.  Tokens below are preserved as-is.

   convert_modes: return an rtx for X converted to MODE, where X is
   currently in OLDMODE (or, per the code below, in GET_MODE (x) when that
   is not VOIDmode).  UNSIGNEDP selects zero- vs sign-extension.  The
   visible paths return via gen_lowpart, immed_double_const, GEN_INT, or a
   fresh pseudo TEMP filled by convert_move.  */
1292 convert_modes (mode
, oldmode
, x
, unsignedp
)
1293 enum machine_mode mode
, oldmode
;
1299 /* If FROM is a SUBREG that indicates that we have already done at least
1300 the required extension, strip it. */
1302 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1303 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1304 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1305 x
= gen_lowpart (mode
, x
);
1307 if (GET_MODE (x
) != VOIDmode
)
1308 oldmode
= GET_MODE (x
);
1310 if (mode
== oldmode
)
1313 /* There is one case that we must handle specially: If we are converting
1314 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1315 we are to interpret the constant as unsigned, gen_lowpart will do
1316 the wrong thing if the constant appears negative. What we want to do is
1317 make the high-order word of the constant zero, not all ones. */
1319 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1320 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1321 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1323 HOST_WIDE_INT val
= INTVAL (x
);
1325 if (oldmode
!= VOIDmode
1326 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1328 int width
= GET_MODE_BITSIZE (oldmode
);
1330 /* We need to zero extend VAL. */
1331 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1334 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1337 /* We can do this with a gen_lowpart if both desired and current modes
1338 are integer, and this is either a constant integer, a register, or a
1339 non-volatile MEM. Except for the constant case where MODE is no
1340 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1342 if ((GET_CODE (x
) == CONST_INT
1343 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1344 || (GET_MODE_CLASS (mode
) == MODE_INT
1345 && GET_MODE_CLASS (oldmode
) == MODE_INT
1346 && (GET_CODE (x
) == CONST_DOUBLE
1347 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1348 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1349 && direct_load
[(int) mode
])
1350 || (GET_CODE (x
) == REG
1351 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1352 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1354 /* ?? If we don't know OLDMODE, we have to assume here that
1355 X does not need sign- or zero-extension. This may not be
1356 the case, but it's the best we can do. */
1357 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1358 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1360 HOST_WIDE_INT val
= INTVAL (x
);
1361 int width
= GET_MODE_BITSIZE (oldmode
);
1363 /* We must sign or zero-extend in this case. Start by
1364 zero-extending, then sign extend if we need to. */
1365 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
/* NOTE(review): the condition line preceding this `&&` fragment is missing
   from the extraction -- presumably a `!unsignedp` test guarding the
   sign-extension below; confirm against the original source.  */
1367 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1368 val
|= (HOST_WIDE_INT
) (-1) << width
;
1370 return GEN_INT (trunc_int_for_mode (val
, mode
));
1373 return gen_lowpart (mode
, x
);
/* Fallback: emit an explicit conversion into a fresh pseudo.  */
1376 temp
= gen_reg_rtx (mode
);
1377 convert_move (temp
, x
, unsignedp
);
1381 /* This macro is used to determine what the largest unit size that
1382 move_by_pieces can use is. */
1384 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1385 move efficiently, as opposed to MOVE_MAX which is the maximum
1386 number of bytes we can move with a single instruction. */
1388 #ifndef MOVE_MAX_PIECES
1389 #define MOVE_MAX_PIECES MOVE_MAX
1392 /* Generate several move instructions to copy LEN bytes from block FROM to
1393 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1394 and TO through protect_from_queue before calling.
1396 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1397 used to push FROM to the stack.
1399 ALIGN is maximum alignment we can assume. */
/* NOTE(review): garbled extraction -- line numbers fused into text, lines
   split/missing.  Tokens preserved byte-for-byte below.

   move_by_pieces: copy LEN bytes from MEM FROM to MEM TO (see the comment
   block above this function) using a sequence of scalar moves, widest
   usable integer mode first.  Fills a struct move_by_pieces DATA with
   address/auto-increment bookkeeping, optionally copies addresses into
   registers and selects pre-decrement/post-increment addressing, then
   repeatedly calls move_by_pieces_1 with successively narrower modes.  */
1402 move_by_pieces (to
, from
, len
, align
)
1404 unsigned HOST_WIDE_INT len
;
1407 struct move_by_pieces data
;
1408 rtx to_addr
, from_addr
= XEXP (from
, 0);
1409 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1410 enum machine_mode mode
= VOIDmode
, tmode
;
1411 enum insn_code icode
;
1414 data
.from_addr
= from_addr
;
1417 to_addr
= XEXP (to
, 0);
/* Record whether TO's address is auto-modified, and whether the copy must
   run high-to-low (reverse) for a decrementing address.  The assigned
   field names are missing from the extraction.  */
1420 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1421 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1423 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1430 #ifdef STACK_GROWS_DOWNWARD
1436 data
.to_addr
= to_addr
;
1439 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1440 || GET_CODE (from_addr
) == POST_INC
1441 || GET_CODE (from_addr
) == POST_DEC
);
1443 data
.explicit_inc_from
= 0;
1444 data
.explicit_inc_to
= 0;
1445 if (data
.reverse
) data
.offset
= len
;
1448 /* If copying requires more than two move insns,
1449 copy addresses to registers (to make displacements shorter)
1450 and use post-increment if available. */
1451 if (!(data
.autinc_from
&& data
.autinc_to
)
1452 && move_by_pieces_ninsns (len
, align
) > 2)
1454 /* Find the mode of the largest move... */
1455 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1456 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1457 if (GET_MODE_SIZE (tmode
) < max_size
)
1460 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1462 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1463 data
.autinc_from
= 1;
1464 data
.explicit_inc_from
= -1;
1466 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1468 data
.from_addr
= copy_addr_to_reg (from_addr
);
1469 data
.autinc_from
= 1;
1470 data
.explicit_inc_from
= 1;
1472 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1473 data
.from_addr
= copy_addr_to_reg (from_addr
);
1474 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1476 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1478 data
.explicit_inc_to
= -1;
1480 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1482 data
.to_addr
= copy_addr_to_reg (to_addr
);
1484 data
.explicit_inc_to
= 1;
1486 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1487 data
.to_addr
= copy_addr_to_reg (to_addr
);
/* Cap ALIGN at the best the target can use for a single move.  */
1490 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1491 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1492 align
= MOVE_MAX
* BITS_PER_UNIT
;
1494 /* First move what we can in the largest integer mode, then go to
1495 successively smaller modes. */
1497 while (max_size
> 1)
1499 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1500 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1501 if (GET_MODE_SIZE (tmode
) < max_size
)
1504 if (mode
== VOIDmode
)
1507 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1508 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1509 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1511 max_size
= GET_MODE_SIZE (mode
);
1514 /* The code above should have handled everything. */
1514 /* The code above should have handled everything. */
1519 /* Return number of insns required to move L bytes by pieces.
1520 ALIGN (in bits) is maximum alignment we can assume. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   move_by_pieces_ninsns: return how many move insns move_by_pieces would
   need for L bytes at alignment ALIGN (bits).  Mirrors move_by_pieces'
   mode-selection loop: for each usable widest mode, L / size insns, with
   L %= size carried to the next narrower mode.  */
1522 static unsigned HOST_WIDE_INT
1523 move_by_pieces_ninsns (l
, align
)
1524 unsigned HOST_WIDE_INT l
;
1527 unsigned HOST_WIDE_INT n_insns
= 0;
1528 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
/* Cap ALIGN at the best usable alignment, as in move_by_pieces.  */
1530 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1531 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1532 align
= MOVE_MAX
* BITS_PER_UNIT
;
1534 while (max_size
> 1)
1536 enum machine_mode mode
= VOIDmode
, tmode
;
1537 enum insn_code icode
;
1539 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1540 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1541 if (GET_MODE_SIZE (tmode
) < max_size
)
1544 if (mode
== VOIDmode
)
1547 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1548 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1549 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1551 max_size
= GET_MODE_SIZE (mode
);
1559 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1560 with move instructions for mode MODE. GENFUN is the gen_... function
1561 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   move_by_pieces_1: subroutine of move_by_pieces (see comment above).
   While at least GET_MODE_SIZE (MODE) bytes remain, build TO1/FROM1
   operands (auto-increment form or explicit offset), emit any explicit
   pre-decrements, emit the move via GENFUN (or a push when TO is null --
   see the PUSH_ROUNDING branch), then emit explicit post-increments and
   advance the offset.  */
1564 move_by_pieces_1 (genfun
, mode
, data
)
1565 rtx (*genfun
) PARAMS ((rtx
, ...));
1566 enum machine_mode mode
;
1567 struct move_by_pieces
*data
;
1569 unsigned int size
= GET_MODE_SIZE (mode
);
1570 rtx to1
= NULL_RTX
, from1
;
1572 while (data
->len
>= size
)
1575 data
->offset
-= size
;
1579 if (data
->autinc_to
)
1581 to1
= replace_equiv_address (data
->to
, data
->to_addr
);
1582 to1
= adjust_address (to1
, mode
, 0);
1585 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1588 if (data
->autinc_from
)
1590 from1
= replace_equiv_address (data
->from
, data
->from_addr
);
1591 from1
= adjust_address (from1
, mode
, 0);
1594 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1596 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1597 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1598 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1599 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1602 emit_insn ((*genfun
) (to1
, from1
));
1605 #ifdef PUSH_ROUNDING
1606 emit_single_push_insn (mode
, from1
, NULL
);
1612 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1613 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1614 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1615 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1617 if (! data
->reverse
)
1618 data
->offset
+= size
;
1624 /* Emit code to move a block Y to a block X.
1625 This may be done with string-move instructions,
1626 with multiple scalar move instructions, or with a library call.
1628 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1630 SIZE is an rtx that says how long they are.
1631 ALIGN is the maximum alignment we can assume they have.
1633 Return the address of the new block, if memcpy is called and returns it,
/* NOTE(review): garbled extraction -- line numbers fused into text, lines
   split/missing (several `abort ()` calls and braces are presumably among
   the missing lines).  Tokens preserved byte-for-byte below.

   emit_block_move: copy SIZE bytes from BLKmode MEM Y to BLKmode MEM X
   (see the comment block above this function).  Strategy, in order:
   (1) move_by_pieces for small constant sizes; (2) the target's movstr
   pattern, narrowest usable mode first; (3) a call to memcpy built as a
   real CALL_EXPR (TARGET_MEM_FUNCTIONS) or a bcopy libcall otherwise.  */
1637 emit_block_move (x
, y
, size
)
1642 #ifdef TARGET_MEM_FUNCTIONS
1644 tree call_expr
, arg_list
;
1646 unsigned int align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1648 if (GET_MODE (x
) != BLKmode
)
1651 if (GET_MODE (y
) != BLKmode
)
1654 x
= protect_from_queue (x
, 1);
1655 y
= protect_from_queue (y
, 0);
1656 size
= protect_from_queue (size
, 0);
1658 if (GET_CODE (x
) != MEM
)
1660 if (GET_CODE (y
) != MEM
)
1665 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1666 move_by_pieces (x
, y
, INTVAL (size
), align
);
1669 /* Try the most limited insn first, because there's no point
1670 including more than one in the machine description unless
1671 the more limited one has some advantage. */
1673 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1674 enum machine_mode mode
;
1676 /* Since this is a move insn, we don't care about volatility. */
1679 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1680 mode
= GET_MODE_WIDER_MODE (mode
))
1682 enum insn_code code
= movstr_optab
[(int) mode
];
1683 insn_operand_predicate_fn pred
;
1685 if (code
!= CODE_FOR_nothing
1686 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1687 here because if SIZE is less than the mode mask, as it is
1688 returned by the macro, it will definitely be less than the
1689 actual mode mask. */
1690 && ((GET_CODE (size
) == CONST_INT
1691 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1692 <= (GET_MODE_MASK (mode
) >> 1)))
1693 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1694 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1695 || (*pred
) (x
, BLKmode
))
1696 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1697 || (*pred
) (y
, BLKmode
))
1698 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1699 || (*pred
) (opalign
, VOIDmode
)))
1702 rtx last
= get_last_insn ();
1705 op2
= convert_to_mode (mode
, size
, 1);
1706 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1707 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1708 op2
= copy_to_mode_reg (mode
, op2
);
1710 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
/* Pattern generation failed for this mode: roll back any insns it
   emitted and try the next wider mode.  */
1718 delete_insns_since (last
);
1724 /* X, Y, or SIZE may have been passed through protect_from_queue.
1726 It is unsafe to save the value generated by protect_from_queue
1727 and reuse it later. Consider what happens if emit_queue is
1728 called before the return value from protect_from_queue is used.
1730 Expansion of the CALL_EXPR below will call emit_queue before
1731 we are finished emitting RTL for argument setup. So if we are
1732 not careful we could get the wrong value for an argument.
1734 To avoid this problem we go ahead and emit code to copy X, Y &
1735 SIZE into new pseudos. We can then place those new pseudos
1736 into an RTL_EXPR and use them later, even after a call to
1739 Note this is not strictly needed for library calls since they
1740 do not call emit_queue before loading their arguments. However,
1741 we may need to have library calls call emit_queue in the future
1742 since failing to do so could cause problems for targets which
1743 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1744 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1745 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1747 #ifdef TARGET_MEM_FUNCTIONS
1748 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1750 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1751 TREE_UNSIGNED (integer_type_node
));
1752 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1755 #ifdef TARGET_MEM_FUNCTIONS
1756 /* It is incorrect to use the libcall calling conventions to call
1757 memcpy in this context.
1759 This could be a user call to memcpy and the user may wish to
1760 examine the return value from memcpy.
1762 For targets where libcalls and normal calls have different conventions
1763 for returning pointers, we could end up generating incorrect code.
1765 So instead of using a libcall sequence we build up a suitable
1766 CALL_EXPR and expand the call in the normal fashion. */
1767 if (fn
== NULL_TREE
)
1771 /* This was copied from except.c, I don't know if all this is
1772 necessary in this context or not. */
1773 fn
= get_identifier ("memcpy");
1774 fntype
= build_pointer_type (void_type_node
);
1775 fntype
= build_function_type (fntype
, NULL_TREE
);
1776 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1777 ggc_add_tree_root (&fn
, 1);
1778 DECL_EXTERNAL (fn
) = 1;
1779 TREE_PUBLIC (fn
) = 1;
1780 DECL_ARTIFICIAL (fn
) = 1;
1781 TREE_NOTHROW (fn
) = 1;
1782 make_decl_rtl (fn
, NULL
);
1783 assemble_external (fn
);
1786 /* We need to make an argument list for the function call.
1788 memcpy has three arguments, the first two are void * addresses and
1789 the last is a size_t byte count for the copy. */
1791 = build_tree_list (NULL_TREE
,
1792 make_tree (build_pointer_type (void_type_node
), x
));
1793 TREE_CHAIN (arg_list
)
1794 = build_tree_list (NULL_TREE
,
1795 make_tree (build_pointer_type (void_type_node
), y
));
1796 TREE_CHAIN (TREE_CHAIN (arg_list
))
1797 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1798 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1800 /* Now we have to build up the CALL_EXPR itself. */
1801 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1802 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1803 call_expr
, arg_list
, NULL_TREE
);
1804 TREE_SIDE_EFFECTS (call_expr
) = 1;
1806 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
/* Non-TARGET_MEM_FUNCTIONS path: bcopy takes (src, dst, len).  */
1808 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
1809 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1810 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1811 TREE_UNSIGNED (integer_type_node
)),
1812 TYPE_MODE (integer_type_node
));
1815 /* If we are initializing a readonly value, show the above call
1816 clobbered it. Otherwise, a load from it may erroneously be hoisted
1818 if (RTX_UNCHANGING_P (x
))
1819 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
))
1825 /* Copy all or part of a value X into registers starting at REGNO.
1826 The number of registers to be filled is NREGS. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   move_block_to_reg: copy NREGS words of X into hard registers starting
   at REGNO (see comment above).  Tries the target's load_multiple
   pattern first, falling back to one word-mode move per register.  */
1829 move_block_to_reg (regno
, x
, nregs
, mode
)
1833 enum machine_mode mode
;
1836 #ifdef HAVE_load_multiple
/* Force illegitimate constants into memory so they can be loaded.  */
1844 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1845 x
= validize_mem (force_const_mem (mode
, x
));
1847 /* See if the machine can do this with a load multiple insn. */
1848 #ifdef HAVE_load_multiple
1849 if (HAVE_load_multiple
)
1851 last
= get_last_insn ();
1852 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
/* If pattern generation failed, roll back and use the word loop.  */
1860 delete_insns_since (last
);
1864 for (i
= 0; i
< nregs
; i
++)
1865 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1866 operand_subword_force (x
, i
, mode
));
1869 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1870 The number of registers to be filled is NREGS. SIZE indicates the number
1871 of bytes in the object X. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   move_block_from_reg: copy NREGS registers starting at REGNO out into
   BLKmode value X of SIZE bytes (see comment above).  Special-cases a
   sub-word SIZE with a single narrow store, left-justifies sub-word
   blocks on big-endian targets, then tries store_multiple before falling
   back to one word-mode store per register.  */
1874 move_block_from_reg (regno
, x
, nregs
, size
)
1881 #ifdef HAVE_store_multiple
1885 enum machine_mode mode
;
1890 /* If SIZE is that of a mode no bigger than a word, just use that
1891 mode's store operation. */
1892 if (size
<= UNITS_PER_WORD
1893 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1895 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
1899 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1900 to the left before storing to memory. Note that the previous test
1901 doesn't handle all cases (e.g. SIZE == 3). */
1902 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1904 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
/* Shift the register value left so its bytes land at the low memory
   addresses of the destination word.  */
1910 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1911 gen_rtx_REG (word_mode
, regno
),
1912 build_int_2 ((UNITS_PER_WORD
- size
)
1913 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1914 emit_move_insn (tem
, shift
);
1918 /* See if the machine can do this with a store multiple insn. */
1919 #ifdef HAVE_store_multiple
1920 if (HAVE_store_multiple
)
1922 last
= get_last_insn ();
1923 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
/* If pattern generation failed, roll back and use the word loop.  */
1931 delete_insns_since (last
);
1935 for (i
= 0; i
< nregs
; i
++)
1937 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1942 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1946 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1947 registers represented by a PARALLEL. SSIZE represents the total size of
1948 block SRC in bytes, or -1 if not known. */
1949 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1950 the balance will be in what would be the low-order memory addresses, i.e.
1951 left justified for big endian, right justified for little endian. This
1952 happens to be true for the targets currently using this support. If this
1953 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   emit_group_load: load block ORIG_SRC into the non-consecutive registers
   described by PARALLEL DST; SSIZE is SRC's total size in bytes or -1
   (see comment above).  Each PARALLEL element pairs a destination reg
   with a byte offset.  Pieces are first gathered into pseudos TMPS[],
   then copied into the (probable) hard regs in a second pass.  */
1957 emit_group_load (dst
, orig_src
, ssize
)
1964 if (GET_CODE (dst
) != PARALLEL
)
1967 /* Check for a NULL entry, used to indicate that the parameter goes
1968 both on the stack and in registers. */
1969 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1974 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1976 /* Process the pieces. */
1977 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1979 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1980 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1981 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1984 /* Handle trailing fragments that run over the size of the struct. */
1985 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1987 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1988 bytelen
= ssize
- bytepos
;
1993 /* If we won't be loading directly from memory, protect the real source
1994 from strange tricks we might play; but make sure that the source can
1995 be loaded directly into the destination. */
1997 if (GET_CODE (orig_src
) != MEM
1998 && (!CONSTANT_P (orig_src
)
1999 || (GET_MODE (orig_src
) != mode
2000 && GET_MODE (orig_src
) != VOIDmode
)))
2002 if (GET_MODE (orig_src
) == VOIDmode
)
2003 src
= gen_reg_rtx (mode
);
2005 src
= gen_reg_rtx (GET_MODE (orig_src
));
2007 emit_move_insn (src
, orig_src
);
2010 /* Optimize the access just a bit. */
2011 if (GET_CODE (src
) == MEM
2012 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2013 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2014 && bytelen
== GET_MODE_SIZE (mode
))
2016 tmps
[i
] = gen_reg_rtx (mode
);
2017 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2019 else if (GET_CODE (src
) == CONCAT
)
/* NOTE(review): the condition line preceding this `&&` fragment is
   missing -- presumably a `bytepos == 0` test for the first CONCAT
   half; confirm against the original source.  */
2022 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
2023 tmps
[i
] = XEXP (src
, 0);
2024 else if (bytepos
== (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
2025 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
2026 tmps
[i
] = XEXP (src
, 1);
2027 else if (bytepos
== 0)
2029 rtx mem
= assign_stack_temp (GET_MODE (src
),
2030 GET_MODE_SIZE (GET_MODE (src
)), 0);
2031 emit_move_insn (mem
, src
);
2032 tmps
[i
] = adjust_address (mem
, mode
, 0);
2037 else if (CONSTANT_P (src
)
2038 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
/* General case: extract the piece as a bit field.  */
2041 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2042 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2045 if (BYTES_BIG_ENDIAN
&& shift
)
2046 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2047 tmps
[i
], 0, OPTAB_WIDEN
);
2052 /* Copy the extracted pieces into the proper (probable) hard regs. */
2053 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2054 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
2057 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2058 registers represented by a PARALLEL. SSIZE represents the total size of
2059 block DST, or -1 if not known. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   emit_group_store: store the non-consecutive registers described by
   PARALLEL SRC into block ORIG_DST; SSIZE is DST's total size in bytes
   or -1 (see comment above).  Hard regs are first copied into pseudos
   TMPS[], then each piece is stored at its byte offset, via a plain move
   when aligned or store_bit_field otherwise.  */
2062 emit_group_store (orig_dst
, src
, ssize
)
2069 if (GET_CODE (src
) != PARALLEL
)
2072 /* Check for a NULL entry, used to indicate that the parameter goes
2073 both on the stack and in registers. */
2074 if (XEXP (XVECEXP (src
, 0, 0), 0))
2079 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2081 /* Copy the (probable) hard regs into pseudos. */
2082 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2084 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2085 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2086 emit_move_insn (tmps
[i
], reg
);
2090 /* If we won't be storing directly into memory, protect the real destination
2091 from strange tricks we might play. */
2093 if (GET_CODE (dst
) == PARALLEL
)
2097 /* We can get a PARALLEL dst if there is a conditional expression in
2098 a return statement. In that case, the dst and src are the same,
2099 so no action is necessary. */
2100 if (rtx_equal_p (dst
, src
))
2103 /* It is unclear if we can ever reach here, but we may as well handle
2104 it. Allocate a temporary, and split this into a store/load to/from
2107 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2108 emit_group_store (temp
, src
, ssize
);
2109 emit_group_load (dst
, temp
, ssize
);
2112 else if (GET_CODE (dst
) != MEM
)
2114 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2115 /* Make life a bit easier for combine. */
2116 emit_move_insn (dst
, const0_rtx
);
2119 /* Process the pieces. */
2120 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2122 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2123 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2124 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2126 /* Handle trailing fragments that run over the size of the struct. */
2127 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2129 if (BYTES_BIG_ENDIAN
)
2131 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2132 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2133 tmps
[i
], 0, OPTAB_WIDEN
);
2135 bytelen
= ssize
- bytepos
;
2138 /* Optimize the access just a bit. */
2139 if (GET_CODE (dst
) == MEM
2140 && MEM_ALIGN (dst
) >= GET_MODE_ALIGNMENT (mode
)
2141 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2142 && bytelen
== GET_MODE_SIZE (mode
))
2143 emit_move_insn (adjust_address (dst
, mode
, bytepos
), tmps
[i
]);
2145 store_bit_field (dst
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2146 mode
, tmps
[i
], ssize
);
2151 /* Copy from the pseudo into the (probable) hard reg. */
2152 if (GET_CODE (dst
) == REG
)
2153 emit_move_insn (orig_dst
, dst
);
2156 /* Generate code to copy a BLKmode object of TYPE out of a
2157 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2158 is null, a stack temporary is created. TGTBLK is returned.
2160 The primary purpose of this routine is to handle functions
2161 that return BLKmode structures in registers. Some machines
2162 (the PA for example) want to return all small structures
2163 in registers regardless of the structure's alignment. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   copy_blkmode_from_reg: copy a BLKmode value of TYPE out of register(s)
   SRCREG into TGTBLK, creating a stack temporary for TGTBLK when it is
   null (see comment above).  Copies BITSIZE bits per iteration with
   extract_bit_field/store_bit_field, applying a big-endian bit-offset
   correction for sizes not a multiple of a word.  */
2166 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2171 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2172 rtx src
= NULL
, dst
= NULL
;
2173 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2174 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
/* TGTBLK null: build a const-qualified temporary to hold the value.  */
2178 tgtblk
= assign_temp (build_qualified_type (type
,
2180 | TYPE_QUAL_CONST
)),
2182 preserve_temp_slots (tgtblk
);
2185 /* This code assumes srcreg is at least a full word. If it isn't,
2186 copy it into a new pseudo which is a full word. */
2187 if (GET_MODE (srcreg
) != BLKmode
2188 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2189 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2191 /* Structures whose size is not a multiple of a word are aligned
2192 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2193 machine, this means we must skip the empty high order bytes when
2194 calculating the bit offset. */
2195 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2196 big_endian_correction
2197 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2199 /* Copy the structure BITSIZE bits at a time.
2201 We could probably emit more efficient code for machines which do not use
2202 strict alignment, but it doesn't seem worth the effort at the current
2204 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2205 bitpos
< bytes
* BITS_PER_UNIT
;
2206 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2208 /* We need a new source operand each time xbitpos is on a
2209 word boundary and when xbitpos == big_endian_correction
2210 (the first time through). */
2211 if (xbitpos
% BITS_PER_WORD
== 0
2212 || xbitpos
== big_endian_correction
)
2213 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2216 /* We need a new destination operand each time bitpos is on
2218 if (bitpos
% BITS_PER_WORD
== 0)
2219 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2221 /* Use xbitpos for the source extraction (right justified) and
2222 bitpos for the destination store (left justified). */
2223 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2224 extract_bit_field (src
, bitsize
,
2225 xbitpos
% BITS_PER_WORD
, 1,
2226 NULL_RTX
, word_mode
, word_mode
,
2234 /* Add a USE expression for REG to the (possibly empty) list pointed
2235 to by CALL_FUSAGE. REG must denote a hard register. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   use_reg: prepend a (USE reg) EXPR_LIST node to *CALL_FUSAGE (see the
   comment above).  REG must be a hard register; the consequence of the
   check below (presumably an abort) is among the missing lines.  */
2238 use_reg (call_fusage
, reg
)
2239 rtx
*call_fusage
, reg
;
2241 if (GET_CODE (reg
) != REG
2242 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2246 = gen_rtx_EXPR_LIST (VOIDmode
,
2247 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2250 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2251 starting at REGNO. All of these registers must be hard registers. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   use_regs: add USE expressions to *CALL_FUSAGE for the NREGS hard
   registers starting at REGNO (see comment above), one use_reg call per
   register in reg_raw_mode.  */
2254 use_regs (call_fusage
, regno
, nregs
)
2261 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2264 for (i
= 0; i
< nregs
; i
++)
2265 use_reg (call_fusage
, gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
));
2268 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2269 PARALLEL REGS. This is for calls that pass values in multiple
2270 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   use_group_regs: add a USE to *CALL_FUSAGE for every REG element of
   PARALLEL REGS (see comment above), skipping NULL and MEM entries.  */
2273 use_group_regs (call_fusage
, regs
)
2279 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2281 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2283 /* A NULL entry means the parameter goes both on the stack and in
2284 registers. This can also be a MEM for targets that pass values
2285 partially on the stack and partially in registers. */
2286 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2287 use_reg (call_fusage
, reg
);
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   can_store_by_pieces: determine whether store_by_pieces could handle
   LEN bytes at alignment ALIGN, with CONSTFUN/CONSTFUNDATA supplying the
   constant for each (offset, mode) piece.  Walks the same mode sequence
   as store_by_pieces and checks each generated constant with
   LEGITIMATE_CONSTANT_P; the success/failure returns are among the
   missing lines.  */
2293 can_store_by_pieces (len
, constfun
, constfundata
, align
)
2294 unsigned HOST_WIDE_INT len
;
2295 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2299 unsigned HOST_WIDE_INT max_size
, l
;
2300 HOST_WIDE_INT offset
= 0;
2301 enum machine_mode mode
, tmode
;
2302 enum insn_code icode
;
2306 if (! MOVE_BY_PIECES_P (len
, align
))
2309 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2310 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2311 align
= MOVE_MAX
* BITS_PER_UNIT
;
2313 /* We would first store what we can in the largest integer mode, then go to
2314 successively smaller modes. */
/* Outer loop fragment: try both copy directions when the target has
   decrementing addressing; the loop header line is partly missing.  */
2317 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2322 max_size
= MOVE_MAX_PIECES
+ 1;
2323 while (max_size
> 1)
2325 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2326 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2327 if (GET_MODE_SIZE (tmode
) < max_size
)
2330 if (mode
== VOIDmode
)
2333 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2334 if (icode
!= CODE_FOR_nothing
2335 && align
>= GET_MODE_ALIGNMENT (mode
))
2337 unsigned int size
= GET_MODE_SIZE (mode
);
2344 cst
= (*constfun
) (constfundata
, offset
, mode
);
2345 if (!LEGITIMATE_CONSTANT_P (cst
))
2355 max_size
= GET_MODE_SIZE (mode
);
2358 /* The code above should have handled everything. */
2358 /* The code above should have handled everything. */
2366 /* Generate several move instructions to store LEN bytes generated by
2367 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2368 pointer which will be passed as argument in every CONSTFUN call.
2369 ALIGN is maximum alignment we can assume. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   store_by_pieces: store LEN bytes generated by CONSTFUN (called with
   CONSTFUNDATA for every piece) into BLKmode MEM TO at alignment ALIGN
   (see comment above).  Packages the arguments into a struct
   store_by_pieces and delegates to store_by_pieces_1.  */
2372 store_by_pieces (to
, len
, constfun
, constfundata
, align
)
2374 unsigned HOST_WIDE_INT len
;
2375 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2379 struct store_by_pieces data
;
2381 if (! MOVE_BY_PIECES_P (len
, align
))
2383 to
= protect_from_queue (to
, 1);
2384 data
.constfun
= constfun
;
2385 data
.constfundata
= constfundata
;
2388 store_by_pieces_1 (&data
, align
);
2391 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). The caller must pass TO through protect_from_queue
2393 before calling. ALIGN is maximum alignment we can assume. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   clear_by_pieces: zero LEN bytes of BLKmode MEM TO at alignment ALIGN
   (see comment above).  Uses clear_by_pieces_1 as the constant generator
   (always const0_rtx) and delegates to store_by_pieces_1.  */
2396 clear_by_pieces (to
, len
, align
)
2398 unsigned HOST_WIDE_INT len
;
2401 struct store_by_pieces data
;
2403 data
.constfun
= clear_by_pieces_1
;
2404 data
.constfundata
= NULL
;
2407 store_by_pieces_1 (&data
, align
);
2410 /* Callback routine for clear_by_pieces.
2411 Return const0_rtx unconditionally. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   clear_by_pieces_1: constfun callback for clear_by_pieces; ignores all
   arguments and returns const0_rtx (per the comment above; the return
   statement itself is among the missing lines).  */
2414 clear_by_pieces_1 (data
, offset
, mode
)
2415 PTR data ATTRIBUTE_UNUSED
;
2416 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
;
2417 enum machine_mode mode ATTRIBUTE_UNUSED
;
2422 /* Subroutine of clear_by_pieces and store_by_pieces.
2423 Generate several move instructions to store LEN bytes of block TO. (A MEM
2424 rtx with BLKmode). The caller must pass TO through protect_from_queue
2425 before calling. ALIGN is maximum alignment we can assume. */
/* NOTE(review): garbled extraction; tokens preserved byte-for-byte.

   store_by_pieces_1: shared worker for store_by_pieces and
   clear_by_pieces (see comment above).  Mirrors move_by_pieces for the
   store-only case: set up address/auto-increment bookkeeping in DATA,
   optionally copy the address to a register and pick pre-decrement or
   post-increment addressing, then call store_by_pieces_2 with
   successively narrower integer modes.  */
2428 store_by_pieces_1 (data
, align
)
2429 struct store_by_pieces
*data
;
2432 rtx to_addr
= XEXP (data
->to
, 0);
2433 unsigned HOST_WIDE_INT max_size
= MOVE_MAX_PIECES
+ 1;
2434 enum machine_mode mode
= VOIDmode
, tmode
;
2435 enum insn_code icode
;
2438 data
->to_addr
= to_addr
;
/* Auto-increment and reverse-direction flags; the assigned field names
   are missing from the extraction.  */
2440 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2441 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2443 data
->explicit_inc_to
= 0;
2445 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2447 data
->offset
= data
->len
;
2449 /* If storing requires more than two move insns,
2450 copy addresses to registers (to make displacements shorter)
2451 and use post-increment if available. */
2452 if (!data
->autinc_to
2453 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2455 /* Determine the main mode we'll be using. */
2456 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2457 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2458 if (GET_MODE_SIZE (tmode
) < max_size
)
2461 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2463 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2464 data
->autinc_to
= 1;
2465 data
->explicit_inc_to
= -1;
2468 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2469 && ! data
->autinc_to
)
2471 data
->to_addr
= copy_addr_to_reg (to_addr
);
2472 data
->autinc_to
= 1;
2473 data
->explicit_inc_to
= 1;
2476 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2477 data
->to_addr
= copy_addr_to_reg (to_addr
);
/* Cap ALIGN at the best usable alignment, as in move_by_pieces.  */
2480 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2481 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2482 align
= MOVE_MAX
* BITS_PER_UNIT
;
2484 /* First store what we can in the largest integer mode, then go to
2485 successively smaller modes. */
2487 while (max_size
> 1)
2489 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2490 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2491 if (GET_MODE_SIZE (tmode
) < max_size
)
2494 if (mode
== VOIDmode
)
2497 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2498 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2499 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2501 max_size
= GET_MODE_SIZE (mode
);
2504 /* The code above should have handled everything. */
2504 /* The code above should have handled everything. */
2509 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2510 with move instructions for mode MODE. GENFUN is the gen_... function
2511 to make a move insn for that mode. DATA has all the other info. */
2514 store_by_pieces_2 (genfun
, mode
, data
)
2515 rtx (*genfun
) PARAMS ((rtx
, ...));
2516 enum machine_mode mode
;
2517 struct store_by_pieces
*data
;
2519 unsigned int size
= GET_MODE_SIZE (mode
);
2522 while (data
->len
>= size
)
2525 data
->offset
-= size
;
2527 if (data
->autinc_to
)
2529 to1
= replace_equiv_address (data
->to
, data
->to_addr
);
2530 to1
= adjust_address (to1
, mode
, 0);
2533 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2535 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2536 emit_insn (gen_add2_insn (data
->to_addr
,
2537 GEN_INT (-(HOST_WIDE_INT
) size
)));
2539 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2540 emit_insn ((*genfun
) (to1
, cst
));
2542 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2543 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2545 if (! data
->reverse
)
2546 data
->offset
+= size
;
2552 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2553 its length in bytes. */
2556 clear_storage (object
, size
)
2560 #ifdef TARGET_MEM_FUNCTIONS
2562 tree call_expr
, arg_list
;
2565 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2566 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2568 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2569 just move a zero. Otherwise, do this a piece at a time. */
2570 if (GET_MODE (object
) != BLKmode
2571 && GET_CODE (size
) == CONST_INT
2572 && GET_MODE_SIZE (GET_MODE (object
)) == (unsigned int) INTVAL (size
))
2573 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2576 object
= protect_from_queue (object
, 1);
2577 size
= protect_from_queue (size
, 0);
2579 if (GET_CODE (size
) == CONST_INT
2580 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2581 clear_by_pieces (object
, INTVAL (size
), align
);
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2588 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2589 enum machine_mode mode
;
2591 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2592 mode
= GET_MODE_WIDER_MODE (mode
))
2594 enum insn_code code
= clrstr_optab
[(int) mode
];
2595 insn_operand_predicate_fn pred
;
2597 if (code
!= CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size
) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2604 <= (GET_MODE_MASK (mode
) >> 1)))
2605 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2606 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2607 || (*pred
) (object
, BLKmode
))
2608 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2609 || (*pred
) (opalign
, VOIDmode
)))
2612 rtx last
= get_last_insn ();
2615 op1
= convert_to_mode (mode
, size
, 1);
2616 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2617 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2618 op1
= copy_to_mode_reg (mode
, op1
);
2620 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2627 delete_insns_since (last
);
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2653 #ifdef TARGET_MEM_FUNCTIONS
2654 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2656 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2657 TREE_UNSIGNED (integer_type_node
));
2658 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2661 #ifdef TARGET_MEM_FUNCTIONS
2662 /* It is incorrect to use the libcall calling conventions to call
2663 memset in this context.
2665 This could be a user call to memset and the user may wish to
2666 examine the return value from memset.
2668 For targets where libcalls and normal calls have different
2669 conventions for returning pointers, we could end up generating
2672 So instead of using a libcall sequence we build up a suitable
2673 CALL_EXPR and expand the call in the normal fashion. */
2674 if (fn
== NULL_TREE
)
2678 /* This was copied from except.c, I don't know if all this is
2679 necessary in this context or not. */
2680 fn
= get_identifier ("memset");
2681 fntype
= build_pointer_type (void_type_node
);
2682 fntype
= build_function_type (fntype
, NULL_TREE
);
2683 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2684 ggc_add_tree_root (&fn
, 1);
2685 DECL_EXTERNAL (fn
) = 1;
2686 TREE_PUBLIC (fn
) = 1;
2687 DECL_ARTIFICIAL (fn
) = 1;
2688 TREE_NOTHROW (fn
) = 1;
2689 make_decl_rtl (fn
, NULL
);
2690 assemble_external (fn
);
2693 /* We need to make an argument list for the function call.
2695 memset has three arguments, the first is a void * addresses, the
2696 second an integer with the initialization value, the last is a
2697 size_t byte count for the copy. */
2699 = build_tree_list (NULL_TREE
,
2700 make_tree (build_pointer_type (void_type_node
),
2702 TREE_CHAIN (arg_list
)
2703 = build_tree_list (NULL_TREE
,
2704 make_tree (integer_type_node
, const0_rtx
));
2705 TREE_CHAIN (TREE_CHAIN (arg_list
))
2706 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2707 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
2709 /* Now we have to build up the CALL_EXPR itself. */
2710 call_expr
= build1 (ADDR_EXPR
,
2711 build_pointer_type (TREE_TYPE (fn
)), fn
);
2712 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2713 call_expr
, arg_list
, NULL_TREE
);
2714 TREE_SIDE_EFFECTS (call_expr
) = 1;
2716 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2718 emit_library_call (bzero_libfunc
, LCT_NORMAL
,
2719 VOIDmode
, 2, object
, Pmode
, size
,
2720 TYPE_MODE (integer_type_node
));
2723 /* If we are initializing a readonly value, show the above call
2724 clobbered it. Otherwise, a load from it may erroneously be
2725 hoisted from a loop. */
2726 if (RTX_UNCHANGING_P (object
))
2727 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
2734 /* Generate code to copy Y into X.
2735 Both Y and X must have the same mode, except that
2736 Y can be a constant with VOIDmode.
2737 This mode cannot be BLKmode; use emit_block_move for that.
2739 Return the last instruction emitted. */
2742 emit_move_insn (x
, y
)
2745 enum machine_mode mode
= GET_MODE (x
);
2746 rtx y_cst
= NULL_RTX
;
2749 x
= protect_from_queue (x
, 1);
2750 y
= protect_from_queue (y
, 0);
2752 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2755 /* Never force constant_p_rtx to memory. */
2756 if (GET_CODE (y
) == CONSTANT_P_RTX
)
2758 else if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2761 y
= force_const_mem (mode
, y
);
2764 /* If X or Y are memory references, verify that their addresses are valid
2766 if (GET_CODE (x
) == MEM
2767 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2768 && ! push_operand (x
, GET_MODE (x
)))
2770 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2771 x
= validize_mem (x
);
2773 if (GET_CODE (y
) == MEM
2774 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2776 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2777 y
= validize_mem (y
);
2779 if (mode
== BLKmode
)
2782 last_insn
= emit_move_insn_1 (x
, y
);
2784 if (y_cst
&& GET_CODE (x
) == REG
)
2785 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
2790 /* Low level part of emit_move_insn.
2791 Called just like emit_move_insn, but assumes X and Y
2792 are basically valid. */
2795 emit_move_insn_1 (x
, y
)
2798 enum machine_mode mode
= GET_MODE (x
);
2799 enum machine_mode submode
;
2800 enum mode_class
class = GET_MODE_CLASS (mode
);
2803 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2806 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2808 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2810 /* Expand complex moves by moving real part and imag part, if possible. */
2811 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2812 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2814 (class == MODE_COMPLEX_INT
2815 ? MODE_INT
: MODE_FLOAT
),
2817 && (mov_optab
->handlers
[(int) submode
].insn_code
2818 != CODE_FOR_nothing
))
2820 /* Don't split destination if it is a stack push. */
2821 int stack
= push_operand (x
, GET_MODE (x
));
2823 #ifdef PUSH_ROUNDING
2824 /* In case we output to the stack, but the size is smaller machine can
2825 push exactly, we need to use move instructions. */
2827 && PUSH_ROUNDING (GET_MODE_SIZE (submode
)) != GET_MODE_SIZE (submode
))
2830 int offset1
, offset2
;
2832 /* Do not use anti_adjust_stack, since we don't want to update
2833 stack_pointer_delta. */
2834 temp
= expand_binop (Pmode
,
2835 #ifdef STACK_GROWS_DOWNWARD
2842 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))),
2846 if (temp
!= stack_pointer_rtx
)
2847 emit_move_insn (stack_pointer_rtx
, temp
);
2848 #ifdef STACK_GROWS_DOWNWARD
2850 offset2
= GET_MODE_SIZE (submode
);
2852 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2853 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2854 + GET_MODE_SIZE (submode
));
2856 emit_move_insn (change_address (x
, submode
,
2857 gen_rtx_PLUS (Pmode
,
2859 GEN_INT (offset1
))),
2860 gen_realpart (submode
, y
));
2861 emit_move_insn (change_address (x
, submode
,
2862 gen_rtx_PLUS (Pmode
,
2864 GEN_INT (offset2
))),
2865 gen_imagpart (submode
, y
));
2869 /* If this is a stack, push the highpart first, so it
2870 will be in the argument order.
2872 In that case, change_address is used only to convert
2873 the mode, not to change the address. */
2876 /* Note that the real part always precedes the imag part in memory
2877 regardless of machine's endianness. */
2878 #ifdef STACK_GROWS_DOWNWARD
2879 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2880 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2881 gen_imagpart (submode
, y
)));
2882 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2883 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2884 gen_realpart (submode
, y
)));
2886 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2887 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2888 gen_realpart (submode
, y
)));
2889 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2890 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2891 gen_imagpart (submode
, y
)));
2896 rtx realpart_x
, realpart_y
;
2897 rtx imagpart_x
, imagpart_y
;
2899 /* If this is a complex value with each part being smaller than a
2900 word, the usual calling sequence will likely pack the pieces into
2901 a single register. Unfortunately, SUBREG of hard registers only
2902 deals in terms of words, so we have a problem converting input
2903 arguments to the CONCAT of two registers that is used elsewhere
2904 for complex values. If this is before reload, we can copy it into
2905 memory and reload. FIXME, we should see about using extract and
2906 insert on integer registers, but complex short and complex char
2907 variables should be rarely used. */
2908 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2909 && (reload_in_progress
| reload_completed
) == 0)
2911 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2912 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2914 if (packed_dest_p
|| packed_src_p
)
2916 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2917 ? MODE_FLOAT
: MODE_INT
);
2919 enum machine_mode reg_mode
2920 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2922 if (reg_mode
!= BLKmode
)
2924 rtx mem
= assign_stack_temp (reg_mode
,
2925 GET_MODE_SIZE (mode
), 0);
2926 rtx cmem
= adjust_address (mem
, mode
, 0);
2929 = N_("function using short complex types cannot be inline");
2933 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2934 emit_move_insn_1 (cmem
, y
);
2935 return emit_move_insn_1 (sreg
, mem
);
2939 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2940 emit_move_insn_1 (mem
, sreg
);
2941 return emit_move_insn_1 (x
, cmem
);
2947 realpart_x
= gen_realpart (submode
, x
);
2948 realpart_y
= gen_realpart (submode
, y
);
2949 imagpart_x
= gen_imagpart (submode
, x
);
2950 imagpart_y
= gen_imagpart (submode
, y
);
2952 /* Show the output dies here. This is necessary for SUBREGs
2953 of pseudos since we cannot track their lifetimes correctly;
2954 hard regs shouldn't appear here except as return values.
2955 We never want to emit such a clobber after reload. */
2957 && ! (reload_in_progress
|| reload_completed
)
2958 && (GET_CODE (realpart_x
) == SUBREG
2959 || GET_CODE (imagpart_x
) == SUBREG
))
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2964 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2965 (realpart_x
, realpart_y
));
2966 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2967 (imagpart_x
, imagpart_y
));
2970 return get_last_insn ();
2973 /* This will handle any multi-word mode that lacks a move_insn pattern.
2974 However, you will get better code if you define such patterns,
2975 even if they must turn into multiple assembler instructions. */
2976 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2982 #ifdef PUSH_ROUNDING
2984 /* If X is a push on the stack, do the push now and replace
2985 X with a reference to the stack pointer. */
2986 if (push_operand (x
, GET_MODE (x
)))
2991 /* Do not use anti_adjust_stack, since we don't want to update
2992 stack_pointer_delta. */
2993 temp
= expand_binop (Pmode
,
2994 #ifdef STACK_GROWS_DOWNWARD
3001 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))),
3005 if (temp
!= stack_pointer_rtx
)
3006 emit_move_insn (stack_pointer_rtx
, temp
);
3008 code
= GET_CODE (XEXP (x
, 0));
3009 /* Just hope that small offsets off SP are OK. */
3010 if (code
== POST_INC
)
3011 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3012 GEN_INT (-(HOST_WIDE_INT
)
3013 GET_MODE_SIZE (GET_MODE (x
))));
3014 else if (code
== POST_DEC
)
3015 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3016 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3018 temp
= stack_pointer_rtx
;
3020 x
= change_address (x
, VOIDmode
, temp
);
3024 /* If we are in reload, see if either operand is a MEM whose address
3025 is scheduled for replacement. */
3026 if (reload_in_progress
&& GET_CODE (x
) == MEM
3027 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3028 x
= replace_equiv_address_nv (x
, inner
);
3029 if (reload_in_progress
&& GET_CODE (y
) == MEM
3030 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3031 y
= replace_equiv_address_nv (y
, inner
);
3037 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3040 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3041 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3043 /* If we can't get a part of Y, put Y into memory if it is a
3044 constant. Otherwise, force it into a register. If we still
3045 can't get a part of Y, abort. */
3046 if (ypart
== 0 && CONSTANT_P (y
))
3048 y
= force_const_mem (mode
, y
);
3049 ypart
= operand_subword (y
, i
, 1, mode
);
3051 else if (ypart
== 0)
3052 ypart
= operand_subword_force (y
, i
, mode
);
3054 if (xpart
== 0 || ypart
== 0)
3057 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3059 last_insn
= emit_move_insn (xpart
, ypart
);
3062 seq
= gen_sequence ();
3065 /* Show the output dies here. This is necessary for SUBREGs
3066 of pseudos since we cannot track their lifetimes correctly;
3067 hard regs shouldn't appear here except as return values.
3068 We never want to emit such a clobber after reload. */
3070 && ! (reload_in_progress
|| reload_completed
)
3071 && need_clobber
!= 0)
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3084 /* Pushing data onto the stack. */
3086 /* Push a block of length SIZE (perhaps variable)
3087 and return an rtx to address the beginning of the block.
3088 Note that it is not possible for the value returned to be a QUEUED.
3089 The value may be virtual_outgoing_args_rtx.
3091 EXTRA is the number of bytes of padding to push in addition to SIZE.
3092 BELOW nonzero means this padding comes at low addresses;
3093 otherwise, the padding comes at high addresses. */
3096 push_block (size
, extra
, below
)
3102 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3103 if (CONSTANT_P (size
))
3104 anti_adjust_stack (plus_constant (size
, extra
));
3105 else if (GET_CODE (size
) == REG
&& extra
== 0)
3106 anti_adjust_stack (size
);
3109 temp
= copy_to_mode_reg (Pmode
, size
);
3111 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3112 temp
, 0, OPTAB_LIB_WIDEN
);
3113 anti_adjust_stack (temp
);
3116 #ifndef STACK_GROWS_DOWNWARD
3122 temp
= virtual_outgoing_args_rtx
;
3123 if (extra
!= 0 && below
)
3124 temp
= plus_constant (temp
, extra
);
3128 if (GET_CODE (size
) == CONST_INT
)
3129 temp
= plus_constant (virtual_outgoing_args_rtx
,
3130 -INTVAL (size
) - (below
? 0 : extra
));
3131 else if (extra
!= 0 && !below
)
3132 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3133 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3135 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3136 negate_rtx (Pmode
, size
));
3139 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3143 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3144 block of SIZE bytes. */
3147 get_push_address (size
)
3152 if (STACK_PUSH_CODE
== POST_DEC
)
3153 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
3154 else if (STACK_PUSH_CODE
== POST_INC
)
3155 temp
= gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
3157 temp
= stack_pointer_rtx
;
3159 return copy_to_reg (temp
);
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
      /* The push rounds the size up, so a plain pre-dec/pre-inc would
	 adjust SP by the wrong amount; build an explicit PRE_MODIFY with
	 the rounded displacement instead.  */
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);
      /* Function incoming arguments may overlap with sibling call
	 outgoing arguments and we cannot allow reordering of reads
	 from function arguments with stores to outgoing arguments
	 of sibling calls.  */
      set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
3219 /* Generate code to push X onto the stack, assuming it has mode MODE and
3221 MODE is redundant except when X is a CONST_INT (since they don't
3223 SIZE is an rtx for the size of data to be copied (in bytes),
3224 needed only if X is BLKmode.
3226 ALIGN (in bits) is maximum alignment we can assume.
3228 If PARTIAL and REG are both nonzero, then copy that many of the first
3229 words of X into registers starting with REG, and push the rest of X.
3230 The amount of space pushed is decreased by PARTIAL words,
3231 rounded *down* to a multiple of PARM_BOUNDARY.
3232 REG must be a hard register in this case.
3233 If REG is zero but PARTIAL is not, take any all others actions for an
3234 argument partially in registers, but do not actually load any
3237 EXTRA is the amount in bytes of extra space to leave next to this arg.
3238 This is ignored if an argument block has already been allocated.
3240 On a machine that lacks real push insns, ARGS_ADDR is the address of
3241 the bottom of the argument block for this call. We use indexing off there
3242 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3243 argument block has not been preallocated.
3245 ARGS_SO_FAR is the size of args previously pushed for this call.
3247 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3248 for arguments passed in registers. If nonzero, it will be the number
3249 of bytes required. */
3252 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3253 args_addr
, args_so_far
, reg_parm_stack_space
,
3256 enum machine_mode mode
;
3265 int reg_parm_stack_space
;
3269 enum direction stack_direction
3270 #ifdef STACK_GROWS_DOWNWARD
3276 /* Decide where to pad the argument: `downward' for below,
3277 `upward' for above, or `none' for don't pad it.
3278 Default is below for small data on big-endian machines; else above. */
3279 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3281 /* Invert direction if stack is post-decrement.
3283 if (STACK_PUSH_CODE
== POST_DEC
)
3284 if (where_pad
!= none
)
3285 where_pad
= (where_pad
== downward
? upward
: downward
);
3287 xinner
= x
= protect_from_queue (x
, 0);
3289 if (mode
== BLKmode
)
3291 /* Copy a block into the stack, entirely or partially. */
3294 int used
= partial
* UNITS_PER_WORD
;
3295 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3303 /* USED is now the # of bytes we need not copy to the stack
3304 because registers will take care of them. */
3307 xinner
= adjust_address (xinner
, BLKmode
, used
);
3309 /* If the partial register-part of the arg counts in its stack size,
3310 skip the part of stack space corresponding to the registers.
3311 Otherwise, start copying to the beginning of the stack space,
3312 by setting SKIP to 0. */
3313 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3315 #ifdef PUSH_ROUNDING
3316 /* Do it with several push insns if that doesn't take lots of insns
3317 and if there is no difficulty with push insns that skip bytes
3318 on the stack for alignment purposes. */
3321 && GET_CODE (size
) == CONST_INT
3323 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3324 /* Here we avoid the case of a structure whose weak alignment
3325 forces many pushes of a small amount of data,
3326 and such small pushes do rounding that causes trouble. */
3327 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3328 || align
>= BIGGEST_ALIGNMENT
3329 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3330 == (align
/ BITS_PER_UNIT
)))
3331 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3333 /* Push padding now if padding above and stack grows down,
3334 or if padding below and stack grows up.
3335 But if space already allocated, this has already been done. */
3336 if (extra
&& args_addr
== 0
3337 && where_pad
!= none
&& where_pad
!= stack_direction
)
3338 anti_adjust_stack (GEN_INT (extra
));
3340 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3342 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3346 in_check_memory_usage
= 1;
3347 temp
= get_push_address (INTVAL (size
) - used
);
3348 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3349 emit_library_call (chkr_copy_bitmap_libfunc
,
3350 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3351 Pmode
, XEXP (xinner
, 0), Pmode
,
3352 GEN_INT (INTVAL (size
) - used
),
3353 TYPE_MODE (sizetype
));
3355 emit_library_call (chkr_set_right_libfunc
,
3356 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3357 Pmode
, GEN_INT (INTVAL (size
) - used
),
3358 TYPE_MODE (sizetype
),
3359 GEN_INT (MEMORY_USE_RW
),
3360 TYPE_MODE (integer_type_node
));
3361 in_check_memory_usage
= 0;
3365 #endif /* PUSH_ROUNDING */
3369 /* Otherwise make space on the stack and copy the data
3370 to the address of that space. */
3372 /* Deduct words put into registers from the size we must copy. */
3375 if (GET_CODE (size
) == CONST_INT
)
3376 size
= GEN_INT (INTVAL (size
) - used
);
3378 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3379 GEN_INT (used
), NULL_RTX
, 0,
3383 /* Get the address of the stack space.
3384 In this case, we do not deal with EXTRA separately.
3385 A single stack adjust will do. */
3388 temp
= push_block (size
, extra
, where_pad
== downward
);
3391 else if (GET_CODE (args_so_far
) == CONST_INT
)
3392 temp
= memory_address (BLKmode
,
3393 plus_constant (args_addr
,
3394 skip
+ INTVAL (args_so_far
)));
3396 temp
= memory_address (BLKmode
,
3397 plus_constant (gen_rtx_PLUS (Pmode
,
3401 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3403 in_check_memory_usage
= 1;
3404 target
= copy_to_reg (temp
);
3405 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3406 emit_library_call (chkr_copy_bitmap_libfunc
,
3407 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3409 XEXP (xinner
, 0), Pmode
,
3410 size
, TYPE_MODE (sizetype
));
3412 emit_library_call (chkr_set_right_libfunc
,
3413 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3415 size
, TYPE_MODE (sizetype
),
3416 GEN_INT (MEMORY_USE_RW
),
3417 TYPE_MODE (integer_type_node
));
3418 in_check_memory_usage
= 0;
3421 target
= gen_rtx_MEM (BLKmode
, temp
);
3425 set_mem_attributes (target
, type
, 1);
3426 /* Function incoming arguments may overlap with sibling call
3427 outgoing arguments and we cannot allow reordering of reads
3428 from function arguments with stores to outgoing arguments
3429 of sibling calls. */
3430 set_mem_alias_set (target
, 0);
3433 set_mem_align (target
, align
);
3435 /* TEMP is the address of the block. Copy the data there. */
3436 if (GET_CODE (size
) == CONST_INT
3437 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3439 move_by_pieces (target
, xinner
, INTVAL (size
), align
);
3444 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3445 enum machine_mode mode
;
3447 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3449 mode
= GET_MODE_WIDER_MODE (mode
))
3451 enum insn_code code
= movstr_optab
[(int) mode
];
3452 insn_operand_predicate_fn pred
;
3454 if (code
!= CODE_FOR_nothing
3455 && ((GET_CODE (size
) == CONST_INT
3456 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3457 <= (GET_MODE_MASK (mode
) >> 1)))
3458 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3459 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3460 || ((*pred
) (target
, BLKmode
)))
3461 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3462 || ((*pred
) (xinner
, BLKmode
)))
3463 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3464 || ((*pred
) (opalign
, VOIDmode
))))
3466 rtx op2
= convert_to_mode (mode
, size
, 1);
3467 rtx last
= get_last_insn ();
3470 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3471 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3472 op2
= copy_to_mode_reg (mode
, op2
);
3474 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3482 delete_insns_since (last
);
3487 if (!ACCUMULATE_OUTGOING_ARGS
)
3489 /* If the source is referenced relative to the stack pointer,
3490 copy it to another register to stabilize it. We do not need
3491 to do this if we know that we won't be changing sp. */
3493 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3494 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3495 temp
= copy_to_reg (temp
);
3498 /* Make inhibit_defer_pop nonzero around the library call
3499 to force it to pop the bcopy-arguments right away. */
3501 #ifdef TARGET_MEM_FUNCTIONS
3502 emit_library_call (memcpy_libfunc
, LCT_NORMAL
,
3503 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3504 convert_to_mode (TYPE_MODE (sizetype
),
3505 size
, TREE_UNSIGNED (sizetype
)),
3506 TYPE_MODE (sizetype
));
3508 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3509 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3510 convert_to_mode (TYPE_MODE (integer_type_node
),
3512 TREE_UNSIGNED (integer_type_node
)),
3513 TYPE_MODE (integer_type_node
));
3518 else if (partial
> 0)
3520 /* Scalar partly in registers. */
3522 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3525 /* # words of start of argument
3526 that we must make space for but need not store. */
3527 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3528 int args_offset
= INTVAL (args_so_far
);
3531 /* Push padding now if padding above and stack grows down,
3532 or if padding below and stack grows up.
3533 But if space already allocated, this has already been done. */
3534 if (extra
&& args_addr
== 0
3535 && where_pad
!= none
&& where_pad
!= stack_direction
)
3536 anti_adjust_stack (GEN_INT (extra
));
3538 /* If we make space by pushing it, we might as well push
3539 the real data. Otherwise, we can leave OFFSET nonzero
3540 and leave the space uninitialized. */
3544 /* Now NOT_STACK gets the number of words that we don't need to
3545 allocate on the stack. */
3546 not_stack
= partial
- offset
;
3548 /* If the partial register-part of the arg counts in its stack size,
3549 skip the part of stack space corresponding to the registers.
3550 Otherwise, start copying to the beginning of the stack space,
3551 by setting SKIP to 0. */
3552 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3554 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3555 x
= validize_mem (force_const_mem (mode
, x
));
3557 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3558 SUBREGs of such registers are not allowed. */
3559 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3560 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3561 x
= copy_to_reg (x
);
3563 /* Loop over all the words allocated on the stack for this arg. */
3564 /* We can do it by words, because any scalar bigger than a word
3565 has a size a multiple of a word. */
3566 #ifndef PUSH_ARGS_REVERSED
3567 for (i
= not_stack
; i
< size
; i
++)
3569 for (i
= size
- 1; i
>= not_stack
; i
--)
3571 if (i
>= not_stack
+ offset
)
3572 emit_push_insn (operand_subword_force (x
, i
, mode
),
3573 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3575 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3577 reg_parm_stack_space
, alignment_pad
);
3582 rtx target
= NULL_RTX
;
3585 /* Push padding now if padding above and stack grows down,
3586 or if padding below and stack grows up.
3587 But if space already allocated, this has already been done. */
3588 if (extra
&& args_addr
== 0
3589 && where_pad
!= none
&& where_pad
!= stack_direction
)
3590 anti_adjust_stack (GEN_INT (extra
));
3592 #ifdef PUSH_ROUNDING
3593 if (args_addr
== 0 && PUSH_ARGS
)
3594 emit_single_push_insn (mode
, x
, type
);
3598 if (GET_CODE (args_so_far
) == CONST_INT
)
3600 = memory_address (mode
,
3601 plus_constant (args_addr
,
3602 INTVAL (args_so_far
)));
3604 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3607 dest
= gen_rtx_MEM (mode
, addr
);
3610 set_mem_attributes (dest
, type
, 1);
3611 /* Function incoming arguments may overlap with sibling call
3612 outgoing arguments and we cannot allow reordering of reads
3613 from function arguments with stores to outgoing arguments
3614 of sibling calls. */
3615 set_mem_alias_set (dest
, 0);
3618 emit_move_insn (dest
, x
);
3622 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3624 in_check_memory_usage
= 1;
3626 target
= get_push_address (GET_MODE_SIZE (mode
));
3628 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3629 emit_library_call (chkr_copy_bitmap_libfunc
,
3630 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3631 Pmode
, XEXP (x
, 0), Pmode
,
3632 GEN_INT (GET_MODE_SIZE (mode
)),
3633 TYPE_MODE (sizetype
));
3635 emit_library_call (chkr_set_right_libfunc
,
3636 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3637 Pmode
, GEN_INT (GET_MODE_SIZE (mode
)),
3638 TYPE_MODE (sizetype
),
3639 GEN_INT (MEMORY_USE_RW
),
3640 TYPE_MODE (integer_type_node
));
3641 in_check_memory_usage
= 0;
3646 /* If part should go in registers, copy that part
3647 into the appropriate registers. Do this now, at the end,
3648 since mem-to-mem copies above may do function calls. */
3649 if (partial
> 0 && reg
!= 0)
3651 /* Handle calls that pass values in multiple non-contiguous locations.
3652 The Irix 6 ABI has examples of this. */
3653 if (GET_CODE (reg
) == PARALLEL
)
3654 emit_group_load (reg
, x
, -1); /* ??? size? */
3656 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3659 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3660 anti_adjust_stack (GEN_INT (extra
));
3662 if (alignment_pad
&& args_addr
== 0)
3663 anti_adjust_stack (alignment_pad
);
/* NOTE(review): the span below is the interior of a boolean test -- from the
   leading comment it appears to belong to the "can X be used as a subtarget"
   helper (get_subtarget in pristine GCC expr.c).  The enclosing function
   header, the leading "x == 0" clause and the final "? 0 : x" result were
   lost in extraction -- TODO confirm against the pristine file.  Each clause
   rejects X as a reuse target for intermediate arithmetic results.  */
3666 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3674 /* Only registers can be subtargets. */
3675 || GET_CODE (x
) != REG
3676 /* If the register is readonly, it can't be set more than once. */
3677 || RTX_UNCHANGING_P (x
)
3678 /* Don't use hard regs to avoid extending their life. */
3679 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3680 /* Avoid subtargets inside loops,
3681 since they hide some invariant expressions. */
3682 || preserve_subexpressions_p ())
/* NOTE(review): expand_assignment below is damaged by extraction -- braces,
   `else` keywords, local declarations and some statements were dropped, and
   original line numbers are fused into the text.  The code bytes are kept
   exactly as found; only reviewer comments are added.  Restore from a
   pristine expr.c before attempting to compile.

   Visible structure (grounded in the lines below): an ERROR_MARK guard that
   just expands FROM; a COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF/ARRAY_RANGE_REF
   path that uses get_inner_reference + store_field; a CALL_EXPR rhs path that
   expands the call before the lhs; a RESULT_DECL-into-REG/PARALLEL path; an
   overlapping struct-return path using memmove/bcopy libcalls; and the
   ordinary fall-through into store_expr.  */
3686 /* Expand an assignment that stores the value of FROM into TO.
3687 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3688 (This may contain a QUEUED rtx;
3689 if the value is constant, this rtx is a constant.)
3690 Otherwise, the returned value is NULL_RTX.
3692 SUGGEST_REG is no longer actually used.
3693 It used to mean, copy the value through a register
3694 and return that register, if that is possible.
3695 We now use WANT_VALUE to decide whether to do this. */
3698 expand_assignment (to
, from
, want_value
, suggest_reg
)
3701 int suggest_reg ATTRIBUTE_UNUSED
;
3706 /* Don't crash if the lhs of the assignment was erroneous. */
3708 if (TREE_CODE (to
) == ERROR_MARK
)
3710 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3711 return want_value
? result
: NULL_RTX
;
3714 /* Assignment of a structure component needs special treatment
3715 if the structure component's rtx is not simply a MEM.
3716 Assignment of an array element at a constant index, and assignment of
3717 an array element in an unaligned packed structure field, has the same
3720 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3721 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
)
3723 enum machine_mode mode1
;
3724 HOST_WIDE_INT bitsize
, bitpos
;
3729 unsigned int alignment
;
3732 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3733 &unsignedp
, &volatilep
, &alignment
);
3735 /* If we are going to use store_bit_field and extract_bit_field,
3736 make sure to_rtx will be safe for multiple use. */
3738 if (mode1
== VOIDmode
&& want_value
)
3739 tem
= stabilize_reference (tem
);
3741 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3744 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3746 if (GET_CODE (to_rtx
) != MEM
)
3749 if (GET_MODE (offset_rtx
) != ptr_mode
)
3750 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3752 #ifdef POINTERS_EXTEND_UNSIGNED
3753 if (GET_MODE (offset_rtx
) != Pmode
)
3754 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
3757 /* A constant address in TO_RTX can have VOIDmode, we must not try
3758 to call force_reg for that case. Avoid that case. */
3759 if (GET_CODE (to_rtx
) == MEM
3760 && GET_MODE (to_rtx
) == BLKmode
3761 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3763 && (bitpos
% bitsize
) == 0
3764 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3765 && alignment
== GET_MODE_ALIGNMENT (mode1
))
3768 = adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3770 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3773 to_rtx
= (replace_equiv_address
3774 (to_rtx
, force_reg (GET_MODE (XEXP (temp
, 0)),
3779 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3780 highest_pow2_factor (offset
));
3785 if (GET_CODE (to_rtx
) == MEM
)
3787 /* When the offset is zero, to_rtx is the address of the
3788 structure we are storing into, and hence may be shared.
3789 We must make a new MEM before setting the volatile bit. */
3791 to_rtx
= copy_rtx (to_rtx
);
3793 MEM_VOLATILE_P (to_rtx
) = 1;
3795 #if 0 /* This was turned off because, when a field is volatile
3796 in an object which is not volatile, the object may be in a register,
3797 and then we would abort over here. */
3803 if (TREE_CODE (to
) == COMPONENT_REF
3804 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3807 to_rtx
= copy_rtx (to_rtx
);
3809 RTX_UNCHANGING_P (to_rtx
) = 1;
3812 /* Check the access. */
3813 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3818 enum machine_mode best_mode
;
3820 best_mode
= get_best_mode (bitsize
, bitpos
,
3821 TYPE_ALIGN (TREE_TYPE (tem
)),
3823 if (best_mode
== VOIDmode
)
3826 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3827 to_addr
= plus_constant (XEXP (to_rtx
, 0), (bitpos
/ BITS_PER_UNIT
));
3828 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3829 size
*= GET_MODE_SIZE (best_mode
);
3831 /* Check the access right of the pointer. */
3832 in_check_memory_usage
= 1;
3834 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
3835 VOIDmode
, 3, to_addr
, Pmode
,
3836 GEN_INT (size
), TYPE_MODE (sizetype
),
3837 GEN_INT (MEMORY_USE_WO
),
3838 TYPE_MODE (integer_type_node
));
3839 in_check_memory_usage
= 0;
3842 /* If this is a varying-length object, we must get the address of
3843 the source and do an explicit block move. */
3846 unsigned int from_align
;
3847 rtx from_rtx
= expand_expr_unaligned (from
, &from_align
);
3849 = adjust_address (to_rtx
, BLKmode
, bitpos
/ BITS_PER_UNIT
);
3851 emit_block_move (inner_to_rtx
, from_rtx
, expr_size (from
));
3859 if (! can_address_p (to
))
3861 to_rtx
= copy_rtx (to_rtx
);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3865 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode
)
3869 TYPE_MODE (TREE_TYPE (to
)))
3871 unsignedp
, int_size_in_bytes (TREE_TYPE (tem
)),
3872 get_alias_set (to
));
3874 preserve_temp_slots (result
);
3878 /* If the value is meaningful, convert RESULT to the proper mode.
3879 Otherwise, return nothing. */
3880 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3881 TYPE_MODE (TREE_TYPE (from
)),
3883 TREE_UNSIGNED (TREE_TYPE (to
)))
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
3892 requires loading up part of an address in a separate insn.
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign- extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
3898 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3900 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3901 && GET_CODE (DECL_RTL (to
)) == REG
))
3906 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3908 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx
) == PARALLEL
)
3913 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
3914 else if (GET_MODE (to_rtx
) == BLKmode
)
3915 emit_block_move (to_rtx
, value
, expr_size (from
));
3918 #ifdef POINTERS_EXTEND_UNSIGNED
3919 if (POINTER_TYPE_P (TREE_TYPE (to
))
3920 && GET_MODE (to_rtx
) != GET_MODE (value
))
3921 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3923 emit_move_insn (to_rtx
, value
);
3925 preserve_temp_slots (to_rtx
);
3928 return want_value
? to_rtx
: NULL_RTX
;
3931 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3932 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3935 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3937 /* Don't move directly into a return register. */
3938 if (TREE_CODE (to
) == RESULT_DECL
3939 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3944 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3946 if (GET_CODE (to_rtx
) == PARALLEL
)
3947 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
3949 emit_move_insn (to_rtx
, temp
);
3951 preserve_temp_slots (to_rtx
);
3954 return want_value
? to_rtx
: NULL_RTX
;
3957 /* In case we are returning the contents of an object which overlaps
3958 the place the value is being stored, use a safe function when copying
3959 a value through a pointer into a structure value return block. */
3960 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3961 && current_function_returns_struct
3962 && !current_function_returns_pcc_struct
)
3967 size
= expr_size (from
);
3968 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3969 EXPAND_MEMORY_USE_DONT
);
3971 /* Copy the rights of the bitmap. */
3972 if (current_function_check_memory_usage
)
3973 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
3974 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3975 XEXP (from_rtx
, 0), Pmode
,
3976 convert_to_mode (TYPE_MODE (sizetype
),
3977 size
, TREE_UNSIGNED (sizetype
)),
3978 TYPE_MODE (sizetype
));
3980 #ifdef TARGET_MEM_FUNCTIONS
3981 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3982 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3983 XEXP (from_rtx
, 0), Pmode
,
3984 convert_to_mode (TYPE_MODE (sizetype
),
3985 size
, TREE_UNSIGNED (sizetype
)),
3986 TYPE_MODE (sizetype
));
3988 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3989 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3990 XEXP (to_rtx
, 0), Pmode
,
3991 convert_to_mode (TYPE_MODE (integer_type_node
),
3992 size
, TREE_UNSIGNED (integer_type_node
)),
3993 TYPE_MODE (integer_type_node
));
3996 preserve_temp_slots (to_rtx
);
3999 return want_value
? to_rtx
: NULL_RTX
;
4002 /* Compute FROM and store the value in the rtx we got. */
4005 result
= store_expr (from
, to_rtx
, want_value
);
4006 preserve_temp_slots (result
);
4009 return want_value
? result
: NULL_RTX
;
/* NOTE(review): store_expr below is damaged by extraction -- braces, `else`
   keywords, local declarations (e.g. `temp`, `size`, `copy_size`, `addr`,
   `label`) and several statements were dropped, original line numbers are
   fused in, and some comments lost their terminators.  Code bytes are kept
   exactly as found; only reviewer comments are added.  Restore from a
   pristine expr.c before attempting to compile.

   Visible structure (grounded in the lines below): COMPOUND_EXPR recursion;
   a BLKmode COND_EXPR path using labels + jumpifnot; a queued-subexpression
   path expanding into a fresh pseudo; a promoted-SUBREG target path using
   convert_modes/convert_move; then the generic expand + store path, with
   special handling for STRING_CST into a larger array (copy then
   clear_storage the remainder) and PARALLEL/BLKmode targets.  */
4012 /* Generate code for computing expression EXP,
4013 and storing the value into TARGET.
4014 TARGET may contain a QUEUED rtx.
4016 If WANT_VALUE is nonzero, return a copy of the value
4017 not in TARGET, so that we can be sure to use the proper
4018 value in a containing expression even if TARGET has something
4019 else stored in it. If possible, we copy the value through a pseudo
4020 and return that pseudo. Or, if the value is constant, we try to
4021 return the constant. In some cases, we return a pseudo
4022 copied *from* TARGET.
4024 If the mode is BLKmode then we may return TARGET itself.
4025 It turns out that in BLKmode it doesn't cause a problem.
4026 because C has no operators that could combine two different
4027 assignments into the same BLKmode object with different values
4028 with no sequence point. Will other languages need this to
4031 If WANT_VALUE is 0, we return NULL, to make sure
4032 to catch quickly any cases where the caller uses the value
4033 and fails to set WANT_VALUE. */
4036 store_expr (exp
, target
, want_value
)
4042 int dont_return_target
= 0;
4043 int dont_store_target
= 0;
4045 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4047 /* Perform first part of compound expression, then assign from second
4049 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4051 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4053 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4055 /* For conditional expression, get safe form of the target. Then
4056 test the condition, doing the appropriate assignment on either
4057 side. This avoids the creation of unnecessary temporaries.
4058 For non-BLKmode, it is more efficient not to do this. */
4060 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4063 target
= protect_from_queue (target
, 1);
4065 do_pending_stack_adjust ();
4067 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4068 start_cleanup_deferral ();
4069 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
4070 end_cleanup_deferral ();
4072 emit_jump_insn (gen_jump (lab2
));
4075 start_cleanup_deferral ();
4076 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
4077 end_cleanup_deferral ();
4082 return want_value
? target
: NULL_RTX
;
4084 else if (queued_subexp_p (target
))
4085 /* If target contains a postincrement, let's not risk
4086 using it as the place to generate the rhs. */
4088 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4090 /* Expand EXP into a new pseudo. */
4091 temp
= gen_reg_rtx (GET_MODE (target
));
4092 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
4095 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
4097 /* If target is volatile, ANSI requires accessing the value
4098 *from* the target, if it is accessed. So make that happen.
4099 In no case return the target itself. */
4100 if (! MEM_VOLATILE_P (target
) && want_value
)
4101 dont_return_target
= 1;
4103 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
4104 && GET_MODE (target
) != BLKmode
)
4105 /* If target is in memory and caller wants value in a register instead,
4106 arrange that. Pass TARGET as target for expand_expr so that,
4107 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4108 We know expand_expr will not use the target in that case.
4109 Don't do this if TARGET is volatile because we are supposed
4110 to write it and then read it. */
4112 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4113 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4115 /* If TEMP is already in the desired TARGET, only copy it from
4116 memory and don't store it there again. */
4118 || (rtx_equal_p (temp
, target
)
4119 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4120 dont_store_target
= 1;
4121 temp
= copy_to_reg (temp
);
4123 dont_return_target
= 1;
4125 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4126 /* If this is an scalar in a register that is stored in a wider mode
4127 than the declared mode, compute the result into its declared mode
4128 and then convert to the wider mode. Our value is the computed
4131 /* If we don't want a value, we can do the conversion inside EXP,
4132 which will often result in some optimizations. Do the conversion
4133 in two steps: first change the signedness, if needed, then
4134 the extend. But don't do this if the type of EXP is a subtype
4135 of something else since then the conversion might involve
4136 more than just converting modes. */
4137 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4138 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4140 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4141 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4144 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
4148 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
4149 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4153 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4155 /* If TEMP is a volatile MEM and we want a result value, make
4156 the access now so it gets done only once. Likewise if
4157 it contains TARGET. */
4158 if (GET_CODE (temp
) == MEM
&& want_value
4159 && (MEM_VOLATILE_P (temp
)
4160 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4161 temp
= copy_to_reg (temp
);
4163 /* If TEMP is a VOIDmode constant, use convert_modes to make
4164 sure that we properly convert it. */
4165 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4167 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4168 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4169 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4170 GET_MODE (target
), temp
,
4171 SUBREG_PROMOTED_UNSIGNED_P (target
));
4174 convert_move (SUBREG_REG (target
), temp
,
4175 SUBREG_PROMOTED_UNSIGNED_P (target
));
4177 /* If we promoted a constant, change the mode back down to match
4178 target. Otherwise, the caller might get confused by a result whose
4179 mode is larger than expected. */
4181 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
4182 && GET_MODE (temp
) != VOIDmode
)
4184 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4185 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4186 SUBREG_PROMOTED_UNSIGNED_P (temp
)
4187 = SUBREG_PROMOTED_UNSIGNED_P (target
);
4190 return want_value
? temp
: NULL_RTX
;
4194 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4195 /* Return TARGET if it's a specified hardware register.
4196 If TARGET is a volatile mem ref, either return TARGET
4197 or return a reg copied *from* TARGET; ANSI requires this.
4199 Otherwise, if TEMP is not TARGET, return TEMP
4200 if it is constant (for efficiency),
4201 or if we really want the correct value. */
4202 if (!(target
&& GET_CODE (target
) == REG
4203 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4204 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4205 && ! rtx_equal_p (temp
, target
)
4206 && (CONSTANT_P (temp
) || want_value
))
4207 dont_return_target
= 1;
4210 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4211 the same as that of TARGET, adjust the constant. This is needed, for
4212 example, in case it is a CONST_DOUBLE and we want only a word-sized
4214 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4215 && TREE_CODE (exp
) != ERROR_MARK
4216 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4217 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4218 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4220 if (current_function_check_memory_usage
4221 && GET_CODE (target
) == MEM
4222 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
4224 in_check_memory_usage
= 1;
4225 if (GET_CODE (temp
) == MEM
)
4226 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
4227 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
4228 XEXP (temp
, 0), Pmode
,
4229 expr_size (exp
), TYPE_MODE (sizetype
));
4231 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
4232 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
4233 expr_size (exp
), TYPE_MODE (sizetype
),
4234 GEN_INT (MEMORY_USE_WO
),
4235 TYPE_MODE (integer_type_node
));
4236 in_check_memory_usage
= 0;
4239 /* If value was not generated in the target, store it there.
4240 Convert the value to TARGET's type first if nec. */
4241 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4242 one or both of them are volatile memory refs, we have to distinguish
4244 - expand_expr has used TARGET. In this case, we must not generate
4245 another copy. This can be detected by TARGET being equal according
4247 - expand_expr has not used TARGET - that means that the source just
4248 happens to have the same RTX form. Since temp will have been created
4249 by expand_expr, it will compare unequal according to == .
4250 We must generate a copy in this case, to reach the correct number
4251 of volatile memory references. */
4253 if ((! rtx_equal_p (temp
, target
)
4254 || (temp
!= target
&& (side_effects_p (temp
)
4255 || side_effects_p (target
))))
4256 && TREE_CODE (exp
) != ERROR_MARK
4257 && ! dont_store_target
)
4259 target
= protect_from_queue (target
, 1);
4260 if (GET_MODE (temp
) != GET_MODE (target
)
4261 && GET_MODE (temp
) != VOIDmode
)
4263 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4264 if (dont_return_target
)
4266 /* In this case, we will return TEMP,
4267 so make sure it has the proper mode.
4268 But don't forget to store the value into TARGET. */
4269 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4270 emit_move_insn (target
, temp
);
4273 convert_move (target
, temp
, unsignedp
);
4276 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4278 /* Handle copying a string constant into an array.
4279 The string constant may be shorter than the array.
4280 So copy just the string's actual length, and clear the rest. */
4284 /* Get the size of the data type of the string,
4285 which is actually the size of the target. */
4286 size
= expr_size (exp
);
4287 if (GET_CODE (size
) == CONST_INT
4288 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4289 emit_block_move (target
, temp
, size
);
4292 /* Compute the size of the data to copy from the string. */
4294 = size_binop (MIN_EXPR
,
4295 make_tree (sizetype
, size
),
4296 size_int (TREE_STRING_LENGTH (exp
)));
4297 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4301 /* Copy that much. */
4302 emit_block_move (target
, temp
, copy_size_rtx
);
4304 /* Figure out how much is left in TARGET that we have to clear.
4305 Do all calculations in ptr_mode. */
4307 addr
= XEXP (target
, 0);
4308 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
4310 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4312 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
4313 size
= plus_constant (size
, -TREE_STRING_LENGTH (exp
));
4317 addr
= force_reg (ptr_mode
, addr
);
4318 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4319 copy_size_rtx
, NULL_RTX
, 0,
4322 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4323 copy_size_rtx
, NULL_RTX
, 0,
4326 label
= gen_label_rtx ();
4327 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4328 GET_MODE (size
), 0, 0, label
);
4331 if (size
!= const0_rtx
)
4333 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4335 MEM_COPY_ATTRIBUTES (dest
, target
);
4337 /* Be sure we can write on ADDR. */
4338 in_check_memory_usage
= 1;
4339 if (current_function_check_memory_usage
)
4340 emit_library_call (chkr_check_addr_libfunc
,
4341 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
4343 size
, TYPE_MODE (sizetype
),
4344 GEN_INT (MEMORY_USE_WO
),
4345 TYPE_MODE (integer_type_node
));
4346 in_check_memory_usage
= 0;
4347 clear_storage (dest
, size
);
4354 /* Handle calls that return values in multiple non-contiguous locations.
4355 The Irix 6 ABI has examples of this. */
4356 else if (GET_CODE (target
) == PARALLEL
)
4357 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4358 else if (GET_MODE (temp
) == BLKmode
)
4359 emit_block_move (target
, temp
, expr_size (exp
));
4361 emit_move_insn (target
, temp
);
4364 /* If we don't want a value, return NULL_RTX. */
4368 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4369 ??? The latter test doesn't seem to make sense. */
4370 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4373 /* Return TARGET itself if it is a hard register. */
4374 else if (want_value
&& GET_MODE (target
) != BLKmode
4375 && ! (GET_CODE (target
) == REG
4376 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4377 return copy_to_reg (target
);
/* NOTE(review): the span below is the interior of is_zeros_p (the name is
   visible in its recursive calls).  The function header, several `case`
   labels (INTEGER_CST, COMPLEX_CST, REAL_CST, CONSTRUCTOR appear to have
   been here, judging by the visible return expressions), the closing braces
   and the default return were lost in extraction -- TODO restore from a
   pristine expr.c.  Code bytes are kept exactly as found.  */
4383 /* Return 1 if EXP just contains zeros. */
4391 switch (TREE_CODE (exp
))
4395 case NON_LVALUE_EXPR
:
4396 return is_zeros_p (TREE_OPERAND (exp
, 0));
4399 return integer_zerop (exp
);
4403 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4406 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4409 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4410 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4411 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4412 if (! is_zeros_p (TREE_VALUE (elt
)))
/* NOTE(review): mostly_zeros_p below is damaged by extraction -- the return
   type line, parameter declaration, braces, and the statements that update
   `zeros`/`elts` inside the loop were dropped.  Code bytes are kept exactly
   as found; only reviewer comments are added.  The visible logic: for a
   CONSTRUCTOR, count elements and (mostly-)zero elements and report whether
   at least 3/4 are zero; for a SET_TYPE constructor, all-zero iff it has no
   elements; otherwise defer to is_zeros_p.  */
4422 /* Return 1 if EXP contains mostly (3/4) zeros. */
4425 mostly_zeros_p (exp
)
4428 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4430 int elts
= 0, zeros
= 0;
4431 tree elt
= CONSTRUCTOR_ELTS (exp
);
4432 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4434 /* If there are no ranges of true bits, it is all zero. */
4435 return elt
== NULL_TREE
;
4437 for (; elt
; elt
= TREE_CHAIN (elt
))
4439 /* We do not handle the case where the index is a RANGE_EXPR,
4440 so the statistic will be somewhat inaccurate.
4441 We do make a more accurate count in store_constructor itself,
4442 so since this function is only used for nested array elements,
4443 this should be close enough. */
4444 if (mostly_zeros_p (TREE_VALUE (elt
)))
4449 return 4 * zeros
>= 3 * elts
;
4452 return is_zeros_p (exp
);
/* NOTE(review): store_constructor_field below is damaged by extraction --
   the return type line, some parameter declarations (target, exp, type,
   cleared, alias_set), braces, the `else` before the store_field fall-back,
   and the lhs of the adjust_address assignment were dropped.  Code bytes are
   kept exactly as found; only reviewer comments are added.  Visible logic:
   for a byte-aligned CONSTRUCTOR value (with MEM or bitpos==0 target),
   re-point the MEM at the sub-object, fix its alias set if needed, and
   recurse into store_constructor; otherwise delegate to store_field.  */
4455 /* Helper function for store_constructor.
4456 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4457 TYPE is the type of the CONSTRUCTOR, not the element type.
4458 CLEARED is as for store_constructor.
4459 ALIAS_SET is the alias set to use for any stores.
4461 This provides a recursive shortcut back to store_constructor when it isn't
4462 necessary to go through store_field. This is so that we can pass through
4463 the cleared field to let store_constructor know that we may not have to
4464 clear a substructure if the outer structure has already been cleared. */
4467 store_constructor_field (target
, bitsize
, bitpos
, mode
, exp
, type
, cleared
,
4470 unsigned HOST_WIDE_INT bitsize
;
4471 HOST_WIDE_INT bitpos
;
4472 enum machine_mode mode
;
4477 if (TREE_CODE (exp
) == CONSTRUCTOR
4478 && bitpos
% BITS_PER_UNIT
== 0
4479 /* If we have a non-zero bitpos for a register target, then we just
4480 let store_field do the bitfield handling. This is unlikely to
4481 generate unnecessary clear instructions anyways. */
4482 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4484 if (GET_CODE (target
) == MEM
)
4486 = adjust_address (target
,
4487 GET_MODE (target
) == BLKmode
4489 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4490 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4493 /* Update the alias set, if required. */
4494 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4495 && MEM_ALIAS_SET (target
) != 0)
4497 target
= copy_rtx (target
);
4498 set_mem_alias_set (target
, alias_set
);
4501 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4504 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4505 int_size_in_bytes (type
), alias_set
);
4508 /* Store the value of constructor EXP into the rtx TARGET.
4509 TARGET is either a REG or a MEM; we know it cannot conflict, since
4510 safe_from_p has been called.
4511 CLEARED is true if TARGET is known to have been zero'd.
4512 SIZE is the number of bytes of TARGET we are allowed to modify: this
4513 may not be the same as the size of EXP if we are assigning to a field
4514 which has been packed to exclude padding bits. */
4517 store_constructor (exp
, target
, cleared
, size
)
4523 tree type
= TREE_TYPE (exp
);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4528 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4529 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4533 /* We either clear the aggregate or indicate the value is dead. */
4534 if ((TREE_CODE (type
) == UNION_TYPE
4535 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4537 && ! CONSTRUCTOR_ELTS (exp
))
4538 /* If the constructor is empty, clear the union. */
4540 clear_storage (target
, expr_size (exp
));
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4549 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4551 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
4560 else if (! cleared
&& size
> 0
4561 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4562 != fields_length (type
))
4563 || mostly_zeros_p (exp
))
4564 && (GET_CODE (target
) != REG
4565 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4568 clear_storage (target
, GEN_INT (size
));
4573 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4575 /* Store each element of the constructor into
4576 the corresponding field of TARGET. */
4578 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4580 tree field
= TREE_PURPOSE (elt
);
4581 #ifdef WORD_REGISTER_OPERATIONS
4582 tree value
= TREE_VALUE (elt
);
4584 enum machine_mode mode
;
4585 HOST_WIDE_INT bitsize
;
4586 HOST_WIDE_INT bitpos
= 0;
4589 rtx to_rtx
= target
;
4591 /* Just ignore missing fields.
4592 We cleared the whole structure, above,
4593 if any fields are missing. */
4597 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4600 if (host_integerp (DECL_SIZE (field
), 1))
4601 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4605 unsignedp
= TREE_UNSIGNED (field
);
4606 mode
= DECL_MODE (field
);
4607 if (DECL_BIT_FIELD (field
))
4610 offset
= DECL_FIELD_OFFSET (field
);
4611 if (host_integerp (offset
, 0)
4612 && host_integerp (bit_position (field
), 0))
4614 bitpos
= int_bit_position (field
);
4618 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4624 if (contains_placeholder_p (offset
))
4625 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4626 offset
, make_tree (TREE_TYPE (exp
), target
));
4628 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4629 if (GET_CODE (to_rtx
) != MEM
)
4632 if (GET_MODE (offset_rtx
) != ptr_mode
)
4633 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4635 #ifdef POINTERS_EXTEND_UNSIGNED
4636 if (GET_MODE (offset_rtx
) != Pmode
)
4637 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4640 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4641 highest_pow2_factor (offset
));
4644 if (TREE_READONLY (field
))
4646 if (GET_CODE (to_rtx
) == MEM
)
4647 to_rtx
= copy_rtx (to_rtx
);
4649 RTX_UNCHANGING_P (to_rtx
) = 1;
4652 #ifdef WORD_REGISTER_OPERATIONS
4653 /* If this initializes a field that is smaller than a word, at the
4654 start of a word, try to widen it to a full word.
4655 This special case allows us to output C++ member function
4656 initializations in a form that the optimizers can understand. */
4657 if (GET_CODE (target
) == REG
4658 && bitsize
< BITS_PER_WORD
4659 && bitpos
% BITS_PER_WORD
== 0
4660 && GET_MODE_CLASS (mode
) == MODE_INT
4661 && TREE_CODE (value
) == INTEGER_CST
4663 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4665 tree type
= TREE_TYPE (value
);
4667 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4669 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4670 value
= convert (type
, value
);
4673 if (BYTES_BIG_ENDIAN
)
4675 = fold (build (LSHIFT_EXPR
, type
, value
,
4676 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4677 bitsize
= BITS_PER_WORD
;
4682 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4683 && DECL_NONADDRESSABLE_P (field
))
4685 to_rtx
= copy_rtx (to_rtx
);
4686 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4689 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4690 TREE_VALUE (elt
), type
, cleared
,
4691 get_alias_set (TREE_TYPE (field
)));
4694 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4699 tree domain
= TYPE_DOMAIN (type
);
4700 tree elttype
= TREE_TYPE (type
);
4701 int const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4702 && TYPE_MAX_VALUE (domain
)
4703 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4704 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4705 HOST_WIDE_INT minelt
= 0;
4706 HOST_WIDE_INT maxelt
= 0;
4708 /* If we have constant bounds for the range of the type, get them. */
4711 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4712 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4715 /* If the constructor has fewer elements than the array,
4716 clear the whole array first. Similarly if this is
4717 static constructor of a non-BLKmode object. */
4718 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4722 HOST_WIDE_INT count
= 0, zero_count
= 0;
4723 need_to_clear
= ! const_bounds_p
;
4725 /* This loop is a more accurate version of the loop in
4726 mostly_zeros_p (it handles RANGE_EXPR in an index).
4727 It is also needed to check for missing elements. */
4728 for (elt
= CONSTRUCTOR_ELTS (exp
);
4729 elt
!= NULL_TREE
&& ! need_to_clear
;
4730 elt
= TREE_CHAIN (elt
))
4732 tree index
= TREE_PURPOSE (elt
);
4733 HOST_WIDE_INT this_node_count
;
4735 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4737 tree lo_index
= TREE_OPERAND (index
, 0);
4738 tree hi_index
= TREE_OPERAND (index
, 1);
4740 if (! host_integerp (lo_index
, 1)
4741 || ! host_integerp (hi_index
, 1))
4747 this_node_count
= (tree_low_cst (hi_index
, 1)
4748 - tree_low_cst (lo_index
, 1) + 1);
4751 this_node_count
= 1;
4753 count
+= this_node_count
;
4754 if (mostly_zeros_p (TREE_VALUE (elt
)))
4755 zero_count
+= this_node_count
;
4758 /* Clear the entire array first if there are any missing elements,
4759 or if the incidence of zero elements is >= 75%. */
4761 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4765 if (need_to_clear
&& size
> 0)
4768 clear_storage (target
, GEN_INT (size
));
4771 else if (REG_P (target
))
4772 /* Inform later passes that the old value is dead. */
4773 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4775 /* Store each element of the constructor into
4776 the corresponding element of TARGET, determined
4777 by counting the elements. */
4778 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4780 elt
= TREE_CHAIN (elt
), i
++)
4782 enum machine_mode mode
;
4783 HOST_WIDE_INT bitsize
;
4784 HOST_WIDE_INT bitpos
;
4786 tree value
= TREE_VALUE (elt
);
4787 tree index
= TREE_PURPOSE (elt
);
4788 rtx xtarget
= target
;
4790 if (cleared
&& is_zeros_p (value
))
4793 unsignedp
= TREE_UNSIGNED (elttype
);
4794 mode
= TYPE_MODE (elttype
);
4795 if (mode
== BLKmode
)
4796 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4797 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4800 bitsize
= GET_MODE_BITSIZE (mode
);
4802 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4804 tree lo_index
= TREE_OPERAND (index
, 0);
4805 tree hi_index
= TREE_OPERAND (index
, 1);
4806 rtx index_r
, pos_rtx
, hi_r
, loop_top
, loop_end
;
4807 struct nesting
*loop
;
4808 HOST_WIDE_INT lo
, hi
, count
;
4811 /* If the range is constant and "small", unroll the loop. */
4813 && host_integerp (lo_index
, 0)
4814 && host_integerp (hi_index
, 0)
4815 && (lo
= tree_low_cst (lo_index
, 0),
4816 hi
= tree_low_cst (hi_index
, 0),
4817 count
= hi
- lo
+ 1,
4818 (GET_CODE (target
) != MEM
4820 || (host_integerp (TYPE_SIZE (elttype
), 1)
4821 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4824 lo
-= minelt
; hi
-= minelt
;
4825 for (; lo
<= hi
; lo
++)
4827 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4829 if (GET_CODE (target
) == MEM
4830 && !MEM_KEEP_ALIAS_SET_P (target
)
4831 && TYPE_NONALIASED_COMPONENT (type
))
4833 target
= copy_rtx (target
);
4834 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4837 store_constructor_field
4838 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4839 get_alias_set (elttype
));
4844 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4845 loop_top
= gen_label_rtx ();
4846 loop_end
= gen_label_rtx ();
4848 unsignedp
= TREE_UNSIGNED (domain
);
4850 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4853 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4855 SET_DECL_RTL (index
, index_r
);
4856 if (TREE_CODE (value
) == SAVE_EXPR
4857 && SAVE_EXPR_RTL (value
) == 0)
4859 /* Make sure value gets expanded once before the
4861 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4864 store_expr (lo_index
, index_r
, 0);
4865 loop
= expand_start_loop (0);
4867 /* Assign value to element index. */
4869 = convert (ssizetype
,
4870 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4871 index
, TYPE_MIN_VALUE (domain
))));
4872 position
= size_binop (MULT_EXPR
, position
,
4874 TYPE_SIZE_UNIT (elttype
)));
4876 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4877 xtarget
= offset_address (target
, pos_rtx
,
4878 highest_pow2_factor (position
));
4879 xtarget
= adjust_address (xtarget
, mode
, 0);
4880 if (TREE_CODE (value
) == CONSTRUCTOR
)
4881 store_constructor (value
, xtarget
, cleared
,
4882 bitsize
/ BITS_PER_UNIT
);
4884 store_expr (value
, xtarget
, 0);
4886 expand_exit_loop_if_false (loop
,
4887 build (LT_EXPR
, integer_type_node
,
4890 expand_increment (build (PREINCREMENT_EXPR
,
4892 index
, integer_one_node
), 0, 0);
4894 emit_label (loop_end
);
4897 else if ((index
!= 0 && ! host_integerp (index
, 0))
4898 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4903 index
= ssize_int (1);
4906 index
= convert (ssizetype
,
4907 fold (build (MINUS_EXPR
, index
,
4908 TYPE_MIN_VALUE (domain
))));
4910 position
= size_binop (MULT_EXPR
, index
,
4912 TYPE_SIZE_UNIT (elttype
)));
4913 xtarget
= offset_address (target
,
4914 expand_expr (position
, 0, VOIDmode
, 0),
4915 highest_pow2_factor (position
));
4916 xtarget
= adjust_address (xtarget
, mode
, 0);
4917 store_expr (value
, xtarget
, 0);
4922 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4923 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4925 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4927 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4928 && TYPE_NONALIASED_COMPONENT (type
))
4930 target
= copy_rtx (target
);
4931 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4934 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4935 type
, cleared
, get_alias_set (elttype
));
4941 /* Set constructor assignments. */
4942 else if (TREE_CODE (type
) == SET_TYPE
)
4944 tree elt
= CONSTRUCTOR_ELTS (exp
);
4945 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4946 tree domain
= TYPE_DOMAIN (type
);
4947 tree domain_min
, domain_max
, bitlength
;
4949 /* The default implementation strategy is to extract the constant
4950 parts of the constructor, use that to initialize the target,
4951 and then "or" in whatever non-constant ranges we need in addition.
4953 If a large set is all zero or all ones, it is
4954 probably better to set it using memset (if available) or bzero.
4955 Also, if a large set has just a single range, it may also be
4956 better to first clear the whole set (using
4957 bzero/memset), and then set the bits we want. */
4959 /* Check for all zeros. */
4960 if (elt
== NULL_TREE
&& size
> 0)
4963 clear_storage (target
, GEN_INT (size
));
4967 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4968 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4969 bitlength
= size_binop (PLUS_EXPR
,
4970 size_diffop (domain_max
, domain_min
),
4973 nbits
= tree_low_cst (bitlength
, 1);
4975 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4976 are "complicated" (more than one range), initialize (the
4977 constant parts) by copying from a constant. */
4978 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4979 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4981 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4982 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4983 char *bit_buffer
= (char *) alloca (nbits
);
4984 HOST_WIDE_INT word
= 0;
4985 unsigned int bit_pos
= 0;
4986 unsigned int ibit
= 0;
4987 unsigned int offset
= 0; /* In bytes from beginning of set. */
4989 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4992 if (bit_buffer
[ibit
])
4994 if (BYTES_BIG_ENDIAN
)
4995 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4997 word
|= 1 << bit_pos
;
5001 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5003 if (word
!= 0 || ! cleared
)
5005 rtx datum
= GEN_INT (word
);
5008 /* The assumption here is that it is safe to use
5009 XEXP if the set is multi-word, but not if
5010 it's single-word. */
5011 if (GET_CODE (target
) == MEM
)
5012 to_rtx
= adjust_address (target
, mode
, offset
);
5013 else if (offset
== 0)
5017 emit_move_insn (to_rtx
, datum
);
5024 offset
+= set_word_size
/ BITS_PER_UNIT
;
5029 /* Don't bother clearing storage if the set is all ones. */
5030 if (TREE_CHAIN (elt
) != NULL_TREE
5031 || (TREE_PURPOSE (elt
) == NULL_TREE
5033 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5034 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5035 || (tree_low_cst (TREE_VALUE (elt
), 0)
5036 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5037 != (HOST_WIDE_INT
) nbits
))))
5038 clear_storage (target
, expr_size (exp
));
5040 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5042 /* Start of range of element or NULL. */
5043 tree startbit
= TREE_PURPOSE (elt
);
5044 /* End of range of element, or element value. */
5045 tree endbit
= TREE_VALUE (elt
);
5046 #ifdef TARGET_MEM_FUNCTIONS
5047 HOST_WIDE_INT startb
, endb
;
5049 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5051 bitlength_rtx
= expand_expr (bitlength
,
5052 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5054 /* Handle non-range tuple element like [ expr ]. */
5055 if (startbit
== NULL_TREE
)
5057 startbit
= save_expr (endbit
);
5061 startbit
= convert (sizetype
, startbit
);
5062 endbit
= convert (sizetype
, endbit
);
5063 if (! integer_zerop (domain_min
))
5065 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5066 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5068 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5069 EXPAND_CONST_ADDRESS
);
5070 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5071 EXPAND_CONST_ADDRESS
);
5077 ((build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5080 emit_move_insn (targetx
, target
);
5083 else if (GET_CODE (target
) == MEM
)
5088 #ifdef TARGET_MEM_FUNCTIONS
5089 /* Optimization: If startbit and endbit are
5090 constants divisible by BITS_PER_UNIT,
5091 call memset instead. */
5092 if (TREE_CODE (startbit
) == INTEGER_CST
5093 && TREE_CODE (endbit
) == INTEGER_CST
5094 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5095 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5097 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5099 plus_constant (XEXP (targetx
, 0),
5100 startb
/ BITS_PER_UNIT
),
5102 constm1_rtx
, TYPE_MODE (integer_type_node
),
5103 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5104 TYPE_MODE (sizetype
));
5108 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5109 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5110 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5111 startbit_rtx
, TYPE_MODE (sizetype
),
5112 endbit_rtx
, TYPE_MODE (sizetype
));
5115 emit_move_insn (target
, targetx
);
5123 /* Store the value of EXP (an expression tree)
5124 into a subfield of TARGET which has mode MODE and occupies
5125 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5126 If MODE is VOIDmode, it means that we are storing into a bit-field.
5128 If VALUE_MODE is VOIDmode, return nothing in particular.
5129 UNSIGNEDP is not used in this case.
5131 Otherwise, return an rtx for the value stored. This rtx
5132 has mode VALUE_MODE if that is convenient to do.
5133 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5135 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5137 ALIAS_SET is the alias set for the destination. This value will
5138 (in general) be different from that for TARGET, since TARGET is a
5139 reference to the containing structure. */
5142 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
,
5143 total_size
, alias_set
)
5145 HOST_WIDE_INT bitsize
;
5146 HOST_WIDE_INT bitpos
;
5147 enum machine_mode mode
;
5149 enum machine_mode value_mode
;
5151 HOST_WIDE_INT total_size
;
5154 HOST_WIDE_INT width_mask
= 0;
5156 if (TREE_CODE (exp
) == ERROR_MARK
)
5159 /* If we have nothing to store, do nothing unless the expression has
5162 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5164 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
5165 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5167 /* If we are storing into an unaligned field of an aligned union that is
5168 in a register, we may have the mode of TARGET being an integer mode but
5169 MODE == BLKmode. In that case, get an aligned object whose size and
5170 alignment are the same as TARGET and store TARGET into it (we can avoid
5171 the store if the field being stored is the entire width of TARGET). Then
5172 call ourselves recursively to store the field into a BLKmode version of
5173 that object. Finally, load from the object into TARGET. This is not
5174 very efficient in general, but should only be slightly more expensive
5175 than the otherwise-required unaligned accesses. Perhaps this can be
5176 cleaned up later. */
5179 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5183 (build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5186 rtx blk_object
= copy_rtx (object
);
5188 PUT_MODE (blk_object
, BLKmode
);
5189 set_mem_alias_set (blk_object
, 0);
5191 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5192 emit_move_insn (object
, target
);
5194 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
5195 total_size
, alias_set
);
5197 /* Even though we aren't returning target, we need to
5198 give it the updated value. */
5199 emit_move_insn (target
, object
);
5204 if (GET_CODE (target
) == CONCAT
)
5206 /* We're storing into a struct containing a single __complex. */
5210 return store_expr (exp
, target
, 0);
5213 /* If the structure is in a register or if the component
5214 is a bit field, we cannot use addressing to access it.
5215 Use bit-field techniques or SUBREG to store in it. */
5217 if (mode
== VOIDmode
5218 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5219 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5220 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5221 || GET_CODE (target
) == REG
5222 || GET_CODE (target
) == SUBREG
5223 /* If the field isn't aligned enough to store as an ordinary memref,
5224 store it as a bit field. */
5225 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5226 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5227 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5228 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5229 && (TYPE_ALIGN (TREE_TYPE (exp
)) > MEM_ALIGN (target
)
5230 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
5231 /* If the RHS and field are a constant size and the size of the
5232 RHS isn't the same size as the bitfield, we must use bitfield
5235 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5236 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5238 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5240 /* If BITSIZE is narrower than the size of the type of EXP
5241 we will be narrowing TEMP. Normally, what's wanted are the
5242 low-order bits. However, if EXP's type is a record and this is
5243 big-endian machine, we want the upper BITSIZE bits. */
5244 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5245 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
5246 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5247 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5248 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5252 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5254 if (mode
!= VOIDmode
&& mode
!= BLKmode
5255 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5256 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5258 /* If the modes of TARGET and TEMP are both BLKmode, both
5259 must be in memory and BITPOS must be aligned on a byte
5260 boundary. If so, we simply do a block copy. */
5261 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5263 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5264 || bitpos
% BITS_PER_UNIT
!= 0)
5267 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5268 emit_block_move (target
, temp
,
5269 bitsize
== -1 ? expr_size (exp
)
5270 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5273 return value_mode
== VOIDmode
? const0_rtx
: target
;
5276 /* Store the value in the bitfield. */
5277 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, total_size
);
5278 if (value_mode
!= VOIDmode
)
5280 /* The caller wants an rtx for the value.
5281 If possible, avoid refetching from the bitfield itself. */
5283 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5286 enum machine_mode tmode
;
5289 return expand_and (temp
,
5293 GET_MODE (temp
) == VOIDmode
5295 : GET_MODE (temp
))), NULL_RTX
);
5297 tmode
= GET_MODE (temp
);
5298 if (tmode
== VOIDmode
)
5300 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5301 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5302 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5305 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5306 NULL_RTX
, value_mode
, VOIDmode
,
5313 rtx addr
= XEXP (target
, 0);
5316 /* If a value is wanted, it must be the lhs;
5317 so make the address stable for multiple use. */
5319 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5320 && ! CONSTANT_ADDRESS_P (addr
)
5321 /* A frame-pointer reference is already stable. */
5322 && ! (GET_CODE (addr
) == PLUS
5323 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5324 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5325 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5326 target
= replace_equiv_address (target
, copy_to_reg (addr
));
5328 /* Now build a reference to just the desired component. */
5330 to_rtx
= copy_rtx (adjust_address (target
, mode
,
5331 bitpos
/ BITS_PER_UNIT
));
5333 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5334 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5336 to_rtx
= copy_rtx (to_rtx
);
5337 set_mem_alias_set (to_rtx
, alias_set
);
5340 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5344 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5345 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5346 codes and find the ultimate containing object, which we return.
5348 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5349 bit position, and *PUNSIGNEDP to the signedness of the field.
5350 If the position of the field is variable, we store a tree
5351 giving the variable offset (in units) in *POFFSET.
5352 This offset is in addition to the bit position.
5353 If the position is not variable, we store 0 in *POFFSET.
5354 We set *PALIGNMENT to the alignment of the address that will be
5355 computed. This is the alignment of the thing we return if *POFFSET
5356 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5358 If any of the extraction expressions is volatile,
5359 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5361 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5362 is a mode that can be used to access the field. In that case, *PBITSIZE
5365 If the field describes a variable-sized object, *PMODE is set to
5366 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5367 this case, but the address of the object can be found. */
/* NOTE(review): line-mangled listing -- code bytes preserved exactly; only
   comments added.  Per the doc comment above (orig. lines 5344-5367), this
   walks a chain of COMPONENT_REF / BIT_FIELD_REF / ARRAY_REF /
   ARRAY_RANGE_REF nodes, returning the ultimate containing object and
   filling *PBITSIZE, *PBITPOS, *POFFSET, *PMODE, *PUNSIGNEDP, *PVOLATILEP
   and *PALIGNMENT.  Several interior lines are absent from this listing;
   confirm details against the original expr.c.  */
5370 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5371 punsignedp
, pvolatilep
, palignment
)
5373 HOST_WIDE_INT
*pbitsize
;
5374 HOST_WIDE_INT
*pbitpos
;
5376 enum machine_mode
*pmode
;
5379 unsigned int *palignment
;
5382 enum machine_mode mode
= VOIDmode
;
5383 tree offset
= size_zero_node
;
5384 tree bit_offset
= bitsize_zero_node
;
5385 unsigned int alignment
= BIGGEST_ALIGNMENT
;
5386 tree placeholder_ptr
= 0;
5389 /* First get the mode, signedness, and size. We do this from just the
5390 outermost expression. */
5391 if (TREE_CODE (exp
) == COMPONENT_REF
)
5393 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5394 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5395 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5397 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5399 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5401 size_tree
= TREE_OPERAND (exp
, 1);
5402 *punsignedp
= TREE_UNSIGNED (exp
);
/* Fallback: take mode/signedness from the expression's type.  */
5406 mode
= TYPE_MODE (TREE_TYPE (exp
));
5407 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5409 if (mode
== BLKmode
)
5410 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5412 *pbitsize
= GET_MODE_BITSIZE (mode
);
/* A non-constant size means a variable-sized object: report -1.  */
5417 if (! host_integerp (size_tree
, 1))
5418 mode
= BLKmode
, *pbitsize
= -1;
5420 *pbitsize
= tree_low_cst (size_tree
, 1);
5423 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5424 and find the ultimate containing object. */
5427 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5428 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5429 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5431 tree field
= TREE_OPERAND (exp
, 1);
5432 tree this_offset
= DECL_FIELD_OFFSET (field
);
5434 /* If this field hasn't been filled in yet, don't go
5435 past it. This should only happen when folding expressions
5436 made during type construction. */
5437 if (this_offset
== 0)
5439 else if (! TREE_CONSTANT (this_offset
)
5440 && contains_placeholder_p (this_offset
))
5441 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5443 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5444 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5445 DECL_FIELD_BIT_OFFSET (field
));
5447 if (! host_integerp (offset
, 0))
5448 alignment
= MIN (alignment
, DECL_OFFSET_ALIGN (field
));
5451 else if (TREE_CODE (exp
) == ARRAY_REF
5452 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5454 tree index
= TREE_OPERAND (exp
, 1);
5455 tree array
= TREE_OPERAND (exp
, 0);
5456 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5457 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5458 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5460 /* We assume all arrays have sizes that are a multiple of a byte.
5461 First subtract the lower bound, if any, in the type of the
5462 index, then convert to sizetype and multiply by the size of the
5464 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5465 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5468 /* If the index has a self-referential type, pass it to a
5469 WITH_RECORD_EXPR; if the component size is, pass our
5470 component to one. */
5471 if (! TREE_CONSTANT (index
)
5472 && contains_placeholder_p (index
))
5473 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5474 if (! TREE_CONSTANT (unit_size
)
5475 && contains_placeholder_p (unit_size
))
5476 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5478 offset
= size_binop (PLUS_EXPR
, offset
,
5479 size_binop (MULT_EXPR
,
5480 convert (sizetype
, index
),
5484 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5486 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5488 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5489 We might have been called from tree optimization where we
5490 haven't set up an object yet. */
/* Stop the walk on anything that is not a no-op view conversion.  */
5498 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5499 && ! ((TREE_CODE (exp
) == NOP_EXPR
5500 || TREE_CODE (exp
) == CONVERT_EXPR
)
5501 && (TYPE_MODE (TREE_TYPE (exp
))
5502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5505 /* If any reference in the chain is volatile, the effect is volatile. */
5506 if (TREE_THIS_VOLATILE (exp
))
5509 /* If the offset is non-constant already, then we can't assume any
5510 alignment more than the alignment here. */
5511 if (! TREE_CONSTANT (offset
))
5512 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5514 exp
= TREE_OPERAND (exp
, 0);
5518 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5519 else if (TREE_TYPE (exp
) != 0)
5520 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5522 /* If OFFSET is constant, see if we can return the whole thing as a
5523 constant bit position. Otherwise, split it up. */
5524 if (host_integerp (offset
, 0)
5525 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5527 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5528 && host_integerp (tem
, 0))
5529 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5531 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5534 *palignment
= alignment
;
5538 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5540 static enum memory_use_mode
5541 get_memory_usage_from_modifier (modifier
)
5542 enum expand_modifier modifier
;
5548 return MEMORY_USE_RO
;
5550 case EXPAND_MEMORY_USE_WO
:
5551 return MEMORY_USE_WO
;
5553 case EXPAND_MEMORY_USE_RW
:
5554 return MEMORY_USE_RW
;
5556 case EXPAND_MEMORY_USE_DONT
:
5557 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5558 MEMORY_USE_DONT, because they are modifiers to a call of
5559 expand_expr in the ADDR_EXPR case of expand_expr. */
5560 case EXPAND_CONST_ADDRESS
:
5561 case EXPAND_INITIALIZER
:
5562 return MEMORY_USE_DONT
;
5563 case EXPAND_MEMORY_USE_BAD
:
5569 /* Given an rtx VALUE that may contain additions and multiplications, return
5570 an equivalent value that just refers to a register, memory, or constant.
5571 This is done by generating instructions to perform the arithmetic and
5572 returning a pseudo-register containing the value.
5574 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): line-mangled listing -- code bytes preserved exactly; only
   comments added.  Per the doc comment above (orig. lines 5569-5574), this
   reduces an rtx VALUE containing additions/multiplications to a REG,
   SUBREG, MEM or constant, emitting the arithmetic as insns.  Several
   interior lines (declarations of tmp/op2/binoptab, braces, the final
   return) are absent from this listing; confirm against the original
   expr.c.  */
5577 force_operand (value
, target
)
5581 /* Use a temporary to force order of execution of calls to
5585 /* Use subtarget as the target for operand 0 of a binary operation. */
5586 rtx subtarget
= get_subtarget (target
);
5588 /* Check for a PIC address load. */
5590 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5591 && XEXP (value
, 0) == pic_offset_table_rtx
5592 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5593 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5594 || GET_CODE (XEXP (value
, 1)) == CONST
))
/* PIC address: move the whole PLUS/MINUS into a fresh register.  */
5597 subtarget
= gen_reg_rtx (GET_MODE (value
));
5598 emit_move_insn (subtarget
, value
);
/* Select the optab for a binary PLUS/MINUS/MULT and recurse on the
   operands.  */
5602 if (GET_CODE (value
) == PLUS
)
5603 binoptab
= add_optab
;
5604 else if (GET_CODE (value
) == MINUS
)
5605 binoptab
= sub_optab
;
5606 else if (GET_CODE (value
) == MULT
)
5608 op2
= XEXP (value
, 1);
5609 if (!CONSTANT_P (op2
)
5610 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5612 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5613 return expand_mult (GET_MODE (value
), tmp
,
5614 force_operand (op2
, NULL_RTX
),
5620 op2
= XEXP (value
, 1);
5621 if (!CONSTANT_P (op2
)
5622 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
/* Prefer adding a negated constant over subtracting it.  */
5624 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5626 binoptab
= add_optab
;
5627 op2
= negate_rtx (GET_MODE (value
), op2
);
5630 /* Check for an addition with OP2 a constant integer and our first
5631 operand a PLUS of a virtual register and something else. In that
5632 case, we want to emit the sum of the virtual register and the
5633 constant first and then add the other value. This allows virtual
5634 register instantiation to simply modify the constant rather than
5635 creating another one around this addition. */
5636 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5637 && GET_CODE (XEXP (value
, 0)) == PLUS
5638 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5639 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5640 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5642 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5643 XEXP (XEXP (value
, 0), 0), op2
,
5644 subtarget
, 0, OPTAB_LIB_WIDEN
);
5645 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5646 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5647 target
, 0, OPTAB_LIB_WIDEN
);
/* General binary case: force operand 0 into SUBTARGET, then combine.  */
5650 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5651 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5652 force_operand (op2
, NULL_RTX
),
5653 target
, 0, OPTAB_LIB_WIDEN
);
5654 /* We give UNSIGNEDP = 0 to expand_binop
5655 because the only operations we are expanding here are signed ones. */
5658 #ifdef INSN_SCHEDULING
5659 /* On machines that have insn scheduling, we want all memory reference to be
5660 explicit, so we need to deal with such paradoxical SUBREGs. */
5661 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5662 && (GET_MODE_SIZE (GET_MODE (value
))
5663 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
/* Paradoxical SUBREG of a MEM: force the inner MEM into a register and
   rebuild the subreg around it.  */
5665 = simplify_gen_subreg (GET_MODE (value
),
5666 force_reg (GET_MODE (SUBREG_REG (value
)),
5667 force_operand (SUBREG_REG (value
),
5669 GET_MODE (SUBREG_REG (value
)),
5670 SUBREG_BYTE (value
));
5676 /* Subroutine of expand_expr: return nonzero iff there is no way that
5677 EXP can reference X, which is being modified. TOP_P is nonzero if this
5678 call is going to be used to determine whether we need a temporary
5679 for EXP, as opposed to a recursive call to this function.
5681 It is always safe for this routine to return zero since it merely
5682 searches for optimization opportunities. */
5685 safe_from_p (x
, exp
, top_p
)
5692 static tree save_expr_list
;
5695 /* If EXP has varying size, we MUST use a target since we currently
5696 have no way of allocating temporaries of variable size
5697 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5698 So we assume here that something at a higher level has prevented a
5699 clash. This is somewhat bogus, but the best we can do. Only
5700 do this when X is BLKmode and when we are at the top level. */
5701 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5702 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5703 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5704 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5705 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5707 && GET_MODE (x
) == BLKmode
)
5708 /* If X is in the outgoing argument area, it is always safe. */
5709 || (GET_CODE (x
) == MEM
5710 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5711 || (GET_CODE (XEXP (x
, 0)) == PLUS
5712 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5715 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5716 find the underlying pseudo. */
5717 if (GET_CODE (x
) == SUBREG
)
5720 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5724 /* A SAVE_EXPR might appear many times in the expression passed to the
5725 top-level safe_from_p call, and if it has a complex subexpression,
5726 examining it multiple times could result in a combinatorial explosion.
5727 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5728 with optimization took about 28 minutes to compile -- even though it was
5729 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5730 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5731 we have processed. Note that the only test of top_p was above. */
5740 rtn
= safe_from_p (x
, exp
, 0);
5742 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5743 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5748 /* Now look at our tree code and possibly recurse. */
5749 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5752 exp_rtl
= DECL_RTL_SET_P (exp
) ? DECL_RTL (exp
) : NULL_RTX
;
5759 if (TREE_CODE (exp
) == TREE_LIST
)
5760 return ((TREE_VALUE (exp
) == 0
5761 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5762 && (TREE_CHAIN (exp
) == 0
5763 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5764 else if (TREE_CODE (exp
) == ERROR_MARK
)
5765 return 1; /* An already-visited SAVE_EXPR? */
5770 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5774 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5775 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5779 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5780 the expression. If it is set, we conflict iff we are that rtx or
5781 both are in memory. Otherwise, we check all operands of the
5782 expression recursively. */
5784 switch (TREE_CODE (exp
))
5787 /* If the operand is static or we are static, we can't conflict.
5788 Likewise if we don't conflict with the operand at all. */
5789 if (staticp (TREE_OPERAND (exp
, 0))
5790 || TREE_STATIC (exp
)
5791 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5794 /* Otherwise, the only way this can conflict is if we are taking
5795 the address of a DECL a that address if part of X, which is
5797 exp
= TREE_OPERAND (exp
, 0);
5800 if (!DECL_RTL_SET_P (exp
)
5801 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5804 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5809 if (GET_CODE (x
) == MEM
5810 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5811 get_alias_set (exp
)))
5816 /* Assume that the call will clobber all hard registers and
5818 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5819 || GET_CODE (x
) == MEM
)
5824 /* If a sequence exists, we would have to scan every instruction
5825 in the sequence to see if it was safe. This is probably not
5827 if (RTL_EXPR_SEQUENCE (exp
))
5830 exp_rtl
= RTL_EXPR_RTL (exp
);
5833 case WITH_CLEANUP_EXPR
:
5834 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5837 case CLEANUP_POINT_EXPR
:
5838 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5841 exp_rtl
= SAVE_EXPR_RTL (exp
);
5845 /* If we've already scanned this, don't do it again. Otherwise,
5846 show we've scanned it and record for clearing the flag if we're
5848 if (TREE_PRIVATE (exp
))
5851 TREE_PRIVATE (exp
) = 1;
5852 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5854 TREE_PRIVATE (exp
) = 0;
5858 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5862 /* The only operand we look at is operand 1. The rest aren't
5863 part of the expression. */
5864 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5866 case METHOD_CALL_EXPR
:
5867 /* This takes an rtx argument, but shouldn't appear here. */
5874 /* If we have an rtx, we do not need to scan our operands. */
5878 nops
= first_rtl_op (TREE_CODE (exp
));
5879 for (i
= 0; i
< nops
; i
++)
5880 if (TREE_OPERAND (exp
, i
) != 0
5881 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5884 /* If this is a language-specific tree code, it may require
5885 special handling. */
5886 if ((unsigned int) TREE_CODE (exp
)
5887 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5889 && !(*lang_safe_from_p
) (x
, exp
))
5893 /* If we have an rtl, find any enclosed object. Then see if we conflict
5897 if (GET_CODE (exp_rtl
) == SUBREG
)
5899 exp_rtl
= SUBREG_REG (exp_rtl
);
5900 if (GET_CODE (exp_rtl
) == REG
5901 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5905 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5906 are memory and they conflict. */
5907 return ! (rtx_equal_p (x
, exp_rtl
)
5908 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5909 && true_dependence (exp_rtl
, GET_MODE (x
), x
,
5910 rtx_addr_varies_p
)));
5913 /* If we reach here, it is safe. */
/* NOTE(review): fragment only -- the function's name, signature and
   default case (original lines 5919-5924, 5930+) are elided from this
   extract.  The visible switch returns DECL_RTL (exp) for (presumably)
   VAR_DECL/PARM_DECL cases, matching the comment; verify against the
   full source.  */
5917 /* Subroutine of expand_expr: return rtx if EXP is a
5918 variable or parameter; else return 0. */
5925 switch (TREE_CODE (exp
))
5929 return DECL_RTL (exp
);
5935 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Sanity check, compiled only on targets defining
   MAX_INTEGER_COMPUTATION_MODE: abort via internal_error if EXP (or an
   operand of a unary/binary/relational EXP) has an integer mode wider
   than the target supports.  NOTE(review): the function's opening brace
   and the STRIP_NOPS-style stripping implied by the comment at 5944
   (original lines 5939-5947 area) are elided from this extract.  */
5938 check_max_integer_computation_mode (exp
)
5941 enum tree_code code
;
5942 enum machine_mode mode
;
5944 /* Strip any NOPs that don't change the mode. */
5946 code
= TREE_CODE (exp
);
5948 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5949 if (code
== NOP_EXPR
5950 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5953 /* First check the type of the overall operation. We need only look at
5954 unary, binary and relational operations. */
5955 if (TREE_CODE_CLASS (code
) == '1'
5956 || TREE_CODE_CLASS (code
) == '2'
5957 || TREE_CODE_CLASS (code
) == '<')
5959 mode
= TYPE_MODE (TREE_TYPE (exp
));
5960 if (GET_MODE_CLASS (mode
) == MODE_INT
5961 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5962 internal_error ("unsupported wide integer operation");
5965 /* Check operand of a unary op. */
5966 if (TREE_CODE_CLASS (code
) == '1')
5968 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5969 if (GET_MODE_CLASS (mode
) == MODE_INT
5970 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5971 internal_error ("unsupported wide integer operation");
5974 /* Check operands of a binary/comparison op. */
5975 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5977 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5978 if (GET_MODE_CLASS (mode
) == MODE_INT
5979 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5980 internal_error ("unsupported wide integer operation");
5982 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5983 if (GET_MODE_CLASS (mode
) == MODE_INT
5984 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5985 internal_error ("unsupported wide integer operation");
5990 /* Return the highest power of two that EXP is known to be a multiple of.
5991 This is used in updating alignment of MEMs in array references. */
/* NOTE(review): fragment -- several case labels and their returns are
   elided from this extract (e.g. the default INTEGER_CST fall-through at
   6011-6013, the case label before 6020 -- presumably MULT_EXPR, its
   return at 6022, and the COND_EXPR label before 6035).  Visible logic:
   INTEGER_CST yields the lowest set bit of |EXP| (1 if untestable),
   PLUS/MINUS take the MIN of both operands' factors, the division codes
   take MAX (1, c0 / c1), and pure conversions recurse on operand 0.  */
5993 static HOST_WIDE_INT
5994 highest_pow2_factor (exp
)
5997 HOST_WIDE_INT c0
, c1
;
5999 switch (TREE_CODE (exp
))
6002 /* If the integer is expressable in a HOST_WIDE_INT, we can find the
6003 lowest bit that's a one. If the result is zero, pessimize by
6004 returning 1. This is overly-conservative, but such things should not
6005 happen in the offset expressions that we are called with. */
6006 if (host_integerp (exp
, 0))
6008 c0
= tree_low_cst (exp
, 0);
6009 c0
= c0
< 0 ? - c0
: c0
;
6010 return c0
!= 0 ? c0
& -c0
: 1;
6014 case PLUS_EXPR
: case MINUS_EXPR
:
6015 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6016 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6017 return MIN (c0
, c1
);
6020 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6021 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6024 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6026 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6027 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6028 return MAX (1, c0
/ c1
);
6030 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6031 case COMPOUND_EXPR
: case SAVE_EXPR
:
6032 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6035 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6036 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6037 return MIN (c0
, c1
);
6046 /* Return an object on the placeholder list that matches EXP, a
6047 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6048 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6049 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6050 is a location which initially points to a starting location in the
6051 placeholder list (zero means start of the list) and where a pointer into
6052 the placeholder list at which the object is found is placed. */
/* NOTE(review): fragment -- the return-type line, parameter
   declarations, braces and the direct-match `return elt;` (around
   original lines 6086-6087) are elided from this extract.  Two passes
   per placeholder entry are visible: the first walks the expression
   chain looking for an ELT whose main-variant type equals NEED_TYPE;
   the second accepts a pointer to NEED_TYPE and wraps it in an
   INDIRECT_REF.  Both store the matching list node through *PLIST
   (presumably only when PLIST != 0 -- the guard is elided; confirm).  */
6055 find_placeholder (exp
, plist
)
6059 tree type
= TREE_TYPE (exp
);
6060 tree placeholder_expr
;
6062 for (placeholder_expr
6063 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6064 placeholder_expr
!= 0;
6065 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6067 tree need_type
= TYPE_MAIN_VARIANT (type
);
6070 /* Find the outermost reference that is of the type we want. If none,
6071 see if any object has a type that is a pointer to the type we
6073 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6074 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6075 || TREE_CODE (elt
) == COND_EXPR
)
6076 ? TREE_OPERAND (elt
, 1)
6077 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6078 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6079 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6080 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6081 ? TREE_OPERAND (elt
, 0) : 0))
6082 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6085 *plist
= placeholder_expr
;
6089 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6091 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6092 || TREE_CODE (elt
) == COND_EXPR
)
6093 ? TREE_OPERAND (elt
, 1)
6094 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6095 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6096 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6097 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6098 ? TREE_OPERAND (elt
, 0) : 0))
6099 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6100 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6104 *plist
= placeholder_expr
;
6105 return build1 (INDIRECT_REF
, need_type
, elt
);
6112 /* expand_expr: generate code for computing expression EXP.
6113 An rtx for the computed value is returned. The value is never null.
6114 In the case of a void EXP, const0_rtx is returned.
6116 The value may be stored in TARGET if TARGET is nonzero.
6117 TARGET is just a suggestion; callers must assume that
6118 the rtx returned may not be the same as TARGET.
6120 If TARGET is CONST0_RTX, it means that the value will be ignored.
6122 If TMODE is not VOIDmode, it suggests generating the
6123 result in mode TMODE. But this is done only when convenient.
6124 Otherwise, TMODE is ignored and the value generated in its natural mode.
6125 TMODE is just a suggestion; callers must assume that
6126 the rtx returned may not have mode TMODE.
6128 Note that TARGET may have neither TMODE nor MODE. In that case, it
6129 probably will not be used.
6131 If MODIFIER is EXPAND_SUM then when EXP is an addition
6132 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6133 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6134 products as above, or REG or MEM, or constant.
6135 Ordinarily in such cases we would output mul or add instructions
6136 and then return a pseudo reg containing the sum.
6138 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6139 it also marks a label as absolutely required (it can't be dead).
6140 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6141 This is used for outputting expressions used in initializers.
6143 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6144 with a constant address even if that address is not normally legitimate.
6145 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6148 expand_expr (exp
, target
, tmode
, modifier
)
6151 enum machine_mode tmode
;
6152 enum expand_modifier modifier
;
6155 tree type
= TREE_TYPE (exp
);
6156 int unsignedp
= TREE_UNSIGNED (type
);
6157 enum machine_mode mode
;
6158 enum tree_code code
= TREE_CODE (exp
);
6160 rtx subtarget
, original_target
;
6163 /* Used by check-memory-usage to make modifier read only. */
6164 enum expand_modifier ro_modifier
;
6166 /* Handle ERROR_MARK before anybody tries to access its type. */
6167 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6169 op0
= CONST0_RTX (tmode
);
6175 mode
= TYPE_MODE (type
);
6176 /* Use subtarget as the target for operand 0 of a binary operation. */
6177 subtarget
= get_subtarget (target
);
6178 original_target
= target
;
6179 ignore
= (target
== const0_rtx
6180 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6181 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6182 || code
== COND_EXPR
)
6183 && TREE_CODE (type
) == VOID_TYPE
));
6185 /* Make a read-only version of the modifier. */
6186 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
6187 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
6188 ro_modifier
= modifier
;
6190 ro_modifier
= EXPAND_NORMAL
;
6192 /* If we are going to ignore this result, we need only do something
6193 if there is a side-effect somewhere in the expression. If there
6194 is, short-circuit the most common cases here. Note that we must
6195 not call expand_expr with anything but const0_rtx in case this
6196 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6200 if (! TREE_SIDE_EFFECTS (exp
))
6203 /* Ensure we reference a volatile object even if value is ignored, but
6204 don't do this if all we are doing is taking its address. */
6205 if (TREE_THIS_VOLATILE (exp
)
6206 && TREE_CODE (exp
) != FUNCTION_DECL
6207 && mode
!= VOIDmode
&& mode
!= BLKmode
6208 && modifier
!= EXPAND_CONST_ADDRESS
)
6210 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
6211 if (GET_CODE (temp
) == MEM
)
6212 temp
= copy_to_reg (temp
);
6216 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6217 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6218 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
6219 VOIDmode
, ro_modifier
);
6220 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6221 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6223 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6225 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
,
6229 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6230 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6231 /* If the second operand has no side effects, just evaluate
6233 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
6234 VOIDmode
, ro_modifier
);
6235 else if (code
== BIT_FIELD_REF
)
6237 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6239 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
,
6241 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
,
6249 #ifdef MAX_INTEGER_COMPUTATION_MODE
6250 /* Only check stuff here if the mode we want is different from the mode
6251 of the expression; if it's the same, check_max_integer_computiation_mode
6252 will handle it. Do we really need to check this stuff at all? */
6255 && GET_MODE (target
) != mode
6256 && TREE_CODE (exp
) != INTEGER_CST
6257 && TREE_CODE (exp
) != PARM_DECL
6258 && TREE_CODE (exp
) != ARRAY_REF
6259 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6260 && TREE_CODE (exp
) != COMPONENT_REF
6261 && TREE_CODE (exp
) != BIT_FIELD_REF
6262 && TREE_CODE (exp
) != INDIRECT_REF
6263 && TREE_CODE (exp
) != CALL_EXPR
6264 && TREE_CODE (exp
) != VAR_DECL
6265 && TREE_CODE (exp
) != RTL_EXPR
)
6267 enum machine_mode mode
= GET_MODE (target
);
6269 if (GET_MODE_CLASS (mode
) == MODE_INT
6270 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6271 internal_error ("unsupported wide integer operation");
6275 && TREE_CODE (exp
) != INTEGER_CST
6276 && TREE_CODE (exp
) != PARM_DECL
6277 && TREE_CODE (exp
) != ARRAY_REF
6278 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6279 && TREE_CODE (exp
) != COMPONENT_REF
6280 && TREE_CODE (exp
) != BIT_FIELD_REF
6281 && TREE_CODE (exp
) != INDIRECT_REF
6282 && TREE_CODE (exp
) != VAR_DECL
6283 && TREE_CODE (exp
) != CALL_EXPR
6284 && TREE_CODE (exp
) != RTL_EXPR
6285 && GET_MODE_CLASS (tmode
) == MODE_INT
6286 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
6287 internal_error ("unsupported wide integer operation");
6289 check_max_integer_computation_mode (exp
);
6292 /* If will do cse, generate all results into pseudo registers
6293 since 1) that allows cse to find more things
6294 and 2) otherwise cse could produce an insn the machine
6297 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6298 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
6305 tree function
= decl_function_context (exp
);
6306 /* Handle using a label in a containing function. */
6307 if (function
!= current_function_decl
6308 && function
!= inline_function_decl
&& function
!= 0)
6310 struct function
*p
= find_function_data (function
);
6311 p
->expr
->x_forced_labels
6312 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
6313 p
->expr
->x_forced_labels
);
6317 if (modifier
== EXPAND_INITIALIZER
)
6318 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
6323 temp
= gen_rtx_MEM (FUNCTION_MODE
,
6324 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
6325 if (function
!= current_function_decl
6326 && function
!= inline_function_decl
&& function
!= 0)
6327 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6332 if (DECL_RTL (exp
) == 0)
6334 error_with_decl (exp
, "prior parameter's size depends on `%s'");
6335 return CONST0_RTX (mode
);
6338 /* ... fall through ... */
6341 /* If a static var's type was incomplete when the decl was written,
6342 but the type is complete now, lay out the decl now. */
6343 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6344 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6346 layout_decl (exp
, 0);
6347 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
6350 /* Although static-storage variables start off initialized, according to
6351 ANSI C, a memcpy could overwrite them with uninitialized values. So
6352 we check them too. This also lets us check for read-only variables
6353 accessed via a non-const declaration, in case it won't be detected
6354 any other way (e.g., in an embedded system or OS kernel without
6357 Aggregates are not checked here; they're handled elsewhere. */
6358 if (cfun
&& current_function_check_memory_usage
6360 && GET_CODE (DECL_RTL (exp
)) == MEM
6361 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6363 enum memory_use_mode memory_usage
;
6364 memory_usage
= get_memory_usage_from_modifier (modifier
);
6366 in_check_memory_usage
= 1;
6367 if (memory_usage
!= MEMORY_USE_DONT
)
6368 emit_library_call (chkr_check_addr_libfunc
,
6369 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
6370 XEXP (DECL_RTL (exp
), 0), Pmode
,
6371 GEN_INT (int_size_in_bytes (type
)),
6372 TYPE_MODE (sizetype
),
6373 GEN_INT (memory_usage
),
6374 TYPE_MODE (integer_type_node
));
6375 in_check_memory_usage
= 0;
6378 /* ... fall through ... */
6382 if (DECL_RTL (exp
) == 0)
6385 /* Ensure variable marked as used even if it doesn't go through
6386 a parser. If it hasn't be used yet, write out an external
6388 if (! TREE_USED (exp
))
6390 assemble_external (exp
);
6391 TREE_USED (exp
) = 1;
6394 /* Show we haven't gotten RTL for this yet. */
6397 /* Handle variables inherited from containing functions. */
6398 context
= decl_function_context (exp
);
6400 /* We treat inline_function_decl as an alias for the current function
6401 because that is the inline function whose vars, types, etc.
6402 are being merged into the current function.
6403 See expand_inline_function. */
6405 if (context
!= 0 && context
!= current_function_decl
6406 && context
!= inline_function_decl
6407 /* If var is static, we don't need a static chain to access it. */
6408 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6409 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6413 /* Mark as non-local and addressable. */
6414 DECL_NONLOCAL (exp
) = 1;
6415 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6417 mark_addressable (exp
);
6418 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6420 addr
= XEXP (DECL_RTL (exp
), 0);
6421 if (GET_CODE (addr
) == MEM
)
6423 = replace_equiv_address (addr
,
6424 fix_lexical_addr (XEXP (addr
, 0), exp
));
6426 addr
= fix_lexical_addr (addr
, exp
);
6428 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6431 /* This is the case of an array whose size is to be determined
6432 from its initializer, while the initializer is still being parsed.
6435 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6436 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6437 temp
= validize_mem (DECL_RTL (exp
));
6439 /* If DECL_RTL is memory, we are in the normal case and either
6440 the address is not valid or it is not a register and -fforce-addr
6441 is specified, get the address into a register. */
6443 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6444 && modifier
!= EXPAND_CONST_ADDRESS
6445 && modifier
!= EXPAND_SUM
6446 && modifier
!= EXPAND_INITIALIZER
6447 && (! memory_address_p (DECL_MODE (exp
),
6448 XEXP (DECL_RTL (exp
), 0))
6450 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6451 temp
= replace_equiv_address (DECL_RTL (exp
),
6452 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6454 /* If we got something, return it. But first, set the alignment
6455 if the address is a register. */
6458 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6459 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6464 /* If the mode of DECL_RTL does not match that of the decl, it
6465 must be a promoted value. We return a SUBREG of the wanted mode,
6466 but mark it so that we know that it was already extended. */
6468 if (GET_CODE (DECL_RTL (exp
)) == REG
6469 && GET_MODE (DECL_RTL (exp
)) != mode
)
6471 /* Get the signedness used for this variable. Ensure we get the
6472 same mode we got when the variable was declared. */
6473 if (GET_MODE (DECL_RTL (exp
))
6474 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
6477 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6478 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6479 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6483 return DECL_RTL (exp
);
6486 return immed_double_const (TREE_INT_CST_LOW (exp
),
6487 TREE_INT_CST_HIGH (exp
), mode
);
6490 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
,
6491 EXPAND_MEMORY_USE_BAD
);
6494 /* If optimized, generate immediate CONST_DOUBLE
6495 which will be turned into memory by reload if necessary.
6497 We used to force a register so that loop.c could see it. But
6498 this does not allow gen_* patterns to perform optimizations with
6499 the constants. It also produces two insns in cases like "x = 1.0;".
6500 On most machines, floating-point constants are not permitted in
6501 many insns, so we'd end up copying it to a register in any case.
6503 Now, we do the copying in expand_binop, if appropriate. */
6504 return immed_real_const (exp
);
6508 if (! TREE_CST_RTL (exp
))
6509 output_constant_def (exp
, 1);
6511 /* TREE_CST_RTL probably contains a constant address.
6512 On RISC machines where a constant address isn't valid,
6513 make some insns to get that address into a register. */
6514 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6515 && modifier
!= EXPAND_CONST_ADDRESS
6516 && modifier
!= EXPAND_INITIALIZER
6517 && modifier
!= EXPAND_SUM
6518 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6520 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6521 return replace_equiv_address (TREE_CST_RTL (exp
),
6522 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6523 return TREE_CST_RTL (exp
);
6525 case EXPR_WITH_FILE_LOCATION
:
6528 const char *saved_input_filename
= input_filename
;
6529 int saved_lineno
= lineno
;
6530 input_filename
= EXPR_WFL_FILENAME (exp
);
6531 lineno
= EXPR_WFL_LINENO (exp
);
6532 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6533 emit_line_note (input_filename
, lineno
);
6534 /* Possibly avoid switching back and forth here. */
6535 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6536 input_filename
= saved_input_filename
;
6537 lineno
= saved_lineno
;
6542 context
= decl_function_context (exp
);
6544 /* If this SAVE_EXPR was at global context, assume we are an
6545 initialization function and move it into our context. */
6547 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6549 /* We treat inline_function_decl as an alias for the current function
6550 because that is the inline function whose vars, types, etc.
6551 are being merged into the current function.
6552 See expand_inline_function. */
6553 if (context
== current_function_decl
|| context
== inline_function_decl
)
6556 /* If this is non-local, handle it. */
6559 /* The following call just exists to abort if the context is
6560 not of a containing function. */
6561 find_function_data (context
);
6563 temp
= SAVE_EXPR_RTL (exp
);
6564 if (temp
&& GET_CODE (temp
) == REG
)
6566 put_var_into_stack (exp
);
6567 temp
= SAVE_EXPR_RTL (exp
);
6569 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6572 replace_equiv_address (temp
,
6573 fix_lexical_addr (XEXP (temp
, 0), exp
));
6575 if (SAVE_EXPR_RTL (exp
) == 0)
6577 if (mode
== VOIDmode
)
6580 temp
= assign_temp (build_qualified_type (type
,
6582 | TYPE_QUAL_CONST
)),
6585 SAVE_EXPR_RTL (exp
) = temp
;
6586 if (!optimize
&& GET_CODE (temp
) == REG
)
6587 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6590 /* If the mode of TEMP does not match that of the expression, it
6591 must be a promoted value. We pass store_expr a SUBREG of the
6592 wanted mode but mark it so that we know that it was already
6593 extended. Note that `unsignedp' was modified above in
6596 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6598 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6599 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6600 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6603 if (temp
== const0_rtx
)
6604 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6605 EXPAND_MEMORY_USE_BAD
);
6607 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6609 TREE_USED (exp
) = 1;
6612 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6613 must be a promoted value. We return a SUBREG of the wanted mode,
6614 but mark it so that we know that it was already extended. */
6616 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6617 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6619 /* Compute the signedness and make the proper SUBREG. */
6620 promote_mode (type
, mode
, &unsignedp
, 0);
6621 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6622 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6623 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6627 return SAVE_EXPR_RTL (exp
);
6632 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6633 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
6637 case PLACEHOLDER_EXPR
:
6639 tree old_list
= placeholder_list
;
6640 tree placeholder_expr
= 0;
6642 exp
= find_placeholder (exp
, &placeholder_expr
);
6646 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6647 temp
= expand_expr (exp
, original_target
, tmode
, ro_modifier
);
6648 placeholder_list
= old_list
;
6652 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6655 case WITH_RECORD_EXPR
:
6656 /* Put the object on the placeholder list, expand our first operand,
6657 and pop the list. */
6658 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6660 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6661 tmode
, ro_modifier
);
6662 placeholder_list
= TREE_CHAIN (placeholder_list
);
6666 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6667 expand_goto (TREE_OPERAND (exp
, 0));
6669 expand_computed_goto (TREE_OPERAND (exp
, 0));
6673 expand_exit_loop_if_false (NULL
,
6674 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6677 case LABELED_BLOCK_EXPR
:
6678 if (LABELED_BLOCK_BODY (exp
))
6679 expand_expr_stmt (LABELED_BLOCK_BODY (exp
));
6680 /* Should perhaps use expand_label, but this is simpler and safer. */
6681 do_pending_stack_adjust ();
6682 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6685 case EXIT_BLOCK_EXPR
:
6686 if (EXIT_BLOCK_RETURN (exp
))
6687 sorry ("returned value in block_exit_expr");
6688 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6693 expand_start_loop (1);
6694 expand_expr_stmt (TREE_OPERAND (exp
, 0));
6702 tree vars
= TREE_OPERAND (exp
, 0);
6703 int vars_need_expansion
= 0;
6705 /* Need to open a binding contour here because
6706 if there are any cleanups they must be contained here. */
6707 expand_start_bindings (2);
6709 /* Mark the corresponding BLOCK for output in its proper place. */
6710 if (TREE_OPERAND (exp
, 2) != 0
6711 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6712 insert_block (TREE_OPERAND (exp
, 2));
6714 /* If VARS have not yet been expanded, expand them now. */
6717 if (!DECL_RTL_SET_P (vars
))
6719 vars_need_expansion
= 1;
6722 expand_decl_init (vars
);
6723 vars
= TREE_CHAIN (vars
);
6726 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
6728 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6734 if (RTL_EXPR_SEQUENCE (exp
))
6736 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6738 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6739 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6741 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6742 free_temps_for_rtl_expr (exp
);
6743 return RTL_EXPR_RTL (exp
);
6746 /* If we don't need the result, just ensure we evaluate any
6751 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6752 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
,
6753 EXPAND_MEMORY_USE_BAD
);
6757 /* All elts simple constants => refer to a constant in memory. But
6758 if this is a non-BLKmode mode, let it store a field at a time
6759 since that should make a CONST_INT or CONST_DOUBLE when we
6760 fold. Likewise, if we have a target we can use, it is best to
6761 store directly into the target unless the type is large enough
6762 that memcpy will be used. If we are making an initializer and
6763 all operands are constant, put it in memory as well. */
6764 else if ((TREE_STATIC (exp
)
6765 && ((mode
== BLKmode
6766 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6767 || TREE_ADDRESSABLE (exp
)
6768 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6769 && (! MOVE_BY_PIECES_P
6770 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6772 && ! mostly_zeros_p (exp
))))
6773 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6775 rtx constructor
= output_constant_def (exp
, 1);
6777 if (modifier
!= EXPAND_CONST_ADDRESS
6778 && modifier
!= EXPAND_INITIALIZER
6779 && modifier
!= EXPAND_SUM
)
6780 constructor
= validize_mem (constructor
);
6786 /* Handle calls that pass values in multiple non-contiguous
6787 locations. The Irix 6 ABI has examples of this. */
6788 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6789 || GET_CODE (target
) == PARALLEL
)
6791 = assign_temp (build_qualified_type (type
,
6793 | (TREE_READONLY (exp
)
6794 * TYPE_QUAL_CONST
))),
6795 TREE_ADDRESSABLE (exp
), 1, 1);
6797 store_constructor (exp
, target
, 0,
6798 int_size_in_bytes (TREE_TYPE (exp
)));
6804 tree exp1
= TREE_OPERAND (exp
, 0);
6806 tree string
= string_constant (exp1
, &index
);
6808 /* Try to optimize reads from const strings. */
6810 && TREE_CODE (string
) == STRING_CST
6811 && TREE_CODE (index
) == INTEGER_CST
6812 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6813 && GET_MODE_CLASS (mode
) == MODE_INT
6814 && GET_MODE_SIZE (mode
) == 1
6815 && modifier
!= EXPAND_MEMORY_USE_WO
)
6817 GEN_INT (TREE_STRING_POINTER (string
)[TREE_INT_CST_LOW (index
)]);
6819 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6820 op0
= memory_address (mode
, op0
);
6822 if (cfun
&& current_function_check_memory_usage
6823 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6825 enum memory_use_mode memory_usage
;
6826 memory_usage
= get_memory_usage_from_modifier (modifier
);
6828 if (memory_usage
!= MEMORY_USE_DONT
)
6830 in_check_memory_usage
= 1;
6831 emit_library_call (chkr_check_addr_libfunc
,
6832 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, op0
,
6833 Pmode
, GEN_INT (int_size_in_bytes (type
)),
6834 TYPE_MODE (sizetype
),
6835 GEN_INT (memory_usage
),
6836 TYPE_MODE (integer_type_node
));
6837 in_check_memory_usage
= 0;
6841 temp
= gen_rtx_MEM (mode
, op0
);
6842 set_mem_attributes (temp
, exp
, 0);
6844 /* If we are writing to this object and its type is a record with
6845 readonly fields, we must mark it as readonly so it will
6846 conflict with readonly references to those fields. */
6847 if (modifier
== EXPAND_MEMORY_USE_WO
&& readonly_fields_p (type
))
6848 RTX_UNCHANGING_P (temp
) = 1;
6854 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6858 tree array
= TREE_OPERAND (exp
, 0);
6859 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6860 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6861 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6864 /* Optimize the special-case of a zero lower bound.
6866 We convert the low_bound to sizetype to avoid some problems
6867 with constant folding. (E.g. suppose the lower bound is 1,
6868 and its mode is QI. Without the conversion, (ARRAY
6869 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6870 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6872 if (! integer_zerop (low_bound
))
6873 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6875 /* Fold an expression like: "foo"[2].
6876 This is not done in fold so it won't happen inside &.
6877 Don't fold if this is for wide characters since it's too
6878 difficult to do correctly and this is a very rare case. */
6880 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
6881 && TREE_CODE (array
) == STRING_CST
6882 && TREE_CODE (index
) == INTEGER_CST
6883 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6884 && GET_MODE_CLASS (mode
) == MODE_INT
6885 && GET_MODE_SIZE (mode
) == 1)
6887 GEN_INT (TREE_STRING_POINTER (array
)[TREE_INT_CST_LOW (index
)]);
6889 /* If this is a constant index into a constant array,
6890 just get the value from the array. Handle both the cases when
6891 we have an explicit constructor and when our operand is a variable
6892 that was declared const. */
6894 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
6895 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6896 && TREE_CODE (index
) == INTEGER_CST
6897 && 0 > compare_tree_int (index
,
6898 list_length (CONSTRUCTOR_ELTS
6899 (TREE_OPERAND (exp
, 0)))))
6903 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6904 i
= TREE_INT_CST_LOW (index
);
6905 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6909 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6910 tmode
, ro_modifier
);
6913 else if (optimize
>= 1
6914 && modifier
!= EXPAND_CONST_ADDRESS
6915 && modifier
!= EXPAND_INITIALIZER
6916 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6917 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6918 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6920 if (TREE_CODE (index
) == INTEGER_CST
)
6922 tree init
= DECL_INITIAL (array
);
6924 if (TREE_CODE (init
) == CONSTRUCTOR
)
6928 for (elem
= CONSTRUCTOR_ELTS (init
);
6930 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6931 elem
= TREE_CHAIN (elem
))
6934 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6935 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6936 tmode
, ro_modifier
);
6938 else if (TREE_CODE (init
) == STRING_CST
6939 && 0 > compare_tree_int (index
,
6940 TREE_STRING_LENGTH (init
)))
6942 tree type
= TREE_TYPE (TREE_TYPE (init
));
6943 enum machine_mode mode
= TYPE_MODE (type
);
6945 if (GET_MODE_CLASS (mode
) == MODE_INT
6946 && GET_MODE_SIZE (mode
) == 1)
6948 (TREE_STRING_POINTER
6949 (init
)[TREE_INT_CST_LOW (index
)]));
6958 case ARRAY_RANGE_REF
:
6959 /* If the operand is a CONSTRUCTOR, we can just extract the
6960 appropriate field if it is present. Don't do this if we have
6961 already written the data since we want to refer to that copy
6962 and varasm.c assumes that's what we'll do. */
6963 if (code
== COMPONENT_REF
6964 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6965 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6969 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6970 elt
= TREE_CHAIN (elt
))
6971 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6972 /* We can normally use the value of the field in the
6973 CONSTRUCTOR. However, if this is a bitfield in
6974 an integral mode that we can fit in a HOST_WIDE_INT,
6975 we must mask only the number of bits in the bitfield,
6976 since this is done implicitly by the constructor. If
6977 the bitfield does not meet either of those conditions,
6978 we can't do this optimization. */
6979 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6980 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6982 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6983 <= HOST_BITS_PER_WIDE_INT
))))
6985 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6986 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6988 HOST_WIDE_INT bitsize
6989 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6991 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6993 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6994 op0
= expand_and (op0
, op1
, target
);
6998 enum machine_mode imode
6999 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7001 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7004 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7006 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7016 enum machine_mode mode1
;
7017 HOST_WIDE_INT bitsize
, bitpos
;
7020 unsigned int alignment
;
7021 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7022 &mode1
, &unsignedp
, &volatilep
,
7026 /* If we got back the original object, something is wrong. Perhaps
7027 we are evaluating an expression too early. In any event, don't
7028 infinitely recurse. */
7032 /* If TEM's type is a union of variable size, pass TARGET to the inner
7033 computation, since it will need a temporary and TARGET is known
7034 to have to do. This occurs in unchecked conversion in Ada. */
7038 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7039 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7041 ? target
: NULL_RTX
),
7043 (modifier
== EXPAND_INITIALIZER
7044 || modifier
== EXPAND_CONST_ADDRESS
)
7045 ? modifier
: EXPAND_NORMAL
);
7047 /* If this is a constant, put it into a register if it is a
7048 legitimate constant and OFFSET is 0 and memory if it isn't. */
7049 if (CONSTANT_P (op0
))
7051 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7052 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7054 op0
= force_reg (mode
, op0
);
7056 op0
= validize_mem (force_const_mem (mode
, op0
));
7061 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
7063 /* If this object is in a register, put it into memory.
7064 This case can't occur in C, but can in Ada if we have
7065 unchecked conversion of an expression from a scalar type to
7066 an array or record type. */
7067 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7068 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7070 /* If the operand is a SAVE_EXPR, we can deal with this by
7071 forcing the SAVE_EXPR into memory. */
7072 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7074 put_var_into_stack (TREE_OPERAND (exp
, 0));
7075 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7080 = build_qualified_type (TREE_TYPE (tem
),
7081 (TYPE_QUALS (TREE_TYPE (tem
))
7082 | TYPE_QUAL_CONST
));
7083 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7085 emit_move_insn (memloc
, op0
);
7090 if (GET_CODE (op0
) != MEM
)
7093 if (GET_MODE (offset_rtx
) != ptr_mode
)
7094 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7096 #ifdef POINTERS_EXTEND_UNSIGNED
7097 if (GET_MODE (offset_rtx
) != Pmode
)
7098 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
7101 /* A constant address in OP0 can have VOIDmode, we must not try
7102 to call force_reg for that case. Avoid that case. */
7103 if (GET_CODE (op0
) == MEM
7104 && GET_MODE (op0
) == BLKmode
7105 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7107 && (bitpos
% bitsize
) == 0
7108 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7109 && alignment
== GET_MODE_ALIGNMENT (mode1
))
7111 rtx temp
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7113 if (GET_CODE (XEXP (temp
, 0)) == REG
)
7116 op0
= (replace_equiv_address
7118 force_reg (GET_MODE (XEXP (temp
, 0)),
7123 op0
= offset_address (op0
, offset_rtx
,
7124 highest_pow2_factor (offset
));
7127 /* Don't forget about volatility even if this is a bitfield. */
7128 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7130 if (op0
== orig_op0
)
7131 op0
= copy_rtx (op0
);
7133 MEM_VOLATILE_P (op0
) = 1;
7136 /* Check the access. */
7137 if (cfun
!= 0 && current_function_check_memory_usage
7138 && GET_CODE (op0
) == MEM
)
7140 enum memory_use_mode memory_usage
;
7141 memory_usage
= get_memory_usage_from_modifier (modifier
);
7143 if (memory_usage
!= MEMORY_USE_DONT
)
7148 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
7149 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
7151 /* Check the access right of the pointer. */
7152 in_check_memory_usage
= 1;
7153 if (size
> BITS_PER_UNIT
)
7154 emit_library_call (chkr_check_addr_libfunc
,
7155 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, to
,
7156 Pmode
, GEN_INT (size
/ BITS_PER_UNIT
),
7157 TYPE_MODE (sizetype
),
7158 GEN_INT (memory_usage
),
7159 TYPE_MODE (integer_type_node
));
7160 in_check_memory_usage
= 0;
7164 /* In cases where an aligned union has an unaligned object
7165 as a field, we might be extracting a BLKmode value from
7166 an integer-mode (e.g., SImode) object. Handle this case
7167 by doing the extract into an object as wide as the field
7168 (which we know to be the width of a basic mode), then
7169 storing into memory, and changing the mode to BLKmode. */
7170 if (mode1
== VOIDmode
7171 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7172 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7173 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7174 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7175 && modifier
!= EXPAND_CONST_ADDRESS
7176 && modifier
!= EXPAND_INITIALIZER
)
7177 /* If the field isn't aligned enough to fetch as a memref,
7178 fetch it as a bit field. */
7179 || (mode1
!= BLKmode
7180 && SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
7181 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7182 < GET_MODE_ALIGNMENT (mode
))
7183 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7184 /* If the type and the field are a constant size and the
7185 size of the type isn't the same size as the bitfield,
7186 we must use bitfield operations. */
7188 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7190 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7193 && SLOW_UNALIGNED_ACCESS (mode
, alignment
)
7194 && (TYPE_ALIGN (type
) > alignment
7195 || bitpos
% TYPE_ALIGN (type
) != 0)))
7197 enum machine_mode ext_mode
= mode
;
7199 if (ext_mode
== BLKmode
7200 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7201 && GET_CODE (target
) == MEM
7202 && bitpos
% BITS_PER_UNIT
== 0))
7203 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7205 if (ext_mode
== BLKmode
)
7207 /* In this case, BITPOS must start at a byte boundary and
7208 TARGET, if specified, must be a MEM. */
7209 if (GET_CODE (op0
) != MEM
7210 || (target
!= 0 && GET_CODE (target
) != MEM
)
7211 || bitpos
% BITS_PER_UNIT
!= 0)
7214 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7216 target
= assign_temp (type
, 0, 1, 1);
7218 emit_block_move (target
, op0
,
7219 bitsize
== -1 ? expr_size (exp
)
7220 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7226 op0
= validize_mem (op0
);
7228 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7229 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7231 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
7232 unsignedp
, target
, ext_mode
, ext_mode
,
7233 int_size_in_bytes (TREE_TYPE (tem
)));
7235 /* If the result is a record type and BITSIZE is narrower than
7236 the mode of OP0, an integral mode, and this is a big endian
7237 machine, we must put the field into the high-order bits. */
7238 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7239 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7240 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
7241 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7242 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7246 if (mode
== BLKmode
)
7248 tree nt
= build_qualified_type (type_for_mode (ext_mode
, 0),
7250 rtx
new = assign_temp (nt
, 0, 1, 1);
7252 emit_move_insn (new, op0
);
7253 op0
= copy_rtx (new);
7254 PUT_MODE (op0
, BLKmode
);
7260 /* If the result is BLKmode, use that to access the object
7262 if (mode
== BLKmode
)
7265 /* Get a reference to just this component. */
7266 if (modifier
== EXPAND_CONST_ADDRESS
7267 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7268 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7270 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7272 if (op0
== orig_op0
)
7273 op0
= copy_rtx (op0
);
7275 set_mem_attributes (op0
, exp
, 0);
7276 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7277 mark_reg_pointer (XEXP (op0
, 0), alignment
);
7279 MEM_VOLATILE_P (op0
) |= volatilep
;
7280 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7281 || modifier
== EXPAND_CONST_ADDRESS
7282 || modifier
== EXPAND_INITIALIZER
)
7284 else if (target
== 0)
7285 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7287 convert_move (target
, op0
, unsignedp
);
7293 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7295 /* Evaluate the interior expression. */
7296 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7299 /* Get or create an instruction off which to hang a note. */
7300 if (REG_P (subtarget
))
7303 insn
= get_last_insn ();
7306 if (! INSN_P (insn
))
7307 insn
= prev_nonnote_insn (insn
);
7311 target
= gen_reg_rtx (GET_MODE (subtarget
));
7312 insn
= emit_move_insn (target
, subtarget
);
7315 /* Collect the data for the note. */
7316 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7317 vtbl_ref
= plus_constant (vtbl_ref
,
7318 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7319 /* Discard the initial CONST that was added. */
7320 vtbl_ref
= XEXP (vtbl_ref
, 0);
7323 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7328 /* Intended for a reference to a buffer of a file-object in Pascal.
7329 But it's not certain that a special tree code will really be
7330 necessary for these. INDIRECT_REF might work for them. */
7336 /* Pascal set IN expression.
7339 rlo = set_low - (set_low%bits_per_word);
7340 the_word = set [ (index - rlo)/bits_per_word ];
7341 bit_index = index % bits_per_word;
7342 bitmask = 1 << bit_index;
7343 return !!(the_word & bitmask); */
7345 tree set
= TREE_OPERAND (exp
, 0);
7346 tree index
= TREE_OPERAND (exp
, 1);
7347 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7348 tree set_type
= TREE_TYPE (set
);
7349 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7350 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7351 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7352 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7353 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7354 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7355 rtx setaddr
= XEXP (setval
, 0);
7356 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7358 rtx diff
, quo
, rem
, addr
, bit
, result
;
7360 /* If domain is empty, answer is no. Likewise if index is constant
7361 and out of bounds. */
7362 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7363 && TREE_CODE (set_low_bound
) == INTEGER_CST
7364 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7365 || (TREE_CODE (index
) == INTEGER_CST
7366 && TREE_CODE (set_low_bound
) == INTEGER_CST
7367 && tree_int_cst_lt (index
, set_low_bound
))
7368 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7369 && TREE_CODE (index
) == INTEGER_CST
7370 && tree_int_cst_lt (set_high_bound
, index
))))
7374 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7376 /* If we get here, we have to generate the code for both cases
7377 (in range and out of range). */
7379 op0
= gen_label_rtx ();
7380 op1
= gen_label_rtx ();
7382 if (! (GET_CODE (index_val
) == CONST_INT
7383 && GET_CODE (lo_r
) == CONST_INT
))
7385 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7386 GET_MODE (index_val
), iunsignedp
, 0, op1
);
7389 if (! (GET_CODE (index_val
) == CONST_INT
7390 && GET_CODE (hi_r
) == CONST_INT
))
7392 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7393 GET_MODE (index_val
), iunsignedp
, 0, op1
);
7396 /* Calculate the element number of bit zero in the first word
7398 if (GET_CODE (lo_r
) == CONST_INT
)
7399 rlow
= GEN_INT (INTVAL (lo_r
)
7400 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7402 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7403 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7404 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7406 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7407 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7409 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7410 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7411 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7412 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7414 addr
= memory_address (byte_mode
,
7415 expand_binop (index_mode
, add_optab
, diff
,
7416 setaddr
, NULL_RTX
, iunsignedp
,
7419 /* Extract the bit we want to examine. */
7420 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7421 gen_rtx_MEM (byte_mode
, addr
),
7422 make_tree (TREE_TYPE (index
), rem
),
7424 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7425 GET_MODE (target
) == byte_mode
? target
: 0,
7426 1, OPTAB_LIB_WIDEN
);
7428 if (result
!= target
)
7429 convert_move (target
, result
, 1);
7431 /* Output the code to handle the out-of-range case. */
7434 emit_move_insn (target
, const0_rtx
);
7439 case WITH_CLEANUP_EXPR
:
7440 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7442 WITH_CLEANUP_EXPR_RTL (exp
)
7443 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7444 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 1));
7446 /* That's it for this cleanup. */
7447 TREE_OPERAND (exp
, 1) = 0;
7449 return WITH_CLEANUP_EXPR_RTL (exp
);
7451 case CLEANUP_POINT_EXPR
:
7453 /* Start a new binding layer that will keep track of all cleanup
7454 actions to be performed. */
7455 expand_start_bindings (2);
7457 target_temp_slot_level
= temp_slot_level
;
7459 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7460 /* If we're going to use this value, load it up now. */
7462 op0
= force_not_mem (op0
);
7463 preserve_temp_slots (op0
);
7464 expand_end_bindings (NULL_TREE
, 0, 0);
7469 /* Check for a built-in function. */
7470 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7471 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7473 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7475 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7476 == BUILT_IN_FRONTEND
)
7477 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7479 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7482 return expand_call (exp
, target
, ignore
);
7484 case NON_LVALUE_EXPR
:
7487 case REFERENCE_EXPR
:
7488 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7491 if (TREE_CODE (type
) == UNION_TYPE
)
7493 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7495 /* If both input and output are BLKmode, this conversion
7496 isn't actually doing anything unless we need to make the
7497 alignment stricter. */
7498 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
7499 && (TYPE_ALIGN (type
) <= TYPE_ALIGN (valtype
)
7500 || TYPE_ALIGN (type
) >= BIGGEST_ALIGNMENT
))
7501 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7505 target
= assign_temp (type
, 0, 1, 1);
7507 if (GET_CODE (target
) == MEM
)
7508 /* Store data into beginning of memory target. */
7509 store_expr (TREE_OPERAND (exp
, 0),
7510 adjust_address (target
, TYPE_MODE (valtype
), 0), 0);
7512 else if (GET_CODE (target
) == REG
)
7513 /* Store this field into a union of the proper type. */
7514 store_field (target
,
7515 MIN ((int_size_in_bytes (TREE_TYPE
7516 (TREE_OPERAND (exp
, 0)))
7518 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7519 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7520 VOIDmode
, 0, int_size_in_bytes (type
), 0);
7524 /* Return the entire union. */
7528 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7530 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7533 /* If the signedness of the conversion differs and OP0 is
7534 a promoted SUBREG, clear that indication since we now
7535 have to do the proper extension. */
7536 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7537 && GET_CODE (op0
) == SUBREG
)
7538 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7543 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7544 if (GET_MODE (op0
) == mode
)
7547 /* If OP0 is a constant, just convert it into the proper mode. */
7548 if (CONSTANT_P (op0
))
7550 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7551 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7553 if (modifier
== EXPAND_INITIALIZER
)
7554 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7558 convert_to_mode (mode
, op0
,
7559 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7561 convert_move (target
, op0
,
7562 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7566 /* We come here from MINUS_EXPR when the second operand is a
7569 this_optab
= ! unsignedp
&& flag_trapv
7570 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7571 ? addv_optab
: add_optab
;
7573 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7574 something else, make sure we add the register to the constant and
7575 then to the other thing. This case can occur during strength
7576 reduction and doing it this way will produce better code if the
7577 frame pointer or argument pointer is eliminated.
7579 fold-const.c will ensure that the constant is always in the inner
7580 PLUS_EXPR, so the only case we need to do anything about is if
7581 sp, ap, or fp is our second argument, in which case we must swap
7582 the innermost first argument and our second argument. */
7584 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7585 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7586 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7587 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7588 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7589 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7591 tree t
= TREE_OPERAND (exp
, 1);
7593 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7594 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7597 /* If the result is to be ptr_mode and we are adding an integer to
7598 something, we might be forming a constant. So try to use
7599 plus_constant. If it produces a sum and we can't accept it,
7600 use force_operand. This allows P = &ARR[const] to generate
7601 efficient code on machines where a SYMBOL_REF is not a valid
7604 If this is an EXPAND_SUM call, always return the sum. */
7605 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7606 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7608 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7609 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7610 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7614 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7616 /* Use immed_double_const to ensure that the constant is
7617 truncated according to the mode of OP1, then sign extended
7618 to a HOST_WIDE_INT. Using the constant directly can result
7619 in non-canonical RTL in a 64x32 cross compile. */
7621 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7623 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7624 op1
= plus_constant (op1
, INTVAL (constant_part
));
7625 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7626 op1
= force_operand (op1
, target
);
7630 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7631 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7632 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7636 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7638 if (! CONSTANT_P (op0
))
7640 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7641 VOIDmode
, modifier
);
7642 /* Don't go to both_summands if modifier
7643 says it's not right to return a PLUS. */
7644 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7648 /* Use immed_double_const to ensure that the constant is
7649 truncated according to the mode of OP1, then sign extended
7650 to a HOST_WIDE_INT. Using the constant directly can result
7651 in non-canonical RTL in a 64x32 cross compile. */
7653 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7655 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7656 op0
= plus_constant (op0
, INTVAL (constant_part
));
7657 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7658 op0
= force_operand (op0
, target
);
7663 /* No sense saving up arithmetic to be done
7664 if it's all in the wrong mode to form part of an address.
7665 And force_operand won't know whether to sign-extend or
7667 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7668 || mode
!= ptr_mode
)
7671 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7674 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7675 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7678 /* Make sure any term that's a sum with a constant comes last. */
7679 if (GET_CODE (op0
) == PLUS
7680 && CONSTANT_P (XEXP (op0
, 1)))
7686 /* If adding to a sum including a constant,
7687 associate it to put the constant outside. */
7688 if (GET_CODE (op1
) == PLUS
7689 && CONSTANT_P (XEXP (op1
, 1)))
7691 rtx constant_term
= const0_rtx
;
7693 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7696 /* Ensure that MULT comes first if there is one. */
7697 else if (GET_CODE (op0
) == MULT
)
7698 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7700 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7702 /* Let's also eliminate constants from op0 if possible. */
7703 op0
= eliminate_constant_term (op0
, &constant_term
);
7705 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7706 their sum should be a constant. Form it into OP1, since the
7707 result we want will then be OP0 + OP1. */
7709 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7714 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7717 /* Put a constant term last and put a multiplication first. */
7718 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7719 temp
= op1
, op1
= op0
, op0
= temp
;
7721 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7722 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7725 /* For initializers, we are allowed to return a MINUS of two
7726 symbolic constants. Here we handle all cases when both operands
7728 /* Handle difference of two symbolic constants,
7729 for the sake of an initializer. */
7730 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7731 && really_constant_p (TREE_OPERAND (exp
, 0))
7732 && really_constant_p (TREE_OPERAND (exp
, 1)))
7734 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7735 VOIDmode
, ro_modifier
);
7736 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7737 VOIDmode
, ro_modifier
);
7739 /* If the last operand is a CONST_INT, use plus_constant of
7740 the negated constant. Else make the MINUS. */
7741 if (GET_CODE (op1
) == CONST_INT
)
7742 return plus_constant (op0
, - INTVAL (op1
));
7744 return gen_rtx_MINUS (mode
, op0
, op1
);
7746 /* Convert A - const to A + (-const). */
7747 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7749 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7750 TREE_OPERAND (exp
, 1)));
7752 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7753 /* If we can't negate the constant in TYPE, leave it alone and
7754 expand_binop will negate it for us. We used to try to do it
7755 here in the signed version of TYPE, but that doesn't work
7756 on POINTER_TYPEs. */;
7759 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7763 this_optab
= ! unsignedp
&& flag_trapv
7764 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7765 ? subv_optab
: sub_optab
;
7769 /* If first operand is constant, swap them.
7770 Thus the following special case checks need only
7771 check the second operand. */
7772 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7774 tree t1
= TREE_OPERAND (exp
, 0);
7775 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7776 TREE_OPERAND (exp
, 1) = t1
;
7779 /* Attempt to return something suitable for generating an
7780 indexed address, for machines that support that. */
7782 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7783 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7784 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7786 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7789 /* Apply distributive law if OP0 is x+c. */
7790 if (GET_CODE (op0
) == PLUS
7791 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7796 (mode
, XEXP (op0
, 0),
7797 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7798 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7799 * INTVAL (XEXP (op0
, 1))));
7801 if (GET_CODE (op0
) != REG
)
7802 op0
= force_operand (op0
, NULL_RTX
);
7803 if (GET_CODE (op0
) != REG
)
7804 op0
= copy_to_mode_reg (mode
, op0
);
7807 gen_rtx_MULT (mode
, op0
,
7808 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7811 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7814 /* Check for multiplying things that have been extended
7815 from a narrower type. If this machine supports multiplying
7816 in that narrower type with a result in the desired type,
7817 do it that way, and avoid the explicit type-conversion. */
7818 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7819 && TREE_CODE (type
) == INTEGER_TYPE
7820 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7821 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7822 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7823 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7824 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7825 /* Don't use a widening multiply if a shift will do. */
7826 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7827 > HOST_BITS_PER_WIDE_INT
)
7828 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7830 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7831 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7833 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7834 /* If both operands are extended, they must either both
7835 be zero-extended or both be sign-extended. */
7836 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7838 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7840 enum machine_mode innermode
7841 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7842 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7843 ? smul_widen_optab
: umul_widen_optab
);
7844 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7845 ? umul_widen_optab
: smul_widen_optab
);
7846 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7848 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7850 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7851 NULL_RTX
, VOIDmode
, 0);
7852 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7853 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7856 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7857 NULL_RTX
, VOIDmode
, 0);
7860 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7861 && innermode
== word_mode
)
7864 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7865 NULL_RTX
, VOIDmode
, 0);
7866 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7867 op1
= convert_modes (innermode
, mode
,
7868 expand_expr (TREE_OPERAND (exp
, 1),
7869 NULL_RTX
, VOIDmode
, 0),
7872 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7873 NULL_RTX
, VOIDmode
, 0);
7874 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7875 unsignedp
, OPTAB_LIB_WIDEN
);
7876 htem
= expand_mult_highpart_adjust (innermode
,
7877 gen_highpart (innermode
, temp
),
7879 gen_highpart (innermode
, temp
),
7881 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7886 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7887 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7888 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7890 case TRUNC_DIV_EXPR
:
7891 case FLOOR_DIV_EXPR
:
7893 case ROUND_DIV_EXPR
:
7894 case EXACT_DIV_EXPR
:
7895 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7897 /* Possible optimization: compute the dividend with EXPAND_SUM
7898 then if the divisor is constant can optimize the case
7899 where some terms of the dividend have coeffs divisible by it. */
7900 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7901 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7902 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7905 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7906 expensive divide. If not, combine will rebuild the original
7908 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7909 && !real_onep (TREE_OPERAND (exp
, 0)))
7910 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7911 build (RDIV_EXPR
, type
,
7912 build_real (type
, dconst1
),
7913 TREE_OPERAND (exp
, 1))),
7914 target
, tmode
, unsignedp
);
7915 this_optab
= sdiv_optab
;
7918 case TRUNC_MOD_EXPR
:
7919 case FLOOR_MOD_EXPR
:
7921 case ROUND_MOD_EXPR
:
7922 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7924 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7925 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7926 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7928 case FIX_ROUND_EXPR
:
7929 case FIX_FLOOR_EXPR
:
7931 abort (); /* Not used for C. */
7933 case FIX_TRUNC_EXPR
:
7934 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7936 target
= gen_reg_rtx (mode
);
7937 expand_fix (target
, op0
, unsignedp
);
7941 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7943 target
= gen_reg_rtx (mode
);
7944 /* expand_float can't figure out what to do if FROM has VOIDmode.
7945 So give it the correct mode. With -O, cse will optimize this. */
7946 if (GET_MODE (op0
) == VOIDmode
)
7947 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7949 expand_float (target
, op0
,
7950 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7954 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7955 temp
= expand_unop (mode
,
7956 ! unsignedp
&& flag_trapv
7957 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7958 ? negv_optab
: neg_optab
, op0
, target
, 0);
7964 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7966 /* Handle complex values specially. */
7967 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7968 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7969 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
7971 /* Unsigned abs is simply the operand. Testing here means we don't
7972 risk generating incorrect code below. */
7973 if (TREE_UNSIGNED (type
))
7976 return expand_abs (mode
, op0
, target
, unsignedp
,
7977 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7981 target
= original_target
;
7982 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7983 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7984 || GET_MODE (target
) != mode
7985 || (GET_CODE (target
) == REG
7986 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7987 target
= gen_reg_rtx (mode
);
7988 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7989 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7991 /* First try to do it with a special MIN or MAX instruction.
7992 If that does not win, use a conditional jump to select the proper
7994 this_optab
= (TREE_UNSIGNED (type
)
7995 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7996 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7998 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8003 /* At this point, a MEM target is no longer useful; we will get better
8006 if (GET_CODE (target
) == MEM
)
8007 target
= gen_reg_rtx (mode
);
8010 emit_move_insn (target
, op0
);
8012 op0
= gen_label_rtx ();
8014 /* If this mode is an integer too wide to compare properly,
8015 compare word by word. Rely on cse to optimize constant cases. */
8016 if (GET_MODE_CLASS (mode
) == MODE_INT
8017 && ! can_compare_p (GE
, mode
, ccp_jump
))
8019 if (code
== MAX_EXPR
)
8020 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8021 target
, op1
, NULL_RTX
, op0
);
8023 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8024 op1
, target
, NULL_RTX
, op0
);
8028 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8029 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8030 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
8033 emit_move_insn (target
, op1
);
8038 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8039 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8045 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8046 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
8051 /* ??? Can optimize bitwise operations with one arg constant.
8052 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8053 and (a bitwise1 b) bitwise2 b (etc)
8054 but that is probably not worth while. */
8056 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8057 boolean values when we want in all cases to compute both of them. In
8058 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8059 as actual zero-or-1 values and then bitwise anding. In cases where
8060 there cannot be any side effects, better code would be made by
8061 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8062 how to recognize those cases. */
8064 case TRUTH_AND_EXPR
:
8066 this_optab
= and_optab
;
8071 this_optab
= ior_optab
;
8074 case TRUTH_XOR_EXPR
:
8076 this_optab
= xor_optab
;
8083 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8085 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8086 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8089 /* Could determine the answer when only additive constants differ. Also,
8090 the addition of one can be handled by changing the condition. */
8097 case UNORDERED_EXPR
:
8104 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
8108 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8109 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8111 && GET_CODE (original_target
) == REG
8112 && (GET_MODE (original_target
)
8113 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8115 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8118 if (temp
!= original_target
)
8119 temp
= copy_to_reg (temp
);
8121 op1
= gen_label_rtx ();
8122 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8123 GET_MODE (temp
), unsignedp
, 0, op1
);
8124 emit_move_insn (temp
, const1_rtx
);
8129 /* If no set-flag instruction, must generate a conditional
8130 store into a temporary variable. Drop through
8131 and handle this like && and ||. */
8133 case TRUTH_ANDIF_EXPR
:
8134 case TRUTH_ORIF_EXPR
:
8136 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
8137 /* Make sure we don't have a hard reg (such as function's return
8138 value) live across basic blocks, if not optimizing. */
8139 || (!optimize
&& GET_CODE (target
) == REG
8140 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8141 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8144 emit_clr_insn (target
);
8146 op1
= gen_label_rtx ();
8147 jumpifnot (exp
, op1
);
8150 emit_0_to_1_insn (target
);
8153 return ignore
? const0_rtx
: target
;
8155 case TRUTH_NOT_EXPR
:
8156 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8157 /* The parser is careful to generate TRUTH_NOT_EXPR
8158 only with operands that are always zero or one. */
8159 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8160 target
, 1, OPTAB_LIB_WIDEN
);
8166 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8168 return expand_expr (TREE_OPERAND (exp
, 1),
8169 (ignore
? const0_rtx
: target
),
8173 /* If we would have a "singleton" (see below) were it not for a
8174 conversion in each arm, bring that conversion back out. */
8175 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8176 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8177 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8178 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8180 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8181 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8183 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8184 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8185 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8186 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8187 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8188 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8189 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8190 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8191 return expand_expr (build1 (NOP_EXPR
, type
,
8192 build (COND_EXPR
, TREE_TYPE (iftrue
),
8193 TREE_OPERAND (exp
, 0),
8195 target
, tmode
, modifier
);
8199 /* Note that COND_EXPRs whose type is a structure or union
8200 are required to be constructed to contain assignments of
8201 a temporary variable, so that we can evaluate them here
8202 for side effect only. If type is void, we must do likewise. */
8204 /* If an arm of the branch requires a cleanup,
8205 only that cleanup is performed. */
8208 tree binary_op
= 0, unary_op
= 0;
8210 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8211 convert it to our mode, if necessary. */
8212 if (integer_onep (TREE_OPERAND (exp
, 1))
8213 && integer_zerop (TREE_OPERAND (exp
, 2))
8214 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8218 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8223 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
8224 if (GET_MODE (op0
) == mode
)
8228 target
= gen_reg_rtx (mode
);
8229 convert_move (target
, op0
, unsignedp
);
8233 /* Check for X ? A + B : A. If we have this, we can copy A to the
8234 output and conditionally add B. Similarly for unary operations.
8235 Don't do this if X has side-effects because those side effects
8236 might affect A or B and the "?" operation is a sequence point in
8237 ANSI. (operand_equal_p tests for side effects.) */
8239 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8240 && operand_equal_p (TREE_OPERAND (exp
, 2),
8241 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8242 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8243 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8244 && operand_equal_p (TREE_OPERAND (exp
, 1),
8245 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8246 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8247 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8248 && operand_equal_p (TREE_OPERAND (exp
, 2),
8249 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8250 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8251 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8252 && operand_equal_p (TREE_OPERAND (exp
, 1),
8253 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8254 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8256 /* If we are not to produce a result, we have no target. Otherwise,
8257 if a target was specified use it; it will not be used as an
8258 intermediate target unless it is safe. If no target, use a
8263 else if (original_target
8264 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8265 || (singleton
&& GET_CODE (original_target
) == REG
8266 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8267 && original_target
== var_rtx (singleton
)))
8268 && GET_MODE (original_target
) == mode
8269 #ifdef HAVE_conditional_move
8270 && (! can_conditionally_move_p (mode
)
8271 || GET_CODE (original_target
) == REG
8272 || TREE_ADDRESSABLE (type
))
8274 && (GET_CODE (original_target
) != MEM
8275 || TREE_ADDRESSABLE (type
)))
8276 temp
= original_target
;
8277 else if (TREE_ADDRESSABLE (type
))
8280 temp
= assign_temp (type
, 0, 0, 1);
8282 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8283 do the test of X as a store-flag operation, do this as
8284 A + ((X != 0) << log C). Similarly for other simple binary
8285 operators. Only do for C == 1 if BRANCH_COST is low. */
8286 if (temp
&& singleton
&& binary_op
8287 && (TREE_CODE (binary_op
) == PLUS_EXPR
8288 || TREE_CODE (binary_op
) == MINUS_EXPR
8289 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8290 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8291 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8292 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8293 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8296 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8297 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8298 ? addv_optab
: add_optab
)
8299 : TREE_CODE (binary_op
) == MINUS_EXPR
8300 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8301 ? subv_optab
: sub_optab
)
8302 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8305 /* If we had X ? A : A + 1, do this as A + (X == 0).
8307 We have to invert the truth value here and then put it
8308 back later if do_store_flag fails. We cannot simply copy
8309 TREE_OPERAND (exp, 0) to another variable and modify that
8310 because invert_truthvalue can modify the tree pointed to
8312 if (singleton
== TREE_OPERAND (exp
, 1))
8313 TREE_OPERAND (exp
, 0)
8314 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8316 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8317 (safe_from_p (temp
, singleton
, 1)
8319 mode
, BRANCH_COST
<= 1);
8321 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8322 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8323 build_int_2 (tree_log2
8327 (safe_from_p (temp
, singleton
, 1)
8328 ? temp
: NULL_RTX
), 0);
8332 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8333 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8334 unsignedp
, OPTAB_LIB_WIDEN
);
8336 else if (singleton
== TREE_OPERAND (exp
, 1))
8337 TREE_OPERAND (exp
, 0)
8338 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8341 do_pending_stack_adjust ();
8343 op0
= gen_label_rtx ();
8345 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8349 /* If the target conflicts with the other operand of the
8350 binary op, we can't use it. Also, we can't use the target
8351 if it is a hard register, because evaluating the condition
8352 might clobber it. */
8354 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8355 || (GET_CODE (temp
) == REG
8356 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8357 temp
= gen_reg_rtx (mode
);
8358 store_expr (singleton
, temp
, 0);
8361 expand_expr (singleton
,
8362 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8363 if (singleton
== TREE_OPERAND (exp
, 1))
8364 jumpif (TREE_OPERAND (exp
, 0), op0
);
8366 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8368 start_cleanup_deferral ();
8369 if (binary_op
&& temp
== 0)
8370 /* Just touch the other operand. */
8371 expand_expr (TREE_OPERAND (binary_op
, 1),
8372 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8374 store_expr (build (TREE_CODE (binary_op
), type
,
8375 make_tree (type
, temp
),
8376 TREE_OPERAND (binary_op
, 1)),
8379 store_expr (build1 (TREE_CODE (unary_op
), type
,
8380 make_tree (type
, temp
)),
8384 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8385 comparison operator. If we have one of these cases, set the
8386 output to A, branch on A (cse will merge these two references),
8387 then set the output to FOO. */
8389 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8390 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8391 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8392 TREE_OPERAND (exp
, 1), 0)
8393 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8394 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8395 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8397 if (GET_CODE (temp
) == REG
8398 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8399 temp
= gen_reg_rtx (mode
);
8400 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8401 jumpif (TREE_OPERAND (exp
, 0), op0
);
8403 start_cleanup_deferral ();
8404 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8408 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8409 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8410 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8411 TREE_OPERAND (exp
, 2), 0)
8412 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8413 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8414 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8416 if (GET_CODE (temp
) == REG
8417 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8418 temp
= gen_reg_rtx (mode
);
8419 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8420 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8422 start_cleanup_deferral ();
8423 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8428 op1
= gen_label_rtx ();
8429 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8431 start_cleanup_deferral ();
8433 /* One branch of the cond can be void, if it never returns. For
8434 example A ? throw : E */
8436 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8437 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8439 expand_expr (TREE_OPERAND (exp
, 1),
8440 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8441 end_cleanup_deferral ();
8443 emit_jump_insn (gen_jump (op1
));
8446 start_cleanup_deferral ();
8448 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8449 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8451 expand_expr (TREE_OPERAND (exp
, 2),
8452 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8455 end_cleanup_deferral ();
8466 /* Something needs to be initialized, but we didn't know
8467 where that thing was when building the tree. For example,
8468 it could be the return value of a function, or a parameter
8469 to a function which lays down in the stack, or a temporary
8470 variable which must be passed by reference.
8472 We guarantee that the expression will either be constructed
8473 or copied into our original target. */
8475 tree slot
= TREE_OPERAND (exp
, 0);
8476 tree cleanups
= NULL_TREE
;
8479 if (TREE_CODE (slot
) != VAR_DECL
)
8483 target
= original_target
;
8485 /* Set this here so that if we get a target that refers to a
8486 register variable that's already been used, put_reg_into_stack
8487 knows that it should fix up those uses. */
8488 TREE_USED (slot
) = 1;
8492 if (DECL_RTL_SET_P (slot
))
8494 target
= DECL_RTL (slot
);
8495 /* If we have already expanded the slot, so don't do
8497 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8502 target
= assign_temp (type
, 2, 0, 1);
8503 /* All temp slots at this level must not conflict. */
8504 preserve_temp_slots (target
);
8505 SET_DECL_RTL (slot
, target
);
8506 if (TREE_ADDRESSABLE (slot
))
8507 put_var_into_stack (slot
);
8509 /* Since SLOT is not known to the called function
8510 to belong to its stack frame, we must build an explicit
8511 cleanup. This case occurs when we must build up a reference
8512 to pass the reference as an argument. In this case,
8513 it is very likely that such a reference need not be
8516 if (TREE_OPERAND (exp
, 2) == 0)
8517 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8518 cleanups
= TREE_OPERAND (exp
, 2);
8523 /* This case does occur, when expanding a parameter which
8524 needs to be constructed on the stack. The target
8525 is the actual stack address that we want to initialize.
8526 The function we call will perform the cleanup in this case. */
8528 /* If we have already assigned it space, use that space,
8529 not target that we were passed in, as our target
8530 parameter is only a hint. */
8531 if (DECL_RTL_SET_P (slot
))
8533 target
= DECL_RTL (slot
);
8534 /* If we have already expanded the slot, so don't do
8536 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8541 SET_DECL_RTL (slot
, target
);
8542 /* If we must have an addressable slot, then make sure that
8543 the RTL that we just stored in slot is OK. */
8544 if (TREE_ADDRESSABLE (slot
))
8545 put_var_into_stack (slot
);
8549 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8550 /* Mark it as expanded. */
8551 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8553 store_expr (exp1
, target
, 0);
8555 expand_decl_cleanup (NULL_TREE
, cleanups
);
8562 tree lhs
= TREE_OPERAND (exp
, 0);
8563 tree rhs
= TREE_OPERAND (exp
, 1);
8565 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8571 /* If lhs is complex, expand calls in rhs before computing it.
8572 That's so we don't compute a pointer and save it over a
8573 call. If lhs is simple, compute it first so we can give it
8574 as a target if the rhs is just a call. This avoids an
8575 extra temp and copy and that prevents a partial-subsumption
8576 which makes bad code. Actually we could treat
8577 component_ref's of vars like vars. */
8579 tree lhs
= TREE_OPERAND (exp
, 0);
8580 tree rhs
= TREE_OPERAND (exp
, 1);
8584 /* Check for |= or &= of a bitfield of size one into another bitfield
8585 of size 1. In this case, (unless we need the result of the
8586 assignment) we can do this more efficiently with a
8587 test followed by an assignment, if necessary.
8589 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8590 things change so we do, this code should be enhanced to
8593 && TREE_CODE (lhs
) == COMPONENT_REF
8594 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8595 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8596 && TREE_OPERAND (rhs
, 0) == lhs
8597 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8598 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8599 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8601 rtx label
= gen_label_rtx ();
8603 do_jump (TREE_OPERAND (rhs
, 1),
8604 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8605 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8606 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8607 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8609 : integer_zero_node
)),
8611 do_pending_stack_adjust ();
8616 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8622 if (!TREE_OPERAND (exp
, 0))
8623 expand_null_return ();
8625 expand_return (TREE_OPERAND (exp
, 0));
8628 case PREINCREMENT_EXPR
:
8629 case PREDECREMENT_EXPR
:
8630 return expand_increment (exp
, 0, ignore
);
8632 case POSTINCREMENT_EXPR
:
8633 case POSTDECREMENT_EXPR
:
8634 /* Faster to treat as pre-increment if result is not used. */
8635 return expand_increment (exp
, ! ignore
, ignore
);
8638 /* Are we taking the address of a nested function? */
8639 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8640 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8641 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8642 && ! TREE_STATIC (exp
))
8644 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8645 op0
= force_operand (op0
, target
);
8647 /* If we are taking the address of something erroneous, just
8649 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8651 /* If we are taking the address of a constant and are at the
8652 top level, we have to use output_constant_def since we can't
8653 call force_const_mem at top level. */
8655 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8656 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8658 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8661 /* We make sure to pass const0_rtx down if we came in with
8662 ignore set, to avoid doing the cleanups twice for something. */
8663 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8664 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8665 (modifier
== EXPAND_INITIALIZER
8666 ? modifier
: EXPAND_CONST_ADDRESS
));
8668 /* If we are going to ignore the result, OP0 will have been set
8669 to const0_rtx, so just return it. Don't get confused and
8670 think we are taking the address of the constant. */
8674 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8675 clever and returns a REG when given a MEM. */
8676 op0
= protect_from_queue (op0
, 1);
8678 /* We would like the object in memory. If it is a constant, we can
8679 have it be statically allocated into memory. For a non-constant,
8680 we need to allocate some memory and store the value into it. */
8682 if (CONSTANT_P (op0
))
8683 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8685 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8686 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8687 || GET_CODE (op0
) == PARALLEL
)
8689 /* If this object is in a register, it must be not
8691 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8692 tree nt
= build_qualified_type (inner_type
,
8693 (TYPE_QUALS (inner_type
)
8694 | TYPE_QUAL_CONST
));
8695 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8697 if (GET_CODE (op0
) == PARALLEL
)
8698 /* Handle calls that pass values in multiple non-contiguous
8699 locations. The Irix 6 ABI has examples of this. */
8700 emit_group_store (memloc
, op0
, int_size_in_bytes (inner_type
));
8702 emit_move_insn (memloc
, op0
);
8707 if (GET_CODE (op0
) != MEM
)
8710 mark_temp_addr_taken (op0
);
8711 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8713 op0
= XEXP (op0
, 0);
8714 #ifdef POINTERS_EXTEND_UNSIGNED
8715 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8716 && mode
== ptr_mode
)
8717 op0
= convert_memory_address (ptr_mode
, op0
);
8722 op0
= force_operand (XEXP (op0
, 0), target
);
8725 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8726 op0
= force_reg (Pmode
, op0
);
8728 if (GET_CODE (op0
) == REG
8729 && ! REG_USERVAR_P (op0
))
8730 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8732 #ifdef POINTERS_EXTEND_UNSIGNED
8733 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8734 && mode
== ptr_mode
)
8735 op0
= convert_memory_address (ptr_mode
, op0
);
8740 case ENTRY_VALUE_EXPR
:
8743 /* COMPLEX type for Extended Pascal & Fortran */
8746 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8749 /* Get the rtx code of the operands. */
8750 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8751 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8754 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8758 /* Move the real (op0) and imaginary (op1) parts to their location. */
8759 emit_move_insn (gen_realpart (mode
, target
), op0
);
8760 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8762 insns
= get_insns ();
8765 /* Complex construction should appear as a single unit. */
8766 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8767 each with a separate pseudo as destination.
8768 It's not correct for flow to treat them as a unit. */
8769 if (GET_CODE (target
) != CONCAT
)
8770 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8778 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8779 return gen_realpart (mode
, op0
);
8782 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8783 return gen_imagpart (mode
, op0
);
8787 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8791 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8794 target
= gen_reg_rtx (mode
);
8798 /* Store the realpart and the negated imagpart to target. */
8799 emit_move_insn (gen_realpart (partmode
, target
),
8800 gen_realpart (partmode
, op0
));
8802 imag_t
= gen_imagpart (partmode
, target
);
8803 temp
= expand_unop (partmode
,
8804 ! unsignedp
&& flag_trapv
8805 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8806 ? negv_optab
: neg_optab
,
8807 gen_imagpart (partmode
, op0
), imag_t
, 0);
8809 emit_move_insn (imag_t
, temp
);
8811 insns
= get_insns ();
8814 /* Conjugate should appear as a single unit
8815 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8816 each with a separate pseudo as destination.
8817 It's not correct for flow to treat them as a unit. */
8818 if (GET_CODE (target
) != CONCAT
)
8819 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8826 case TRY_CATCH_EXPR
:
8828 tree handler
= TREE_OPERAND (exp
, 1);
8830 expand_eh_region_start ();
8832 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8834 expand_eh_region_end_cleanup (handler
);
8839 case TRY_FINALLY_EXPR
:
8841 tree try_block
= TREE_OPERAND (exp
, 0);
8842 tree finally_block
= TREE_OPERAND (exp
, 1);
8843 rtx finally_label
= gen_label_rtx ();
8844 rtx done_label
= gen_label_rtx ();
8845 rtx return_link
= gen_reg_rtx (Pmode
);
8846 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8847 (tree
) finally_label
, (tree
) return_link
);
8848 TREE_SIDE_EFFECTS (cleanup
) = 1;
8850 /* Start a new binding layer that will keep track of all cleanup
8851 actions to be performed. */
8852 expand_start_bindings (2);
8854 target_temp_slot_level
= temp_slot_level
;
8856 expand_decl_cleanup (NULL_TREE
, cleanup
);
8857 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8859 preserve_temp_slots (op0
);
8860 expand_end_bindings (NULL_TREE
, 0, 0);
8861 emit_jump (done_label
);
8862 emit_label (finally_label
);
8863 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8864 emit_indirect_jump (return_link
);
8865 emit_label (done_label
);
8869 case GOTO_SUBROUTINE_EXPR
:
8871 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8872 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8873 rtx return_address
= gen_label_rtx ();
8874 emit_move_insn (return_link
,
8875 gen_rtx_LABEL_REF (Pmode
, return_address
));
8877 emit_label (return_address
);
8882 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8885 return get_exception_pointer (cfun
);
8888 /* Function descriptors are not valid except for as
8889 initialization constants, and should not be expanded. */
8893 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8896 /* Here to do an ordinary binary operator, generating an instruction
8897 from the optab already placed in `this_optab'. */
8899 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8901 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8902 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8904 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8905 unsignedp
, OPTAB_LIB_WIDEN
);
8911 /* Similar to expand_expr, except that we don't specify a target, target
8912 mode, or modifier and we return the alignment of the inner type. This is
8913 used in cases where it is not necessary to align the result to the
8914 alignment of its type as long as we know the alignment of the result, for
8915 example for comparisons of BLKmode values. */
8918 expand_expr_unaligned (exp
, palign
)
8920 unsigned int *palign
;
8923 tree type
= TREE_TYPE (exp
);
8924 enum machine_mode mode
= TYPE_MODE (type
);
8926 /* Default the alignment we return to that of the type. */
8927 *palign
= TYPE_ALIGN (type
);
8929 /* The only cases in which we do anything special is if the resulting mode
8931 if (mode
!= BLKmode
)
8932 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8934 switch (TREE_CODE (exp
))
8938 case NON_LVALUE_EXPR
:
8939 /* Conversions between BLKmode values don't change the underlying
8940 alignment or value. */
8941 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == BLKmode
)
8942 return expand_expr_unaligned (TREE_OPERAND (exp
, 0), palign
);
8946 /* Much of the code for this case is copied directly from expand_expr.
8947 We need to duplicate it here because we will do something different
8948 in the fall-through case, so we need to handle the same exceptions
8951 tree array
= TREE_OPERAND (exp
, 0);
8952 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8953 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8954 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
8957 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8960 /* Optimize the special-case of a zero lower bound.
8962 We convert the low_bound to sizetype to avoid some problems
8963 with constant folding. (E.g. suppose the lower bound is 1,
8964 and its mode is QI. Without the conversion, (ARRAY
8965 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8966 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8968 if (! integer_zerop (low_bound
))
8969 index
= size_diffop (index
, convert (sizetype
, low_bound
));
8971 /* If this is a constant index into a constant array,
8972 just get the value from the array. Handle both the cases when
8973 we have an explicit constructor and when our operand is a variable
8974 that was declared const. */
8976 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
8977 && host_integerp (index
, 0)
8978 && 0 > compare_tree_int (index
,
8979 list_length (CONSTRUCTOR_ELTS
8980 (TREE_OPERAND (exp
, 0)))))
8984 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
8985 i
= tree_low_cst (index
, 0);
8986 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
8990 return expand_expr_unaligned (fold (TREE_VALUE (elem
)), palign
);
8993 else if (optimize
>= 1
8994 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8995 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8996 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8998 if (TREE_CODE (index
) == INTEGER_CST
)
9000 tree init
= DECL_INITIAL (array
);
9002 if (TREE_CODE (init
) == CONSTRUCTOR
)
9006 for (elem
= CONSTRUCTOR_ELTS (init
);
9007 ! tree_int_cst_equal (TREE_PURPOSE (elem
), index
);
9008 elem
= TREE_CHAIN (elem
))
9012 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
9022 case ARRAY_RANGE_REF
:
9023 /* If the operand is a CONSTRUCTOR, we can just extract the
9024 appropriate field if it is present. Don't do this if we have
9025 already written the data since we want to refer to that copy
9026 and varasm.c assumes that's what we'll do. */
9027 if (TREE_CODE (exp
) == COMPONENT_REF
9028 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
9029 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
9033 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
9034 elt
= TREE_CHAIN (elt
))
9035 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
9036 /* Note that unlike the case in expand_expr, we know this is
9037 BLKmode and hence not an integer. */
9038 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
9042 enum machine_mode mode1
;
9043 HOST_WIDE_INT bitsize
, bitpos
;
9046 unsigned int alignment
;
9048 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9049 &mode1
, &unsignedp
, &volatilep
,
9052 /* If we got back the original object, something is wrong. Perhaps
9053 we are evaluating an expression too early. In any event, don't
9054 infinitely recurse. */
9058 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
9060 /* If this is a constant, put it into a register if it is a
9061 legitimate constant and OFFSET is 0 and memory if it isn't. */
9062 if (CONSTANT_P (op0
))
9064 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
9066 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
9068 op0
= force_reg (inner_mode
, op0
);
9070 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
9075 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
9077 /* If this object is in a register, put it into memory.
9078 This case can't occur in C, but can in Ada if we have
9079 unchecked conversion of an expression from a scalar type to
9080 an array or record type. */
9081 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
9082 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
9084 tree nt
= build_qualified_type (TREE_TYPE (tem
),
9085 (TYPE_QUALS (TREE_TYPE (tem
))
9086 | TYPE_QUAL_CONST
));
9087 rtx memloc
= assign_temp (nt
, 1, 1, 1);
9089 emit_move_insn (memloc
, op0
);
9093 if (GET_CODE (op0
) != MEM
)
9096 if (GET_MODE (offset_rtx
) != ptr_mode
)
9097 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
9099 #ifdef POINTERS_EXTEND_UNSIGNED
9100 if (GET_MODE (offset_rtx
) != Pmode
)
9101 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
9104 op0
= offset_address (op0
, offset_rtx
,
9105 highest_pow2_factor (offset
));
9108 /* Don't forget about volatility even if this is a bitfield. */
9109 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
9111 op0
= copy_rtx (op0
);
9112 MEM_VOLATILE_P (op0
) = 1;
9115 /* Check the access. */
9116 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
9121 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
9122 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
9124 /* Check the access right of the pointer. */
9125 in_check_memory_usage
= 1;
9126 if (size
> BITS_PER_UNIT
)
9127 emit_library_call (chkr_check_addr_libfunc
,
9128 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
9129 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
9130 TYPE_MODE (sizetype
),
9131 GEN_INT (MEMORY_USE_RO
),
9132 TYPE_MODE (integer_type_node
));
9133 in_check_memory_usage
= 0;
9136 /* In cases where an aligned union has an unaligned object
9137 as a field, we might be extracting a BLKmode value from
9138 an integer-mode (e.g., SImode) object. Handle this case
9139 by doing the extract into an object as wide as the field
9140 (which we know to be the width of a basic mode), then
9141 storing into memory, and changing the mode to BLKmode.
9142 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9143 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9144 if (mode1
== VOIDmode
9145 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
9146 || (SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
9147 && (TYPE_ALIGN (type
) > alignment
9148 || bitpos
% TYPE_ALIGN (type
) != 0)))
9150 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
9152 if (ext_mode
== BLKmode
)
9154 /* In this case, BITPOS must start at a byte boundary. */
9155 if (GET_CODE (op0
) != MEM
9156 || bitpos
% BITS_PER_UNIT
!= 0)
9159 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
9163 tree nt
= build_qualified_type (type_for_mode (ext_mode
, 0),
9165 rtx
new = assign_temp (nt
, 0, 1, 1);
9167 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
9168 unsignedp
, NULL_RTX
, ext_mode
,
9170 int_size_in_bytes (TREE_TYPE (tem
)));
9172 /* If the result is a record type and BITSIZE is narrower than
9173 the mode of OP0, an integral mode, and this is a big endian
9174 machine, we must put the field into the high-order bits. */
9175 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
9176 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
9177 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
9178 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
9179 size_int (GET_MODE_BITSIZE
9184 emit_move_insn (new, op0
);
9185 op0
= copy_rtx (new);
9186 PUT_MODE (op0
, BLKmode
);
9190 /* Get a reference to just this component. */
9191 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
9193 set_mem_attributes (op0
, exp
, 0);
9195 /* Adjust the alignment in case the bit position is not
9196 a multiple of the alignment of the inner object. */
9197 while (bitpos
% alignment
!= 0)
9200 if (GET_CODE (XEXP (op0
, 0)) == REG
)
9201 mark_reg_pointer (XEXP (op0
, 0), alignment
);
9203 MEM_IN_STRUCT_P (op0
) = 1;
9204 MEM_VOLATILE_P (op0
) |= volatilep
;
9206 *palign
= alignment
;
9215 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
9218 /* Return the tree node if a ARG corresponds to a string constant or zero
9219 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9220 in bytes within the string that ARG is accessing. The type of the
9221 offset will be `sizetype'. */
/* NOTE(review): this copy of the file is extraction-damaged -- the K&R
   parameter declarations, braces, and the trailing lines of this
   definition are absent (the embedded original line numbers jump, e.g.
   9221 -> 9224 and 9254 -> 9261).  The surviving tokens below are
   preserved byte-for-byte; only comments were added.  */
9224 string_constant (arg
, ptr_offset
)
/* Case 1: ARG is the address of a STRING_CST itself; offset is zero.  */
9230 if (TREE_CODE (arg
) == ADDR_EXPR
9231 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9233 *ptr_offset
= size_zero_node
;
9234 return TREE_OPERAND (arg
, 0);
/* Case 2: ARG is a PLUS_EXPR; if either operand is the address of a
   STRING_CST, the other operand is the byte offset (converted to
   sizetype as the header comment promises).  */
9236 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9238 tree arg0
= TREE_OPERAND (arg
, 0);
9239 tree arg1
= TREE_OPERAND (arg
, 1);
9244 if (TREE_CODE (arg0
) == ADDR_EXPR
9245 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9247 *ptr_offset
= convert (sizetype
, arg1
);
9248 return TREE_OPERAND (arg0
, 0);
9250 else if (TREE_CODE (arg1
) == ADDR_EXPR
9251 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9253 *ptr_offset
= convert (sizetype
, arg0
);
9254 return TREE_OPERAND (arg1
, 0);
9261 /* Expand code for a post- or pre- increment or decrement
9262 and return the RTX for the result.
9263 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9266 expand_increment (exp
, post
, ignore
)
9272 tree incremented
= TREE_OPERAND (exp
, 0);
9273 optab this_optab
= add_optab
;
9275 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9276 int op0_is_copy
= 0;
9277 int single_insn
= 0;
9278 /* 1 means we can't store into OP0 directly,
9279 because it is a subreg narrower than a word,
9280 and we don't dare clobber the rest of the word. */
9283 /* Stabilize any component ref that might need to be
9284 evaluated more than once below. */
9286 || TREE_CODE (incremented
) == BIT_FIELD_REF
9287 || (TREE_CODE (incremented
) == COMPONENT_REF
9288 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9289 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9290 incremented
= stabilize_reference (incremented
);
9291 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9292 ones into save exprs so that they don't accidentally get evaluated
9293 more than once by the code below. */
9294 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9295 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9296 incremented
= save_expr (incremented
);
9298 /* Compute the operands as RTX.
9299 Note whether OP0 is the actual lvalue or a copy of it:
9300 I believe it is a copy iff it is a register or subreg
9301 and insns were generated in computing it. */
9303 temp
= get_last_insn ();
9304 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
9306 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9307 in place but instead must do sign- or zero-extension during assignment,
9308 so we copy it into a new register and let the code below use it as
9311 Note that we can safely modify this SUBREG since it is know not to be
9312 shared (it was made by the expand_expr call above). */
9314 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9317 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9321 else if (GET_CODE (op0
) == SUBREG
9322 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9324 /* We cannot increment this SUBREG in place. If we are
9325 post-incrementing, get a copy of the old value. Otherwise,
9326 just mark that we cannot increment in place. */
9328 op0
= copy_to_reg (op0
);
9333 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9334 && temp
!= get_last_insn ());
9335 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
9336 EXPAND_MEMORY_USE_BAD
);
9338 /* Decide whether incrementing or decrementing. */
9339 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9340 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9341 this_optab
= sub_optab
;
9343 /* Convert decrement by a constant into a negative increment. */
9344 if (this_optab
== sub_optab
9345 && GET_CODE (op1
) == CONST_INT
)
9347 op1
= GEN_INT (-INTVAL (op1
));
9348 this_optab
= add_optab
;
9351 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9352 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9354 /* For a preincrement, see if we can do this with a single instruction. */
9357 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9358 if (icode
!= (int) CODE_FOR_nothing
9359 /* Make sure that OP0 is valid for operands 0 and 1
9360 of the insn we want to queue. */
9361 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9362 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9363 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9367 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9368 then we cannot just increment OP0. We must therefore contrive to
9369 increment the original value. Then, for postincrement, we can return
9370 OP0 since it is a copy of the old value. For preincrement, expand here
9371 unless we can do it with a single insn.
9373 Likewise if storing directly into OP0 would clobber high bits
9374 we need to preserve (bad_subreg). */
9375 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9377 /* This is the easiest way to increment the value wherever it is.
9378 Problems with multiple evaluation of INCREMENTED are prevented
9379 because either (1) it is a component_ref or preincrement,
9380 in which case it was stabilized above, or (2) it is an array_ref
9381 with constant index in an array in a register, which is
9382 safe to reevaluate. */
9383 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9384 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9385 ? MINUS_EXPR
: PLUS_EXPR
),
9388 TREE_OPERAND (exp
, 1));
9390 while (TREE_CODE (incremented
) == NOP_EXPR
9391 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9393 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9394 incremented
= TREE_OPERAND (incremented
, 0);
9397 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9398 return post
? op0
: temp
;
9403 /* We have a true reference to the value in OP0.
9404 If there is an insn to add or subtract in this mode, queue it.
9405 Queueing the increment insn avoids the register shuffling
9406 that often results if we must increment now and first save
9407 the old value for subsequent use. */
9409 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9410 op0
= stabilize (op0
);
9413 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9414 if (icode
!= (int) CODE_FOR_nothing
9415 /* Make sure that OP0 is valid for operands 0 and 1
9416 of the insn we want to queue. */
9417 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9418 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9420 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9421 op1
= force_reg (mode
, op1
);
9423 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9425 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9427 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9428 ? force_reg (Pmode
, XEXP (op0
, 0))
9429 : copy_to_reg (XEXP (op0
, 0)));
9432 op0
= replace_equiv_address (op0
, addr
);
9433 temp
= force_reg (GET_MODE (op0
), op0
);
9434 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9435 op1
= force_reg (mode
, op1
);
9437 /* The increment queue is LIFO, thus we have to `queue'
9438 the instructions in reverse order. */
9439 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9440 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9445 /* Preincrement, or we can't increment with one simple insn. */
9447 /* Save a copy of the value before inc or dec, to return it later. */
9448 temp
= value
= copy_to_reg (op0
);
9450 /* Arrange to return the incremented value. */
9451 /* Copy the rtx because expand_binop will protect from the queue,
9452 and the results of that would be invalid for us to return
9453 if our caller does emit_queue before using our result. */
9454 temp
= copy_rtx (value
= op0
);
9456 /* Increment however we can. */
9457 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9458 current_function_check_memory_usage
? NULL_RTX
: op0
,
9459 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9460 /* Make sure the value is stored into OP0. */
9462 emit_move_insn (op0
, op1
);
9467 /* At the start of a function, record that we have no previously-pushed
9468 arguments waiting to be popped. */
/* NOTE(review): extraction-damaged copy -- the return type and braces
   of this definition are missing (embedded original line numbers jump
   9468 -> 9471 -> 9473); surviving tokens are unchanged.  */
9471 init_pending_stack_adjust ()
/* Reset the count of pushed-but-not-yet-popped argument bytes.  */
9473 pending_stack_adjust
= 0;
9476 /* When exiting from function, if safe, clear out any pending stack adjust
9477 so the adjustment won't get done.
9479 Note, if the current function calls alloca, then it must have a
9480 frame pointer regardless of the value of flag_omit_frame_pointer. */
/* NOTE(review): extraction-damaged copy -- the opening of the `if'
   condition and the braces are missing (embedded original line numbers
   jump, e.g. 9485 -> 9487); surviving tokens are unchanged.  */
9483 clear_pending_stack_adjust ()
9485 #ifdef EXIT_IGNORE_STACK
/* Only discard the adjustment when the epilogue ignores the stack
   pointer and this function cannot end up inlined elsewhere (an
   inlined copy would still need the adjustment).  */
9487 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
9488 && EXIT_IGNORE_STACK
9489 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
9490 && ! flag_inline_functions
)
/* Fold the abandoned adjustment into stack_pointer_delta before
   zeroing it, so the bookkeeping stays consistent.  */
9492 stack_pointer_delta
-= pending_stack_adjust
,
9493 pending_stack_adjust
= 0;
9498 /* Pop any previously-pushed arguments that have not been popped yet. */
/* NOTE(review): extraction-damaged copy -- return type and braces of
   this definition are missing; surviving tokens are unchanged.  */
9501 do_pending_stack_adjust ()
/* Do nothing while pops are being deferred (inhibit_defer_pop != 0);
   the adjustment will be emitted later instead.  */
9503 if (inhibit_defer_pop
== 0)
9505 if (pending_stack_adjust
!= 0)
9506 adjust_stack (GEN_INT (pending_stack_adjust
));
9507 pending_stack_adjust
= 0;
9511 /* Expand conditional expressions. */
9513 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9514 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* NOTE(review): extraction-damaged copy -- the rest of the comment
   above and this definition's parameter declarations/braces are
   missing (embedded original line numbers jump 9514 -> 9518).  */
9518 jumpifnot (exp
, label
)
/* Jump-if-false: LABEL is the false-label, no true-label.  */
9522 do_jump (exp
, label
, NULL_RTX
);
9525 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): extraction-damaged copy -- the `jumpif' function header
   lines (embedded original lines 9526-9531) are missing here; only the
   body call survives.  Jump-if-true: LABEL is the true-label.  */
9532 do_jump (exp
, NULL_RTX
, label
);
9535 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9536 the result is zero, or IF_TRUE_LABEL if the result is one.
9537 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9538 meaning fall through in that case.
9540 do_jump always does any pending stack adjust except when it does not
9541 actually perform a jump. An example where there is no jump
9542 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9544 This function is responsible for optimizing cases such as
9545 &&, || and comparison operators in EXP. */
9548 do_jump (exp
, if_false_label
, if_true_label
)
9550 rtx if_false_label
, if_true_label
;
9552 enum tree_code code
= TREE_CODE (exp
);
9553 /* Some cases need to create a label to jump to
9554 in order to properly fall through.
9555 These cases set DROP_THROUGH_LABEL nonzero. */
9556 rtx drop_through_label
= 0;
9560 enum machine_mode mode
;
9562 #ifdef MAX_INTEGER_COMPUTATION_MODE
9563 check_max_integer_computation_mode (exp
);
9574 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9580 /* This is not true with #pragma weak */
9582 /* The address of something can never be zero. */
9584 emit_jump (if_true_label
);
9589 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9590 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9591 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
9592 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
9595 /* If we are narrowing the operand, we have to do the compare in the
9597 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9598 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9600 case NON_LVALUE_EXPR
:
9601 case REFERENCE_EXPR
:
9606 /* These cannot change zero->non-zero or vice versa. */
9607 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9610 case WITH_RECORD_EXPR
:
9611 /* Put the object on the placeholder list, recurse through our first
9612 operand, and pop the list. */
9613 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9615 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9616 placeholder_list
= TREE_CHAIN (placeholder_list
);
9620 /* This is never less insns than evaluating the PLUS_EXPR followed by
9621 a test and can be longer if the test is eliminated. */
9623 /* Reduce to minus. */
9624 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9625 TREE_OPERAND (exp
, 0),
9626 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9627 TREE_OPERAND (exp
, 1))));
9628 /* Process as MINUS. */
9632 /* Non-zero iff operands of minus differ. */
9633 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9634 TREE_OPERAND (exp
, 0),
9635 TREE_OPERAND (exp
, 1)),
9636 NE
, NE
, if_false_label
, if_true_label
);
9640 /* If we are AND'ing with a small constant, do this comparison in the
9641 smallest type that fits. If the machine doesn't have comparisons
9642 that small, it will be converted back to the wider comparison.
9643 This helps if we are testing the sign bit of a narrower object.
9644 combine can't do this for us because it can't know whether a
9645 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9647 if (! SLOW_BYTE_ACCESS
9648 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9649 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9650 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9651 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9652 && (type
= type_for_mode (mode
, 1)) != 0
9653 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9654 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9655 != CODE_FOR_nothing
))
9657 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9662 case TRUTH_NOT_EXPR
:
9663 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9666 case TRUTH_ANDIF_EXPR
:
9667 if (if_false_label
== 0)
9668 if_false_label
= drop_through_label
= gen_label_rtx ();
9669 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9670 start_cleanup_deferral ();
9671 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9672 end_cleanup_deferral ();
9675 case TRUTH_ORIF_EXPR
:
9676 if (if_true_label
== 0)
9677 if_true_label
= drop_through_label
= gen_label_rtx ();
9678 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9679 start_cleanup_deferral ();
9680 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9681 end_cleanup_deferral ();
9686 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9687 preserve_temp_slots (NULL_RTX
);
9691 do_pending_stack_adjust ();
9692 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9698 case ARRAY_RANGE_REF
:
9700 HOST_WIDE_INT bitsize
, bitpos
;
9702 enum machine_mode mode
;
9706 unsigned int alignment
;
9708 /* Get description of this reference. We don't actually care
9709 about the underlying object here. */
9710 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9711 &unsignedp
, &volatilep
, &alignment
);
9713 type
= type_for_size (bitsize
, unsignedp
);
9714 if (! SLOW_BYTE_ACCESS
9715 && type
!= 0 && bitsize
>= 0
9716 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9717 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9718 != CODE_FOR_nothing
))
9720 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9727 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9728 if (integer_onep (TREE_OPERAND (exp
, 1))
9729 && integer_zerop (TREE_OPERAND (exp
, 2)))
9730 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9732 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9733 && integer_onep (TREE_OPERAND (exp
, 2)))
9734 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9738 rtx label1
= gen_label_rtx ();
9739 drop_through_label
= gen_label_rtx ();
9741 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9743 start_cleanup_deferral ();
9744 /* Now the THEN-expression. */
9745 do_jump (TREE_OPERAND (exp
, 1),
9746 if_false_label
? if_false_label
: drop_through_label
,
9747 if_true_label
? if_true_label
: drop_through_label
);
9748 /* In case the do_jump just above never jumps. */
9749 do_pending_stack_adjust ();
9750 emit_label (label1
);
9752 /* Now the ELSE-expression. */
9753 do_jump (TREE_OPERAND (exp
, 2),
9754 if_false_label
? if_false_label
: drop_through_label
,
9755 if_true_label
? if_true_label
: drop_through_label
);
9756 end_cleanup_deferral ();
9762 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9764 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9765 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9767 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9768 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9771 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9772 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9773 fold (build1 (REALPART_EXPR
,
9774 TREE_TYPE (inner_type
),
9776 fold (build1 (REALPART_EXPR
,
9777 TREE_TYPE (inner_type
),
9779 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9780 fold (build1 (IMAGPART_EXPR
,
9781 TREE_TYPE (inner_type
),
9783 fold (build1 (IMAGPART_EXPR
,
9784 TREE_TYPE (inner_type
),
9786 if_false_label
, if_true_label
);
9789 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9790 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9792 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9793 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9794 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9796 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9802 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9804 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9805 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9807 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9808 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9811 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9812 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9813 fold (build1 (REALPART_EXPR
,
9814 TREE_TYPE (inner_type
),
9816 fold (build1 (REALPART_EXPR
,
9817 TREE_TYPE (inner_type
),
9819 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9820 fold (build1 (IMAGPART_EXPR
,
9821 TREE_TYPE (inner_type
),
9823 fold (build1 (IMAGPART_EXPR
,
9824 TREE_TYPE (inner_type
),
9826 if_false_label
, if_true_label
);
9829 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9830 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9832 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9833 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9834 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9836 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9841 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9842 if (GET_MODE_CLASS (mode
) == MODE_INT
9843 && ! can_compare_p (LT
, mode
, ccp_jump
))
9844 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9846 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9850 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9851 if (GET_MODE_CLASS (mode
) == MODE_INT
9852 && ! can_compare_p (LE
, mode
, ccp_jump
))
9853 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9855 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9859 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9860 if (GET_MODE_CLASS (mode
) == MODE_INT
9861 && ! can_compare_p (GT
, mode
, ccp_jump
))
9862 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9864 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9868 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9869 if (GET_MODE_CLASS (mode
) == MODE_INT
9870 && ! can_compare_p (GE
, mode
, ccp_jump
))
9871 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9873 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9876 case UNORDERED_EXPR
:
9879 enum rtx_code cmp
, rcmp
;
9882 if (code
== UNORDERED_EXPR
)
9883 cmp
= UNORDERED
, rcmp
= ORDERED
;
9885 cmp
= ORDERED
, rcmp
= UNORDERED
;
9886 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9889 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9890 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9891 /* If the target doesn't provide either UNORDERED or ORDERED
9892 comparisons, canonicalize on UNORDERED for the library. */
9893 || rcmp
== UNORDERED
))
9897 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9899 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9904 enum rtx_code rcode1
;
9905 enum tree_code tcode2
;
9929 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9930 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9931 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9935 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9936 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9939 /* If the target doesn't support combined unordered
9940 compares, decompose into UNORDERED + comparison. */
9941 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9942 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9943 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9944 do_jump (exp
, if_false_label
, if_true_label
);
9950 __builtin_expect (<test>, 0) and
9951 __builtin_expect (<test>, 1)
9953 We need to do this here, so that <test> is not converted to a SCC
9954 operation on machines that use condition code registers and COMPARE
9955 like the PowerPC, and then the jump is done based on whether the SCC
9956 operation produced a 1 or 0. */
9958 /* Check for a built-in function. */
9959 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
9961 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
9962 tree arglist
= TREE_OPERAND (exp
, 1);
9964 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9965 && DECL_BUILT_IN (fndecl
)
9966 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
9967 && arglist
!= NULL_TREE
9968 && TREE_CHAIN (arglist
) != NULL_TREE
)
9970 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
9973 if (seq
!= NULL_RTX
)
9980 /* fall through and generate the normal code. */
9984 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9986 /* This is not needed any more and causes poor code since it causes
9987 comparisons and tests from non-SI objects to have different code
9989 /* Copy to register to avoid generating bad insns by cse
9990 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9991 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9992 temp
= copy_to_reg (temp
);
9994 do_pending_stack_adjust ();
9995 /* Do any postincrements in the expression that was tested. */
9998 if (GET_CODE (temp
) == CONST_INT
9999 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
10000 || GET_CODE (temp
) == LABEL_REF
)
10002 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
10004 emit_jump (target
);
10006 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
10007 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
10008 /* Note swapping the labels gives us not-equal. */
10009 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
10010 else if (GET_MODE (temp
) != VOIDmode
)
10011 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
10012 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10013 GET_MODE (temp
), NULL_RTX
, 0,
10014 if_false_label
, if_true_label
);
10019 if (drop_through_label
)
10021 /* If do_jump produces code that might be jumped around,
10022 do any stack adjusts from that code, before the place
10023 where control merges in. */
10024 do_pending_stack_adjust ();
10025 emit_label (drop_through_label
);
10029 /* Given a comparison expression EXP for values too wide to be compared
10030 with one insn, test the comparison and jump to the appropriate label.
10031 The code of EXP is ignored; we always test GT if SWAP is 0,
10032 and LT if SWAP is 1. */
/* NOTE(review): extraction-damaged copy -- some K&R parameter
   declarations (presumably for EXP and SWAP; embedded original lines
   10036-10037 are absent) and the braces are missing; surviving tokens
   are unchanged.  */
10035 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
10038 rtx if_false_label
, if_true_label
;
/* Expand both operands; SWAP picks which operand is expanded first, so
   the single GT test below implements either GT (SWAP == 0) or
   LT (SWAP == 1).  */
10040 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
10041 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
10042 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10043 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
/* Delegate the word-at-a-time comparison to the rtx-level helper.  */
10045 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* If either label is missing, make a fall-through label so both
     branches of each per-word comparison have somewhere to go;
     control merges there at the end.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  /* Word 0 is already the most significant word.  */
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  /* Index from the top so we still visit high-order first.  */
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX, 0,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  /* Every word compared equal, so OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
10102 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10103 with one insn, test the comparison and jump to the appropriate label. */
10106 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
10108 rtx if_false_label
, if_true_label
;
10110 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10111 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10112 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10113 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10115 rtx drop_through_label
= 0;
10117 if (! if_false_label
)
10118 drop_through_label
= if_false_label
= gen_label_rtx ();
10120 for (i
= 0; i
< nwords
; i
++)
10121 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
10122 operand_subword_force (op1
, i
, mode
),
10123 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10124 word_mode
, NULL_RTX
, 0, if_false_label
,
10128 emit_jump (if_true_label
);
10129 if (drop_through_label
)
10130 emit_label (drop_through_label
);
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  /* expand_binop may fail (return 0); the loop stops folding as soon
     as that happens.  */
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      /* The OR succeeded: a single compare of the folded word with
	 zero decides the whole test.  */
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any nonzero word means OP0 is nonzero: branch to the false label.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* Two constant operands: fold the comparison at compile time if
     possible and return the constant result directly.  */
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  /* Return an rtx describing the comparison of cc0 against zero,
     suitable for use in a conditional branch.  */
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Reversal is unsafe for floating modes because of NaNs.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* If both operands are constant, the branch direction is known at
     compile time; emit an unconditional jump (or nothing, to fall
     through) instead of a compare.  */
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  /* emit_cmp_and_jump_insns needs a true label to branch to; make a
     dummy one if the caller wants "fall through on true".  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* For BLKmode objects the size rtx of operand 0 tells the rtx-level
     routine how many bytes to compare.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      /* Bring the tested bit down to bit 0.  */
      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      /* XOR with 1 flips the sense when the desired result is the
	 complement of the tested bit.  */
      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  /* NOTE(review): this passes TARGET to get_subtarget but then checks
     SUBTARGET's mode/safety — looks asymmetric with the earlier
     get_subtarget (subtarget) call above; confirm this is intended.  */
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  /* Skip the second move when the comparison holds, leaving the value
     stored above in TARGET.  */
  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
10740 /* Stubs in case we haven't got a casesi insn. */
10741 #ifndef HAVE_casesi
10742 # define HAVE_casesi 0
10743 # define gen_casesi(a, b, c, d, e) (0)
10744 # define CODE_FOR_casesi CODE_FOR_nothing
10747 /* If the machine does not have a case insn that compares the bounds,
10748 this means extra overhead for dispatch tables, which raises the
10749 threshold for using them. */
10750 #ifndef CASE_VALUES_THRESHOLD
10751 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10752 #endif /* CASE_VALUES_THRESHOLD */
/* Return the number of distinct case values at which it becomes
   worthwhile to use a dispatch table instead of a chain of compares
   for a switch statement.  */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      /* The index is wider than SImode: do the range check in the
	 original mode first, then truncate.  */
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  /* Each casesi operand must satisfy its predicate; copy to a register
     of the expected mode when it does not.  */
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
10836 /* Attempt to generate a tablejump instruction; same concept. */
10837 #ifndef HAVE_tablejump
10838 #define HAVE_tablejump 0
10839 #define gen_tablejump(x, y) (0)
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  /* Load the selected table entry; it is constant once laid out, so
     mark the MEM unchanging.  */
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
10916 try_tablejump (index_type
, index_expr
, minval
, range
,
10917 table_label
, default_label
)
10918 tree index_type
, index_expr
, minval
, range
;
10919 rtx table_label
, default_label
;
10923 if (! HAVE_tablejump
)
10926 index_expr
= fold (build (MINUS_EXPR
, index_type
,
10927 convert (index_type
, index_expr
),
10928 convert (index_type
, minval
)));
10929 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
10931 index
= protect_from_queue (index
, 0);
10932 do_pending_stack_adjust ();
10934 do_tablejump (index
, TYPE_MODE (index_type
),
10935 convert_modes (TYPE_MODE (index_type
),
10936 TYPE_MODE (TREE_TYPE (range
)),
10937 expand_expr (range
, NULL_RTX
,
10939 TREE_UNSIGNED (TREE_TYPE (range
))),
10940 table_label
, default_label
);