1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
71 #define STACK_PUSH_CODE PRE_INC
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
85 #define TARGET_MEM_FUNCTIONS 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list
= 0;
100 /* This structure is used by move_by_pieces to describe the move to
102 struct move_by_pieces
111 int explicit_inc_from
;
112 unsigned HOST_WIDE_INT len
;
113 HOST_WIDE_INT offset
;
117 /* This structure is used by store_by_pieces to describe the clear to
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len
;
127 HOST_WIDE_INT offset
;
128 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
133 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT
,
137 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
138 struct move_by_pieces
*));
139 static bool block_move_libcall_safe_for_call_parm
PARAMS ((void));
140 static bool emit_block_move_via_movstr
PARAMS ((rtx
, rtx
, rtx
, unsigned));
141 static rtx emit_block_move_via_libcall
PARAMS ((rtx
, rtx
, rtx
));
142 static tree emit_block_move_libcall_fn
PARAMS ((int));
143 static void emit_block_move_via_loop
PARAMS ((rtx
, rtx
, rtx
, unsigned));
144 static rtx clear_by_pieces_1
PARAMS ((PTR
, HOST_WIDE_INT
,
146 static void clear_by_pieces
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
148 static void store_by_pieces_1
PARAMS ((struct store_by_pieces
*,
150 static void store_by_pieces_2
PARAMS ((rtx (*) (rtx
, ...),
152 struct store_by_pieces
*));
153 static bool clear_storage_via_clrstr
PARAMS ((rtx
, rtx
, unsigned));
154 static rtx clear_storage_via_libcall
PARAMS ((rtx
, rtx
));
155 static tree clear_storage_libcall_fn
PARAMS ((int));
156 static rtx compress_float_constant
PARAMS ((rtx
, rtx
));
157 static rtx get_subtarget
PARAMS ((rtx
));
158 static int is_zeros_p
PARAMS ((tree
));
159 static int mostly_zeros_p
PARAMS ((tree
));
160 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
161 HOST_WIDE_INT
, enum machine_mode
,
162 tree
, tree
, int, int));
163 static void store_constructor
PARAMS ((tree
, rtx
, int, HOST_WIDE_INT
));
164 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
165 HOST_WIDE_INT
, enum machine_mode
,
166 tree
, enum machine_mode
, int, tree
,
168 static rtx var_rtx
PARAMS ((tree
));
169 static HOST_WIDE_INT highest_pow2_factor
PARAMS ((tree
));
170 static HOST_WIDE_INT highest_pow2_factor_for_type
PARAMS ((tree
, tree
));
171 static int is_aligning_offset
PARAMS ((tree
, tree
));
172 static rtx expand_increment
PARAMS ((tree
, int, int));
173 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
175 static void emit_single_push_insn
PARAMS ((enum machine_mode
, rtx
, tree
));
177 static void do_tablejump
PARAMS ((rtx
, enum machine_mode
, rtx
, rtx
, rtx
));
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
183 static char direct_load
[NUM_MACHINE_MODES
];
184 static char direct_store
[NUM_MACHINE_MODES
];
186 /* Record for each mode whether we can float-extend from memory. */
188 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
190 /* If a memory-to-memory move would take MOVE_RATIO or more simple
191 move-instruction sequences, we will do a movstr or libcall instead. */
194 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 /* If we are optimizing for space (-Os), cut down the default move ratio. */
198 #define MOVE_RATIO (optimize_size ? 3 : 15)
202 /* This macro is used to determine whether move_by_pieces should be called
203 to perform a structure copy. */
204 #ifndef MOVE_BY_PIECES_P
205 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
206 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
209 /* If a clear memory operation would take CLEAR_RATIO or more simple
210 move-instruction sequences, we will do a clrstr or libcall instead. */
213 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
214 #define CLEAR_RATIO 2
216 /* If we are optimizing for space, cut down the default clear ratio. */
217 #define CLEAR_RATIO (optimize_size ? 3 : 15)
221 /* This macro is used to determine whether clear_by_pieces should be
222 called to clear storage. */
223 #ifndef CLEAR_BY_PIECES_P
224 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
225 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
228 /* This macro is used to determine whether store_by_pieces should be
229 called to "memset" storage with byte values other than zero, or
230 to "memcpy" storage when the source is a constant string. */
231 #ifndef STORE_BY_PIECES_P
232 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
235 /* This array records the insn_code of insns to perform block moves. */
236 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
238 /* This array records the insn_code of insns to perform block clears. */
239 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
247 /* This is run once per compilation to set up which modes can be used
248 directly in memory and to initialize the block move optab. */
254 enum machine_mode mode
;
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
263 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg
= gen_rtx_REG (VOIDmode
, -1);
269 insn
= rtx_alloc (INSN
);
270 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
271 PATTERN (insn
) = pat
;
273 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
274 mode
= (enum machine_mode
) ((int) mode
+ 1))
278 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
279 PUT_MODE (mem
, mode
);
280 PUT_MODE (mem1
, mode
);
281 PUT_MODE (reg
, mode
);
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
286 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
287 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
288 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
291 if (! HARD_REGNO_MODE_OK (regno
, mode
))
297 SET_DEST (pat
) = reg
;
298 if (recog (pat
, insn
, &num_clobbers
) >= 0)
299 direct_load
[(int) mode
] = 1;
301 SET_SRC (pat
) = mem1
;
302 SET_DEST (pat
) = reg
;
303 if (recog (pat
, insn
, &num_clobbers
) >= 0)
304 direct_load
[(int) mode
] = 1;
307 SET_DEST (pat
) = mem
;
308 if (recog (pat
, insn
, &num_clobbers
) >= 0)
309 direct_store
[(int) mode
] = 1;
312 SET_DEST (pat
) = mem1
;
313 if (recog (pat
, insn
, &num_clobbers
) >= 0)
314 direct_store
[(int) mode
] = 1;
318 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
320 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
321 mode
= GET_MODE_WIDER_MODE (mode
))
323 enum machine_mode srcmode
;
324 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
325 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
329 ic
= can_extend_p (mode
, srcmode
, 0);
330 if (ic
== CODE_FOR_nothing
)
333 PUT_MODE (mem
, srcmode
);
335 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
336 float_extend_from_mem
[mode
][srcmode
] = true;
341 /* This is run at the start of compiling a function. */
346 cfun
->expr
= (struct expr_status
*) ggc_alloc (sizeof (struct expr_status
));
349 pending_stack_adjust
= 0;
350 stack_pointer_delta
= 0;
351 inhibit_defer_pop
= 0;
353 apply_args_value
= 0;
357 /* Small sanity check that the queue is empty at the end of a function. */
360 finish_expr_for_function ()
366 /* Manage the queue of increment instructions to be output
367 for POSTINCREMENT_EXPR expressions, etc. */
369 /* Queue up to increment (or change) VAR later. BODY says how:
370 BODY should be the same thing you would pass to emit_insn
371 to increment right away. It will go to emit_insn later on.
373 The value is a QUEUED expression to be used in place of VAR
374 where you want to guarantee the pre-incrementation value of VAR. */
377 enqueue_insn (var
, body
)
380 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
381 body
, pending_chain
);
382 return pending_chain
;
385 /* Use protect_from_queue to convert a QUEUED expression
386 into something that you can put immediately into an instruction.
387 If the queued incrementation has not happened yet,
388 protect_from_queue returns the variable itself.
389 If the incrementation has happened, protect_from_queue returns a temp
390 that contains a copy of the old value of the variable.
392 Any time an rtx which might possibly be a QUEUED is to be put
393 into an instruction, it must be passed through protect_from_queue first.
394 QUEUED expressions are not meaningful in instructions.
396 Do not pass a value through protect_from_queue and then hold
397 on to it for a while before putting it in an instruction!
398 If the queue is flushed in between, incorrect code will result. */
401 protect_from_queue (x
, modify
)
405 RTX_CODE code
= GET_CODE (x
);
407 #if 0 /* A QUEUED can hang around after the queue is forced out. */
408 /* Shortcut for most common case. */
409 if (pending_chain
== 0)
415 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
416 use of autoincrement. Make a copy of the contents of the memory
417 location rather than a copy of the address, but not if the value is
418 of mode BLKmode. Don't modify X in place since it might be
420 if (code
== MEM
&& GET_MODE (x
) != BLKmode
421 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
424 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
428 rtx temp
= gen_reg_rtx (GET_MODE (x
));
430 emit_insn_before (gen_move_insn (temp
, new),
435 /* Copy the address into a pseudo, so that the returned value
436 remains correct across calls to emit_queue. */
437 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
440 /* Otherwise, recursively protect the subexpressions of all
441 the kinds of rtx's that can contain a QUEUED. */
444 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
445 if (tem
!= XEXP (x
, 0))
451 else if (code
== PLUS
|| code
== MULT
)
453 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
454 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
455 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
464 /* If the increment has not happened, use the variable itself. Copy it
465 into a new pseudo so that the value remains correct across calls to
467 if (QUEUED_INSN (x
) == 0)
468 return copy_to_reg (QUEUED_VAR (x
));
469 /* If the increment has happened and a pre-increment copy exists,
471 if (QUEUED_COPY (x
) != 0)
472 return QUEUED_COPY (x
);
473 /* The increment has happened but we haven't set up a pre-increment copy.
474 Set one up now, and use it. */
475 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
476 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
478 return QUEUED_COPY (x
);
481 /* Return nonzero if X contains a QUEUED expression:
482 if it contains anything that will be altered by a queued increment.
483 We handle only combinations of MEM, PLUS, MINUS and MULT operators
484 since memory addresses generally contain only those. */
490 enum rtx_code code
= GET_CODE (x
);
496 return queued_subexp_p (XEXP (x
, 0));
500 return (queued_subexp_p (XEXP (x
, 0))
501 || queued_subexp_p (XEXP (x
, 1)));
507 /* Perform all the pending incrementations. */
513 while ((p
= pending_chain
))
515 rtx body
= QUEUED_BODY (p
);
517 switch (GET_CODE (body
))
525 QUEUED_INSN (p
) = body
;
529 #ifdef ENABLE_CHECKING
536 QUEUED_INSN (p
) = emit_insn (body
);
540 pending_chain
= QUEUED_NEXT (p
);
544 /* Copy data from FROM to TO, where the machine modes are not the same.
545 Both modes may be integer, or both may be floating.
546 UNSIGNEDP should be nonzero if FROM is an unsigned type.
547 This causes zero-extension instead of sign-extension. */
550 convert_move (to
, from
, unsignedp
)
554 enum machine_mode to_mode
= GET_MODE (to
);
555 enum machine_mode from_mode
= GET_MODE (from
);
556 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
557 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
561 /* rtx code for making an equivalent value. */
562 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
563 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
565 to
= protect_from_queue (to
, 1);
566 from
= protect_from_queue (from
, 0);
568 if (to_real
!= from_real
)
571 /* If FROM is a SUBREG that indicates that we have already done at least
572 the required extension, strip it. We don't handle such SUBREGs as
575 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
576 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
577 >= GET_MODE_SIZE (to_mode
))
578 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
579 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
581 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
584 if (to_mode
== from_mode
585 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
587 emit_move_insn (to
, from
);
591 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
593 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
596 if (VECTOR_MODE_P (to_mode
))
597 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
599 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
601 emit_move_insn (to
, from
);
605 if (to_real
!= from_real
)
612 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
614 /* Try converting directly if the insn is supported. */
615 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
618 emit_unop_insn (code
, to
, from
, UNKNOWN
);
623 #ifdef HAVE_trunchfqf2
624 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
626 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
630 #ifdef HAVE_trunctqfqf2
631 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
633 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
637 #ifdef HAVE_truncsfqf2
638 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
640 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
644 #ifdef HAVE_truncdfqf2
645 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
647 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
651 #ifdef HAVE_truncxfqf2
652 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
654 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
658 #ifdef HAVE_trunctfqf2
659 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
661 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
666 #ifdef HAVE_trunctqfhf2
667 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
669 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
673 #ifdef HAVE_truncsfhf2
674 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
676 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
680 #ifdef HAVE_truncdfhf2
681 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
683 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
687 #ifdef HAVE_truncxfhf2
688 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
690 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
694 #ifdef HAVE_trunctfhf2
695 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
697 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
702 #ifdef HAVE_truncsftqf2
703 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
705 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
709 #ifdef HAVE_truncdftqf2
710 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
712 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
716 #ifdef HAVE_truncxftqf2
717 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
719 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
723 #ifdef HAVE_trunctftqf2
724 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
726 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
731 #ifdef HAVE_truncdfsf2
732 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
734 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
738 #ifdef HAVE_truncxfsf2
739 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
741 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
745 #ifdef HAVE_trunctfsf2
746 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
748 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
752 #ifdef HAVE_truncxfdf2
753 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
755 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
759 #ifdef HAVE_trunctfdf2
760 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
762 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
774 libcall
= extendsfdf2_libfunc
;
778 libcall
= extendsfxf2_libfunc
;
782 libcall
= extendsftf2_libfunc
;
794 libcall
= truncdfsf2_libfunc
;
798 libcall
= extenddfxf2_libfunc
;
802 libcall
= extenddftf2_libfunc
;
814 libcall
= truncxfsf2_libfunc
;
818 libcall
= truncxfdf2_libfunc
;
830 libcall
= trunctfsf2_libfunc
;
834 libcall
= trunctfdf2_libfunc
;
846 if (libcall
== (rtx
) 0)
847 /* This conversion is not implemented yet. */
851 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
853 insns
= get_insns ();
855 emit_libcall_block (insns
, to
, value
, gen_rtx_FLOAT_TRUNCATE (to_mode
,
860 /* Now both modes are integers. */
862 /* Handle expanding beyond a word. */
863 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
864 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
871 enum machine_mode lowpart_mode
;
872 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
874 /* Try converting directly if the insn is supported. */
875 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
878 /* If FROM is a SUBREG, put it into a register. Do this
879 so that we always generate the same set of insns for
880 better cse'ing; if an intermediate assignment occurred,
881 we won't be doing the operation directly on the SUBREG. */
882 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
883 from
= force_reg (from_mode
, from
);
884 emit_unop_insn (code
, to
, from
, equiv_code
);
887 /* Next, try converting via full word. */
888 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
889 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
890 != CODE_FOR_nothing
))
892 if (GET_CODE (to
) == REG
)
893 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
894 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
895 emit_unop_insn (code
, to
,
896 gen_lowpart (word_mode
, to
), equiv_code
);
900 /* No special multiword conversion insn; do it by hand. */
903 /* Since we will turn this into a no conflict block, we must ensure
904 that the source does not overlap the target. */
906 if (reg_overlap_mentioned_p (to
, from
))
907 from
= force_reg (from_mode
, from
);
909 /* Get a copy of FROM widened to a word, if necessary. */
910 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
911 lowpart_mode
= word_mode
;
913 lowpart_mode
= from_mode
;
915 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
917 lowpart
= gen_lowpart (lowpart_mode
, to
);
918 emit_move_insn (lowpart
, lowfrom
);
920 /* Compute the value to put in each remaining word. */
922 fill_value
= const0_rtx
;
927 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
928 && STORE_FLAG_VALUE
== -1)
930 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
932 fill_value
= gen_reg_rtx (word_mode
);
933 emit_insn (gen_slt (fill_value
));
939 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
940 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
942 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
946 /* Fill the remaining words. */
947 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
949 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
950 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
955 if (fill_value
!= subword
)
956 emit_move_insn (subword
, fill_value
);
959 insns
= get_insns ();
962 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
963 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
967 /* Truncating multi-word to a word or less. */
968 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
969 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
971 if (!((GET_CODE (from
) == MEM
972 && ! MEM_VOLATILE_P (from
)
973 && direct_load
[(int) to_mode
]
974 && ! mode_dependent_address_p (XEXP (from
, 0)))
975 || GET_CODE (from
) == REG
976 || GET_CODE (from
) == SUBREG
))
977 from
= force_reg (from_mode
, from
);
978 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
982 /* Handle pointer conversion. */ /* SPEE 900220. */
983 if (to_mode
== PQImode
)
985 if (from_mode
!= QImode
)
986 from
= convert_to_mode (QImode
, from
, unsignedp
);
988 #ifdef HAVE_truncqipqi2
989 if (HAVE_truncqipqi2
)
991 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
994 #endif /* HAVE_truncqipqi2 */
998 if (from_mode
== PQImode
)
1000 if (to_mode
!= QImode
)
1002 from
= convert_to_mode (QImode
, from
, unsignedp
);
1007 #ifdef HAVE_extendpqiqi2
1008 if (HAVE_extendpqiqi2
)
1010 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
1013 #endif /* HAVE_extendpqiqi2 */
1018 if (to_mode
== PSImode
)
1020 if (from_mode
!= SImode
)
1021 from
= convert_to_mode (SImode
, from
, unsignedp
);
1023 #ifdef HAVE_truncsipsi2
1024 if (HAVE_truncsipsi2
)
1026 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
1029 #endif /* HAVE_truncsipsi2 */
1033 if (from_mode
== PSImode
)
1035 if (to_mode
!= SImode
)
1037 from
= convert_to_mode (SImode
, from
, unsignedp
);
1042 #ifdef HAVE_extendpsisi2
1043 if (! unsignedp
&& HAVE_extendpsisi2
)
1045 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1048 #endif /* HAVE_extendpsisi2 */
1049 #ifdef HAVE_zero_extendpsisi2
1050 if (unsignedp
&& HAVE_zero_extendpsisi2
)
1052 emit_unop_insn (CODE_FOR_zero_extendpsisi2
, to
, from
, UNKNOWN
);
1055 #endif /* HAVE_zero_extendpsisi2 */
1060 if (to_mode
== PDImode
)
1062 if (from_mode
!= DImode
)
1063 from
= convert_to_mode (DImode
, from
, unsignedp
);
1065 #ifdef HAVE_truncdipdi2
1066 if (HAVE_truncdipdi2
)
1068 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1071 #endif /* HAVE_truncdipdi2 */
1075 if (from_mode
== PDImode
)
1077 if (to_mode
!= DImode
)
1079 from
= convert_to_mode (DImode
, from
, unsignedp
);
1084 #ifdef HAVE_extendpdidi2
1085 if (HAVE_extendpdidi2
)
1087 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1090 #endif /* HAVE_extendpdidi2 */
1095 /* Now follow all the conversions between integers
1096 no more than a word long. */
1098 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1099 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1100 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1101 GET_MODE_BITSIZE (from_mode
)))
1103 if (!((GET_CODE (from
) == MEM
1104 && ! MEM_VOLATILE_P (from
)
1105 && direct_load
[(int) to_mode
]
1106 && ! mode_dependent_address_p (XEXP (from
, 0)))
1107 || GET_CODE (from
) == REG
1108 || GET_CODE (from
) == SUBREG
))
1109 from
= force_reg (from_mode
, from
);
1110 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1111 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1112 from
= copy_to_reg (from
);
1113 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1117 /* Handle extension. */
1118 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1120 /* Convert directly if that works. */
1121 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1122 != CODE_FOR_nothing
)
1125 from
= force_not_mem (from
);
1127 emit_unop_insn (code
, to
, from
, equiv_code
);
1132 enum machine_mode intermediate
;
1136 /* Search for a mode to convert via. */
1137 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1138 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1139 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1140 != CODE_FOR_nothing
)
1141 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1142 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1143 GET_MODE_BITSIZE (intermediate
))))
1144 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1145 != CODE_FOR_nothing
))
1147 convert_move (to
, convert_to_mode (intermediate
, from
,
1148 unsignedp
), unsignedp
);
1152 /* No suitable intermediate mode.
1153 Generate what we need with shifts. */
1154 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1155 - GET_MODE_BITSIZE (from_mode
), 0);
1156 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1157 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1159 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1162 emit_move_insn (to
, tmp
);
1167 /* Support special truncate insns for certain modes. */
1169 if (from_mode
== DImode
&& to_mode
== SImode
)
1171 #ifdef HAVE_truncdisi2
1172 if (HAVE_truncdisi2
)
1174 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1178 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1182 if (from_mode
== DImode
&& to_mode
== HImode
)
1184 #ifdef HAVE_truncdihi2
1185 if (HAVE_truncdihi2
)
1187 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1191 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1195 if (from_mode
== DImode
&& to_mode
== QImode
)
1197 #ifdef HAVE_truncdiqi2
1198 if (HAVE_truncdiqi2
)
1200 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1204 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1208 if (from_mode
== SImode
&& to_mode
== HImode
)
1210 #ifdef HAVE_truncsihi2
1211 if (HAVE_truncsihi2
)
1213 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1217 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1221 if (from_mode
== SImode
&& to_mode
== QImode
)
1223 #ifdef HAVE_truncsiqi2
1224 if (HAVE_truncsiqi2
)
1226 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1230 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1234 if (from_mode
== HImode
&& to_mode
== QImode
)
1236 #ifdef HAVE_trunchiqi2
1237 if (HAVE_trunchiqi2
)
1239 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1243 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1247 if (from_mode
== TImode
&& to_mode
== DImode
)
1249 #ifdef HAVE_trunctidi2
1250 if (HAVE_trunctidi2
)
1252 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1256 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1260 if (from_mode
== TImode
&& to_mode
== SImode
)
1262 #ifdef HAVE_trunctisi2
1263 if (HAVE_trunctisi2
)
1265 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1269 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1273 if (from_mode
== TImode
&& to_mode
== HImode
)
1275 #ifdef HAVE_trunctihi2
1276 if (HAVE_trunctihi2
)
1278 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1282 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1286 if (from_mode
== TImode
&& to_mode
== QImode
)
1288 #ifdef HAVE_trunctiqi2
1289 if (HAVE_trunctiqi2
)
1291 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1295 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1299 /* Handle truncation of volatile memrefs, and so on;
1300 the things that couldn't be truncated directly,
1301 and for which there was no special instruction. */
1302 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1304 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1305 emit_move_insn (to
, temp
);
1309 /* Mode combination is not recognized. */
1313 /* Return an rtx for a value that would result
1314 from converting X to mode MODE.
1315 Both X and MODE may be floating, or both integer.
1316 UNSIGNEDP is nonzero if X is an unsigned value.
1317 This can be done by referring to a part of X in place
1318 or by copying to a new temporary with conversion.
1320 This function *must not* call protect_from_queue
1321 except when putting X into an insn (in which case convert_move does it). */
1324 convert_to_mode (mode
, x
, unsignedp
)
1325 enum machine_mode mode
;
1329 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1348-1352, 1360, 1422,
   1434-1435: parameter declarations, braces and the final return are
   missing).  Text below kept byte-identical; confirm against expr.c.  */
1332 /* Return an rtx for a value that would result
1333 from converting X from mode OLDMODE to mode MODE.
1334 Both modes may be floating, or both integer.
1335 UNSIGNEDP is nonzero if X is an unsigned value.
1337 This can be done by referring to a part of X in place
1338 or by copying to a new temporary with conversion.
1340 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1342 This function *must not* call protect_from_queue
1343 except when putting X into an insn (in which case convert_move does it). */
1346 convert_modes (mode
, oldmode
, x
, unsignedp
)
1347 enum machine_mode mode
, oldmode
;
1353 /* If FROM is a SUBREG that indicates that we have already done at least
1354 the required extension, strip it. */
1356 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1357 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1358 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1359 x
= gen_lowpart (mode
, x
);
/* If X carries a mode, it overrides the caller-supplied OLDMODE.  */
1361 if (GET_MODE (x
) != VOIDmode
)
1362 oldmode
= GET_MODE (x
);
1364 if (mode
== oldmode
)
1367 /* There is one case that we must handle specially: If we are converting
1368 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1369 we are to interpret the constant as unsigned, gen_lowpart will do
1370 the wrong if the constant appears negative. What we want to do is
1371 make the high-order word of the constant zero, not all ones. */
1373 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1374 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1375 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1377 HOST_WIDE_INT val
= INTVAL (x
);
1379 if (oldmode
!= VOIDmode
1380 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1382 int width
= GET_MODE_BITSIZE (oldmode
);
1384 /* We need to zero extend VAL. */
1385 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1388 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1391 /* We can do this with a gen_lowpart if both desired and current modes
1392 are integer, and this is either a constant integer, a register, or a
1393 non-volatile MEM. Except for the constant case where MODE is no
1394 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1396 if ((GET_CODE (x
) == CONST_INT
1397 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1398 || (GET_MODE_CLASS (mode
) == MODE_INT
1399 && GET_MODE_CLASS (oldmode
) == MODE_INT
1400 && (GET_CODE (x
) == CONST_DOUBLE
1401 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1402 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1403 && direct_load
[(int) mode
])
1404 || (GET_CODE (x
) == REG
1405 && (! HARD_REGISTER_P (x
)
1406 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
1407 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1408 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1410 /* ?? If we don't know OLDMODE, we have to assume here that
1411 X does not need sign- or zero-extension. This may not be
1412 the case, but it's the best we can do. */
1413 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1414 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1416 HOST_WIDE_INT val
= INTVAL (x
);
1417 int width
= GET_MODE_BITSIZE (oldmode
);
1419 /* We must sign or zero-extend in this case. Start by
1420 zero-extending, then sign extend if we need to. */
1421 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
/* NOTE(review): the condition line for this sign-extension test
   (original line 1422) is missing from the extraction; the fragment
   below is only its continuation.  */
1423 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1424 val
|= (HOST_WIDE_INT
) (-1) << width
;
1426 return gen_int_mode (val
, mode
);
1429 return gen_lowpart (mode
, x
);
/* Fallback: copy X into a fresh pseudo of MODE via convert_move.  */
1432 temp
= gen_reg_rtx (mode
);
1433 convert_move (temp
, x
, unsignedp
);
1437 /* This macro is used to determine what the largest unit size that
1438 move_by_pieces can use is. */
1440 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1441 move efficiently, as opposed to MOVE_MAX which is the maximum
1442 number of bytes we can move with a single instruction. */
1444 #ifndef MOVE_MAX_PIECES
1445 #define MOVE_MAX_PIECES MOVE_MAX
1448 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1449 store efficiently. Due to internal GCC limitations, this is
1450 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1451 for an immediate constant. */
1453 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1463-1464, 1468-1469,
   1478-1479, 1487-1498: the function header lines, several struct-field
   assignments and the PUSH_ROUNDING handling are missing).  Text below
   kept byte-identical; confirm against the original expr.c.  */
1455 /* Generate several move instructions to copy LEN bytes from block FROM to
1456 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1457 and TO through protect_from_queue before calling.
1459 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1460 used to push FROM to the stack.
1462 ALIGN is maximum alignment we can assume. */
1465 move_by_pieces (to
, from
, len
, align
)
1467 unsigned HOST_WIDE_INT len
;
1470 struct move_by_pieces data
;
1471 rtx to_addr
, from_addr
= XEXP (from
, 0);
1472 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1473 enum machine_mode mode
= VOIDmode
, tmode
;
1474 enum insn_code icode
;
1477 data
.from_addr
= from_addr
;
1480 to_addr
= XEXP (to
, 0);
/* Detect auto-increment/decrement addressing on the destination.  */
1483 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1484 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1486 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1493 #ifdef STACK_GROWS_DOWNWARD
1499 data
.to_addr
= to_addr
;
/* Detect auto-increment/decrement addressing on the source.  */
1502 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1503 || GET_CODE (from_addr
) == POST_INC
1504 || GET_CODE (from_addr
) == POST_DEC
);
1506 data
.explicit_inc_from
= 0;
1507 data
.explicit_inc_to
= 0;
1508 if (data
.reverse
) data
.offset
= len
;
1511 /* If copying requires more than two move insns,
1512 copy addresses to registers (to make displacements shorter)
1513 and use post-increment if available. */
1514 if (!(data
.autinc_from
&& data
.autinc_to
)
1515 && move_by_pieces_ninsns (len
, align
) > 2)
1517 /* Find the mode of the largest move... */
1518 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1519 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1520 if (GET_MODE_SIZE (tmode
) < max_size
)
1523 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1525 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1526 data
.autinc_from
= 1;
1527 data
.explicit_inc_from
= -1;
1529 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1531 data
.from_addr
= copy_addr_to_reg (from_addr
);
1532 data
.autinc_from
= 1;
1533 data
.explicit_inc_from
= 1;
1535 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1536 data
.from_addr
= copy_addr_to_reg (from_addr
);
1537 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1539 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1541 data
.explicit_inc_to
= -1;
1543 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1545 data
.to_addr
= copy_addr_to_reg (to_addr
);
1547 data
.explicit_inc_to
= 1;
1549 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1550 data
.to_addr
= copy_addr_to_reg (to_addr
);
/* Clamp ALIGN when unaligned word access is cheap or ALIGN is
   implausibly large.  */
1553 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1554 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1555 align
= MOVE_MAX
* BITS_PER_UNIT
;
1557 /* First move what we can in the largest integer mode, then go to
1558 successively smaller modes. */
1560 while (max_size
> 1)
1562 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1563 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1564 if (GET_MODE_SIZE (tmode
) < max_size
)
1567 if (mode
== VOIDmode
)
1570 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1571 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1572 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1574 max_size
= GET_MODE_SIZE (mode
);
1577 /* The code above should have handled everything. */
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1588-1589, 1605-1609,
   1615-1620: the ALIGN parameter declaration, braces and the final
   return of N_INSNS are missing).  Text below kept byte-identical.  */
1582 /* Return number of insns required to move L bytes by pieces.
1583 ALIGN (in bits) is maximum alignment we can assume. */
1585 static unsigned HOST_WIDE_INT
1586 move_by_pieces_ninsns (l
, align
)
1587 unsigned HOST_WIDE_INT l
;
1590 unsigned HOST_WIDE_INT n_insns
= 0;
1591 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
/* Same ALIGN clamping as in move_by_pieces, so the insn count
   matches what that routine will actually emit.  */
1593 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1594 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1595 align
= MOVE_MAX
* BITS_PER_UNIT
;
1597 while (max_size
> 1)
1599 enum machine_mode mode
= VOIDmode
, tmode
;
1600 enum insn_code icode
;
1602 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1603 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1604 if (GET_MODE_SIZE (tmode
) < max_size
)
1607 if (mode
== VOIDmode
)
1610 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1611 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1612 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1614 max_size
= GET_MODE_SIZE (mode
);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1636-1637, 1644-1645,
   1661-1662, 1668-1672, 1680-1682: braces, the autoinc offset
   arguments, the non-push emit path and the len/offset bookkeeping are
   missing).  Text below kept byte-identical.  */
1622 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1623 with move instructions for mode MODE. GENFUN is the gen_... function
1624 to make a move insn for that mode. DATA has all the other info. */
1627 move_by_pieces_1 (genfun
, mode
, data
)
1628 rtx (*genfun
) PARAMS ((rtx
, ...));
1629 enum machine_mode mode
;
1630 struct move_by_pieces
*data
;
1632 unsigned int size
= GET_MODE_SIZE (mode
);
1633 rtx to1
= NULL_RTX
, from1
;
/* Emit one MODE-sized move per iteration while at least SIZE bytes
   remain.  */
1635 while (data
->len
>= size
)
1638 data
->offset
-= size
;
1642 if (data
->autinc_to
)
1643 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1646 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1649 if (data
->autinc_from
)
1650 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1653 from1
= adjust_address (data
->from
, mode
, data
->offset
);
/* Explicit pre-decrement of the address registers when requested.  */
1655 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1656 emit_insn (gen_add2_insn (data
->to_addr
,
1657 GEN_INT (-(HOST_WIDE_INT
)size
)));
1658 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1659 emit_insn (gen_add2_insn (data
->from_addr
,
1660 GEN_INT (-(HOST_WIDE_INT
)size
)));
1663 emit_insn ((*genfun
) (to1
, from1
));
1666 #ifdef PUSH_ROUNDING
1667 emit_single_push_insn (mode
, from1
, NULL
);
/* Explicit post-increment of the address registers when requested.  */
1673 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1674 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1675 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1676 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1678 if (! data
->reverse
)
1679 data
->offset
+= size
;
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1695-1697, 1699-1707,
   1710-1711, 1717-1719, 1722-1727, 1740-1745, 1759, 1762, 1764-1769:
   the parameter declarations, switch skeleton, abort calls and the
   closing OK_DEFER_POP/return are missing).  Text kept byte-identical. */
1685 /* Emit code to move a block Y to a block X. This may be done with
1686 string-move instructions, with multiple scalar move instructions,
1687 or with a library call.
1689 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1690 SIZE is an rtx that says how long they are.
1691 ALIGN is the maximum alignment we can assume they have.
1692 METHOD describes what kind of copy this is, and what mechanisms may be used.
1694 Return the address of the new block, if memcpy is called and returns it,
1698 emit_block_move (x
, y
, size
, method
)
1700 enum block_op_methods method
;
/* Dispatch on METHOD to decide whether a libcall is permitted.  */
1708 case BLOCK_OP_NORMAL
:
1709 may_use_call
= true;
1712 case BLOCK_OP_CALL_PARM
:
1713 may_use_call
= block_move_libcall_safe_for_call_parm ();
1715 /* Make inhibit_defer_pop nonzero around the library call
1716 to force it to pop the arguments right away. */
1720 case BLOCK_OP_NO_LIBCALL
:
1721 may_use_call
= false;
1728 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
/* Sanity checks: both operands must be BLKmode MEMs.  */
1730 if (GET_MODE (x
) != BLKmode
)
1732 if (GET_MODE (y
) != BLKmode
)
1735 x
= protect_from_queue (x
, 1);
1736 y
= protect_from_queue (y
, 0);
1737 size
= protect_from_queue (size
, 0);
1739 if (GET_CODE (x
) != MEM
)
1741 if (GET_CODE (y
) != MEM
)
1746 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1747 can be incorrect is coming from __builtin_memcpy. */
1748 if (GET_CODE (size
) == CONST_INT
)
1750 x
= shallow_copy_rtx (x
);
1751 y
= shallow_copy_rtx (y
);
1752 set_mem_size (x
, size
);
1753 set_mem_size (y
, size
);
/* Strategy selection: piecewise moves, movstr pattern, libcall,
   or an explicit loop as last resort.  */
1756 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1757 move_by_pieces (x
, y
, INTVAL (size
), align
);
1758 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1760 else if (may_use_call
)
1761 retval
= emit_block_move_via_libcall (x
, y
, size
);
1763 emit_block_move_via_loop (x
, y
, size
, align
);
1765 if (method
== BLOCK_OP_CALL_PARM
)
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1777-1781, 1786-1788,
   1790-1793, 1806, 1808, 1810-1811, 1813-1814, 1816-1818, 1820-1828:
   the #ifdef REG_PARM_STACK_SPACE guard, return statements and the
   fail_takes_regs label are missing).  Text kept byte-identical.  */
1771 /* A subroutine of emit_block_move. Returns true if calling the
1772 block move libcall will not clobber any parameters which may have
1773 already been placed on the stack. */
1776 block_move_libcall_safe_for_call_parm ()
1782 /* Check to see whether memcpy takes all register arguments. */
/* Tri-state cache: result is computed once and reused.  */
1784 takes_regs_uninit
, takes_regs_no
, takes_regs_yes
1785 } takes_regs
= takes_regs_uninit
;
1789 case takes_regs_uninit
:
1791 CUMULATIVE_ARGS args_so_far
;
1794 fn
= emit_block_move_libcall_fn (false);
1795 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
/* Walk memcpy's argument list; if any argument is not passed fully
   in registers, the libcall could clobber stacked parameters.  */
1797 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1798 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1800 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1801 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1802 if (!tmp
|| !REG_P (tmp
))
1803 goto fail_takes_regs
;
1804 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1805 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1807 goto fail_takes_regs
;
1809 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1812 takes_regs
= takes_regs_yes
;
1815 case takes_regs_yes
:
1819 takes_regs
= takes_regs_no
;
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1832-1837, 1844-1847,
   1869-1873, 1878, 1883, 1885-1891, 1893-1897: the parameter
   declarations, volatile_ok handling, pat emission and returns are
   missing).  Text kept byte-identical.  */
1830 /* A subroutine of emit_block_move. Expand a movstr pattern;
1831 return true if successful. */
1834 emit_block_move_via_movstr (x
, y
, size
, align
)
1838 /* Try the most limited insn first, because there's no point
1839 including more than one in the machine description unless
1840 the more limited one has some advantage. */
1842 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1843 enum machine_mode mode
;
1845 /* Since this is a move insn, we don't care about volatility. */
/* Try each integer mode's movstr pattern, narrowest first.  */
1848 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1849 mode
= GET_MODE_WIDER_MODE (mode
))
1851 enum insn_code code
= movstr_optab
[(int) mode
];
1852 insn_operand_predicate_fn pred
;
1854 if (code
!= CODE_FOR_nothing
1855 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1856 here because if SIZE is less than the mode mask, as it is
1857 returned by the macro, it will definitely be less than the
1858 actual mode mask. */
1859 && ((GET_CODE (size
) == CONST_INT
1860 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1861 <= (GET_MODE_MASK (mode
) >> 1)))
1862 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1863 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1864 || (*pred
) (x
, BLKmode
))
1865 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1866 || (*pred
) (y
, BLKmode
))
1867 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1868 || (*pred
) (opalign
, VOIDmode
)))
1871 rtx last
= get_last_insn ();
/* Coerce SIZE into the mode the pattern's operand 2 expects.  */
1874 op2
= convert_to_mode (mode
, size
, 1);
1875 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1876 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1877 op2
= copy_to_mode_reg (mode
, op2
);
1879 /* ??? When called via emit_block_move_for_call, it'd be
1880 nice if there were some way to inform the backend, so
1881 that it doesn't fail the expansion because it thinks
1882 emitting the libcall would be more efficient. */
1884 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
/* On failure, roll back anything emitted while trying this mode.  */
1892 delete_insns_since (last
);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1902-1906, 1909-1910,
   1912, 1916, 1920, 1924-1925, 1931, 1934, 1937, 1941, 1947, 1949,
   1954, 1956, 1960, 1963-1965, 1968-1969, 1975, 1977, 1983, 1985: the
   parameter declarations, braces and blank separators are missing).
   Text kept byte-identical.  */
1900 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1901 Return the return value from memcpy, 0 otherwise. */
1904 emit_block_move_via_libcall (dst
, src
, size
)
1907 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1908 enum machine_mode size_mode
;
1911 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1913 It is unsafe to save the value generated by protect_from_queue
1914 and reuse it later. Consider what happens if emit_queue is
1915 called before the return value from protect_from_queue is used.
1917 Expansion of the CALL_EXPR below will call emit_queue before
1918 we are finished emitting RTL for argument setup. So if we are
1919 not careful we could get the wrong value for an argument.
1921 To avoid this problem we go ahead and emit code to copy X, Y &
1922 SIZE into new pseudos. We can then place those new pseudos
1923 into an RTL_EXPR and use them later, even after a call to
1926 Note this is not strictly needed for library calls since they
1927 do not call emit_queue before loading their arguments. However,
1928 we may need to have library calls call emit_queue in the future
1929 since failing to do so could cause problems for targets which
1930 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1932 dst
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1933 src
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
/* memcpy takes size_t; bcopy takes unsigned.  */
1935 if (TARGET_MEM_FUNCTIONS
)
1936 size_mode
= TYPE_MODE (sizetype
);
1938 size_mode
= TYPE_MODE (unsigned_type_node
);
1939 size
= convert_to_mode (size_mode
, size
, 1);
1940 size
= copy_to_mode_reg (size_mode
, size
);
1942 /* It is incorrect to use the libcall calling conventions to call
1943 memcpy in this context. This could be a user call to memcpy and
1944 the user may wish to examine the return value from memcpy. For
1945 targets where libcalls and normal calls have different conventions
1946 for returning pointers, we could end up generating incorrect code.
1948 For convenience, we generate the call to bcopy this way as well. */
1950 dst_tree
= make_tree (ptr_type_node
, dst
);
1951 src_tree
= make_tree (ptr_type_node
, src
);
1952 if (TARGET_MEM_FUNCTIONS
)
1953 size_tree
= make_tree (sizetype
, size
);
1955 size_tree
= make_tree (unsigned_type_node
, size
);
/* Build the argument list; note memcpy is (dst, src, n) while
   bcopy is (src, dst, n), hence the two orderings.  */
1957 fn
= emit_block_move_libcall_fn (true);
1958 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1959 if (TARGET_MEM_FUNCTIONS
)
1961 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1962 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1966 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1967 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1970 /* Now we have to build up the CALL_EXPR itself. */
1971 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1972 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1973 call_expr
, arg_list
, NULL_TREE
);
1974 TREE_SIDE_EFFECTS (call_expr
) = 1;
1976 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1978 /* If we are initializing a readonly value, show the above call
1979 clobbered it. Otherwise, a load from it may erroneously be
1980 hoisted from a loop. */
1981 if (RTX_UNCHANGING_P (dst
))
1982 emit_insn (gen_rtx_CLOBBER (VOIDmode
, dst
));
/* Only memcpy has a useful return value; bcopy returns void.  */
1984 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 1990, 1992-1996,
   1999-2001, 2003, 2007-2010, 2014-2016, 2022-2025, 2027, 2031-2034:
   the FOR_CALL parameter declaration, braces, the cached-fn early
   return and the final return are missing).  Text kept byte-identical. */
1987 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1988 for the function we use for block copies. The first time FOR_CALL
1989 is true, we call assemble_external. */
/* GC-rooted cache of the built FUNCTION_DECL.  */
1991 static GTY(()) tree block_move_fn
;
1994 emit_block_move_libcall_fn (for_call
)
1997 static bool emitted_extern
;
1998 tree fn
= block_move_fn
, args
;
/* Declare memcpy (or bcopy when memcpy-style functions are not
   available) with the appropriate prototype.  */
2002 if (TARGET_MEM_FUNCTIONS
)
2004 fn
= get_identifier ("memcpy");
2005 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2006 const_ptr_type_node
, sizetype
,
2011 fn
= get_identifier ("bcopy");
2012 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
2013 ptr_type_node
, unsigned_type_node
,
2017 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2018 DECL_EXTERNAL (fn
) = 1;
2019 TREE_PUBLIC (fn
) = 1;
2020 DECL_ARTIFICIAL (fn
) = 1;
2021 TREE_NOTHROW (fn
) = 1;
/* Emit the external declaration once, the first time the function
   is actually called.  */
2026 if (for_call
&& !emitted_extern
)
2028 emitted_extern
= true;
2029 make_decl_rtl (fn
, NULL
);
2030 assemble_external (fn
);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 2039-2042, 2044,
   2047, 2051, 2055, 2057, 2061, 2063, 2066, 2072, 2074, 2077, 2079,
   2082, 2084-2085: parameter declarations and blank separators are
   missing).  Text kept byte-identical.  */
2036 /* A subroutine of emit_block_move. Copy the data via an explicit
2037 loop. This is used only when libcalls are forbidden. */
2038 /* ??? It'd be nice to copy in hunks larger than QImode. */
2041 emit_block_move_via_loop (x
, y
, size
, align
)
2043 unsigned int align ATTRIBUTE_UNUSED
;
2045 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
2046 enum machine_mode iter_mode
;
/* Iterate in SIZE's mode; default to word_mode for VOIDmode sizes.  */
2048 iter_mode
= GET_MODE (size
);
2049 if (iter_mode
== VOIDmode
)
2050 iter_mode
= word_mode
;
2052 top_label
= gen_label_rtx ();
2053 cmp_label
= gen_label_rtx ();
2054 iter
= gen_reg_rtx (iter_mode
);
2056 emit_move_insn (iter
, const0_rtx
);
2058 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
2059 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
2060 do_pending_stack_adjust ();
2062 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
/* Test-at-bottom loop: jump to the comparison first so a zero SIZE
   copies nothing.  */
2064 emit_jump (cmp_label
);
2065 emit_label (top_label
);
/* Copy one byte (QImode) at offset ITER each iteration.  */
2067 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
2068 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
2069 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
2070 x
= change_address (x
, QImode
, x_addr
);
2071 y
= change_address (y
, QImode
, y_addr
);
2073 emit_move_insn (x
, y
);
2075 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
2076 true, OPTAB_LIB_WIDEN
);
2078 emit_move_insn (iter
, tmp
);
2080 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
2081 emit_label (cmp_label
);
2083 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
2086 emit_note (NULL
, NOTE_INSN_LOOP_END
);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 2091-2096, 2098-2099,
   2101-2107, 2110, 2114, 2117-2123, 2125-2127, 2131: the other
   parameter declarations, local declarations and the early-return
   paths are missing).  Text kept byte-identical.  */
2089 /* Copy all or part of a value X into registers starting at REGNO.
2090 The number of registers to be filled is NREGS. */
2093 move_block_to_reg (regno
, x
, nregs
, mode
)
2097 enum machine_mode mode
;
2100 #ifdef HAVE_load_multiple
/* Force non-legitimate constants into the constant pool first.  */
2108 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2109 x
= validize_mem (force_const_mem (mode
, x
));
2111 /* See if the machine can do this with a load multiple insn. */
2112 #ifdef HAVE_load_multiple
2113 if (HAVE_load_multiple
)
2115 last
= get_last_insn ();
2116 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
2124 delete_insns_since (last
);
/* Fallback: one word-sized move per register.  */
2128 for (i
= 0; i
< nregs
; i
++)
2129 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2130 operand_subword_force (x
, i
, mode
));
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 2136-2144, 2146-2148,
   2150-2153, 2158, 2160-2162, 2167, 2169-2173, 2179-2181, 2185,
   2188-2194, 2196-2198, 2200, 2202-2205, 2207-2208: parameter and
   local declarations, returns and abort paths are missing).  Text
   kept byte-identical.  */
2133 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2134 The number of registers to be filled is NREGS. SIZE indicates the number
2135 of bytes in the object X. */
2138 move_block_from_reg (regno
, x
, nregs
, size
)
2145 #ifdef HAVE_store_multiple
2149 enum machine_mode mode
;
2154 /* If SIZE is that of a mode no bigger than a word, just use that
2155 mode's store operation. */
2156 if (size
<= UNITS_PER_WORD
2157 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
2159 emit_move_insn (adjust_address (x
, mode
, 0), gen_rtx_REG (mode
, regno
));
2163 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2164 to the left before storing to memory. Note that the previous test
2165 doesn't handle all cases (e.g. SIZE == 3). */
2166 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
2168 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
2174 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
2175 gen_rtx_REG (word_mode
, regno
),
2176 build_int_2 ((UNITS_PER_WORD
- size
)
2177 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
2178 emit_move_insn (tem
, shift
);
2182 /* See if the machine can do this with a store multiple insn. */
2183 #ifdef HAVE_store_multiple
2184 if (HAVE_store_multiple
)
2186 last
= get_last_insn ();
2187 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
2195 delete_insns_since (last
);
/* Fallback: one word-sized store per register.  */
2199 for (i
= 0; i
< nregs
; i
++)
2201 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2206 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 2215-2216, 2218-2222,
   2224-2225, 2228, 2231-2234, 2236, 2239, 2241-2242: the ORIG
   parameter declaration, locals, abort call and the tmps[0] special
   case are missing).  Text kept byte-identical.  */
2210 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2211 ORIG, where ORIG is a non-consecutive group of registers represented by
2212 a PARALLEL. The clone is identical to the original except in that the
2213 original set of registers is replaced by a new set of pseudo registers.
2214 The new set has the same modes as the original set. */
2217 gen_group_rtx (orig
)
2223 if (GET_CODE (orig
) != PARALLEL
)
2226 length
= XVECLEN (orig
, 0);
2227 tmps
= (rtx
*) alloca (sizeof (rtx
) * length
);
2229 /* Skip a NULL entry in first slot. */
2230 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
/* Clone each (reg, offset) pair with a fresh pseudo of the same mode. */
2235 for (; i
< length
; i
++)
2237 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
2238 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
2240 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
2243 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
2246 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2247 registers represented by a PARALLEL. SSIZE represents the total size of
2248 block SRC in bytes, or -1 if not known. */
2249 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2250 the balance will be in what would be the low-order memory addresses, i.e.
2251 left justified for big endian, right justified for little endian. This
2252 happens to be true for the targets currently using this support. If this
2253 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2257 emit_group_load (dst
, orig_src
, ssize
)
2264 if (GET_CODE (dst
) != PARALLEL
)
2267 /* Check for a NULL entry, used to indicate that the parameter goes
2268 both on the stack and in registers. */
2269 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2274 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
2276 /* Process the pieces. */
2277 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2279 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2280 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2281 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2284 /* Handle trailing fragments that run over the size of the struct. */
2285 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2287 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2288 bytelen
= ssize
- bytepos
;
2293 /* If we won't be loading directly from memory, protect the real source
2294 from strange tricks we might play; but make sure that the source can
2295 be loaded directly into the destination. */
2297 if (GET_CODE (orig_src
) != MEM
2298 && (!CONSTANT_P (orig_src
)
2299 || (GET_MODE (orig_src
) != mode
2300 && GET_MODE (orig_src
) != VOIDmode
)))
2302 if (GET_MODE (orig_src
) == VOIDmode
)
2303 src
= gen_reg_rtx (mode
);
2305 src
= gen_reg_rtx (GET_MODE (orig_src
));
2307 emit_move_insn (src
, orig_src
);
2310 /* Optimize the access just a bit. */
2311 if (GET_CODE (src
) == MEM
2312 && MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
)
2313 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2314 && bytelen
== GET_MODE_SIZE (mode
))
2316 tmps
[i
] = gen_reg_rtx (mode
);
2317 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2319 else if (GET_CODE (src
) == CONCAT
)
2321 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
2322 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2324 if ((bytepos
== 0 && bytelen
== slen0
)
2325 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
2327 /* The following assumes that the concatenated objects all
2328 have the same size. In this case, a simple calculation
2329 can be used to determine the object and the bit field
2331 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
2332 if (! CONSTANT_P (tmps
[i
])
2333 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
2334 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2335 (bytepos
% slen0
) * BITS_PER_UNIT
,
2336 1, NULL_RTX
, mode
, mode
, ssize
);
2338 else if (bytepos
== 0)
2340 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
2341 emit_move_insn (mem
, src
);
2342 tmps
[i
] = adjust_address (mem
, mode
, 0);
2347 else if (CONSTANT_P (src
)
2348 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
2351 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2352 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2355 if (BYTES_BIG_ENDIAN
&& shift
)
2356 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2357 tmps
[i
], 0, OPTAB_WIDEN
);
2362 /* Copy the extracted pieces into the proper (probable) hard regs. */
2363 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2364 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
/* NOTE(review): extraction appears to have dropped interior lines here
   (gaps in the embedded original numbering, e.g. 2369-2370, 2372-2375,
   2379-2380, 2385: the parameter declarations, braces and the abort
   call are missing).  Text kept byte-identical.  */
2367 /* Emit code to move a block SRC to block DST, where SRC and DST are
2368 non-consecutive groups of registers, each represented by a PARALLEL. */
2371 emit_group_move (dst
, src
)
/* Both operands must be PARALLELs of identical length.  */
2376 if (GET_CODE (src
) != PARALLEL
2377 || GET_CODE (dst
) != PARALLEL
2378 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
2381 /* Skip first entry if NULL. */
2382 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
2383 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
2384 XEXP (XVECEXP (src
, 0, i
), 0));
2387 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2388 registers represented by a PARALLEL. SSIZE represents the total size of
2389 block DST, or -1 if not known. */
2392 emit_group_store (orig_dst
, src
, ssize
)
2399 if (GET_CODE (src
) != PARALLEL
)
2402 /* Check for a NULL entry, used to indicate that the parameter goes
2403 both on the stack and in registers. */
2404 if (XEXP (XVECEXP (src
, 0, 0), 0))
2409 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2411 /* Copy the (probable) hard regs into pseudos. */
2412 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2414 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2415 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2416 emit_move_insn (tmps
[i
], reg
);
2420 /* If we won't be storing directly into memory, protect the real destination
2421 from strange tricks we might play. */
2423 if (GET_CODE (dst
) == PARALLEL
)
2427 /* We can get a PARALLEL dst if there is a conditional expression in
2428 a return statement. In that case, the dst and src are the same,
2429 so no action is necessary. */
2430 if (rtx_equal_p (dst
, src
))
2433 /* It is unclear if we can ever reach here, but we may as well handle
2434 it. Allocate a temporary, and split this into a store/load to/from
2437 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2438 emit_group_store (temp
, src
, ssize
);
2439 emit_group_load (dst
, temp
, ssize
);
2442 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2444 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2445 /* Make life a bit easier for combine. */
2446 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2449 /* Process the pieces. */
2450 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2452 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2453 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2454 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2457 /* Handle trailing fragments that run over the size of the struct. */
2458 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2460 if (BYTES_BIG_ENDIAN
)
2462 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2463 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2464 tmps
[i
], 0, OPTAB_WIDEN
);
2466 bytelen
= ssize
- bytepos
;
2469 if (GET_CODE (dst
) == CONCAT
)
2471 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2472 dest
= XEXP (dst
, 0);
2473 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2475 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2476 dest
= XEXP (dst
, 1);
2478 else if (bytepos
== 0 && XVECLEN (src
, 0))
2480 dest
= assign_stack_temp (GET_MODE (dest
),
2481 GET_MODE_SIZE (GET_MODE (dest
)), 0);
2482 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
2491 /* Optimize the access just a bit. */
2492 if (GET_CODE (dest
) == MEM
2493 && MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
)
2494 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2495 && bytelen
== GET_MODE_SIZE (mode
))
2496 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2498 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2499 mode
, tmps
[i
], ssize
);
2504 /* Copy from the pseudo into the (probable) hard reg. */
2505 if (orig_dst
!= dst
)
2506 emit_move_insn (orig_dst
, dst
);
2509 /* Generate code to copy a BLKmode object of TYPE out of a
2510 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2511 is null, a stack temporary is created. TGTBLK is returned.
2513 The primary purpose of this routine is to handle functions
2514 that return BLKmode structures in registers. Some machines
2515 (the PA for example) want to return all small structures
2516 in registers regardless of the structure's alignment. */
2519 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2524 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2525 rtx src
= NULL
, dst
= NULL
;
2526 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2527 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2531 tgtblk
= assign_temp (build_qualified_type (type
,
2533 | TYPE_QUAL_CONST
)),
2535 preserve_temp_slots (tgtblk
);
2538 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2539 into a new pseudo which is a full word. */
2541 if (GET_MODE (srcreg
) != BLKmode
2542 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2543 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2545 /* Structures whose size is not a multiple of a word are aligned
2546 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2547 machine, this means we must skip the empty high order bytes when
2548 calculating the bit offset. */
2549 if (BYTES_BIG_ENDIAN
2550 && bytes
% UNITS_PER_WORD
)
2551 big_endian_correction
2552 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2554 /* Copy the structure BITSIZE bites at a time.
2556 We could probably emit more efficient code for machines which do not use
2557 strict alignment, but it doesn't seem worth the effort at the current
2559 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2560 bitpos
< bytes
* BITS_PER_UNIT
;
2561 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2563 /* We need a new source operand each time xbitpos is on a
2564 word boundary and when xbitpos == big_endian_correction
2565 (the first time through). */
2566 if (xbitpos
% BITS_PER_WORD
== 0
2567 || xbitpos
== big_endian_correction
)
2568 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2571 /* We need a new destination operand each time bitpos is on
2573 if (bitpos
% BITS_PER_WORD
== 0)
2574 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2576 /* Use xbitpos for the source extraction (right justified) and
2577 xbitpos for the destination store (left justified). */
2578 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2579 extract_bit_field (src
, bitsize
,
2580 xbitpos
% BITS_PER_WORD
, 1,
2581 NULL_RTX
, word_mode
, word_mode
,
2589 /* Add a USE expression for REG to the (possibly empty) list pointed
2590 to by CALL_FUSAGE. REG must denote a hard register. */
2593 use_reg (call_fusage
, reg
)
2594 rtx
*call_fusage
, reg
;
2596 if (GET_CODE (reg
) != REG
2597 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2601 = gen_rtx_EXPR_LIST (VOIDmode
,
2602 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2605 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2606 starting at REGNO. All of these registers must be hard registers. */
2609 use_regs (call_fusage
, regno
, nregs
)
2616 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2619 for (i
= 0; i
< nregs
; i
++)
2620 use_reg (call_fusage
, regno_reg_rtx
[regno
+ i
]);
2623 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2624 PARALLEL REGS. This is for calls that pass values in multiple
2625 non-contiguous locations. The Irix 6 ABI has examples of this. */
2628 use_group_regs (call_fusage
, regs
)
2634 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2636 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2638 /* A NULL entry means the parameter goes both on the stack and in
2639 registers. This can also be a MEM for targets that pass values
2640 partially on the stack and partially in registers. */
2641 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2642 use_reg (call_fusage
, reg
);
2647 /* Determine whether the LEN bytes generated by CONSTFUN can be
2648 stored to memory using several move instructions. CONSTFUNDATA is
2649 a pointer which will be passed as argument in every CONSTFUN call.
2650 ALIGN is maximum alignment we can assume. Return nonzero if a
2651 call to store_by_pieces should succeed. */
2654 can_store_by_pieces (len
, constfun
, constfundata
, align
)
2655 unsigned HOST_WIDE_INT len
;
2656 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2660 unsigned HOST_WIDE_INT max_size
, l
;
2661 HOST_WIDE_INT offset
= 0;
2662 enum machine_mode mode
, tmode
;
2663 enum insn_code icode
;
2667 if (! STORE_BY_PIECES_P (len
, align
))
2670 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2671 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2672 align
= MOVE_MAX
* BITS_PER_UNIT
;
2674 /* We would first store what we can in the largest integer mode, then go to
2675 successively smaller modes. */
2678 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2683 max_size
= STORE_MAX_PIECES
+ 1;
2684 while (max_size
> 1)
2686 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2687 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2688 if (GET_MODE_SIZE (tmode
) < max_size
)
2691 if (mode
== VOIDmode
)
2694 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2695 if (icode
!= CODE_FOR_nothing
2696 && align
>= GET_MODE_ALIGNMENT (mode
))
2698 unsigned int size
= GET_MODE_SIZE (mode
);
2705 cst
= (*constfun
) (constfundata
, offset
, mode
);
2706 if (!LEGITIMATE_CONSTANT_P (cst
))
2716 max_size
= GET_MODE_SIZE (mode
);
2719 /* The code above should have handled everything. */
2727 /* Generate several move instructions to store LEN bytes generated by
2728 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2729 pointer which will be passed as argument in every CONSTFUN call.
2730 ALIGN is maximum alignment we can assume. */
2733 store_by_pieces (to
, len
, constfun
, constfundata
, align
)
2735 unsigned HOST_WIDE_INT len
;
2736 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2740 struct store_by_pieces data
;
2742 if (! STORE_BY_PIECES_P (len
, align
))
2744 to
= protect_from_queue (to
, 1);
2745 data
.constfun
= constfun
;
2746 data
.constfundata
= constfundata
;
2749 store_by_pieces_1 (&data
, align
);
2752 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2753 rtx with BLKmode). The caller must pass TO through protect_from_queue
2754 before calling. ALIGN is maximum alignment we can assume. */
2757 clear_by_pieces (to
, len
, align
)
2759 unsigned HOST_WIDE_INT len
;
2762 struct store_by_pieces data
;
2764 data
.constfun
= clear_by_pieces_1
;
2765 data
.constfundata
= NULL
;
2768 store_by_pieces_1 (&data
, align
);
2771 /* Callback routine for clear_by_pieces.
2772 Return const0_rtx unconditionally. */
2775 clear_by_pieces_1 (data
, offset
, mode
)
2776 PTR data ATTRIBUTE_UNUSED
;
2777 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
;
2778 enum machine_mode mode ATTRIBUTE_UNUSED
;
2783 /* Subroutine of clear_by_pieces and store_by_pieces.
2784 Generate several move instructions to store LEN bytes of block TO. (A MEM
2785 rtx with BLKmode). The caller must pass TO through protect_from_queue
2786 before calling. ALIGN is maximum alignment we can assume. */
2789 store_by_pieces_1 (data
, align
)
2790 struct store_by_pieces
*data
;
2793 rtx to_addr
= XEXP (data
->to
, 0);
2794 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2795 enum machine_mode mode
= VOIDmode
, tmode
;
2796 enum insn_code icode
;
2799 data
->to_addr
= to_addr
;
2801 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2802 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2804 data
->explicit_inc_to
= 0;
2806 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2808 data
->offset
= data
->len
;
2810 /* If storing requires more than two move insns,
2811 copy addresses to registers (to make displacements shorter)
2812 and use post-increment if available. */
2813 if (!data
->autinc_to
2814 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2816 /* Determine the main mode we'll be using. */
2817 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2818 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2819 if (GET_MODE_SIZE (tmode
) < max_size
)
2822 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2824 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2825 data
->autinc_to
= 1;
2826 data
->explicit_inc_to
= -1;
2829 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2830 && ! data
->autinc_to
)
2832 data
->to_addr
= copy_addr_to_reg (to_addr
);
2833 data
->autinc_to
= 1;
2834 data
->explicit_inc_to
= 1;
2837 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2838 data
->to_addr
= copy_addr_to_reg (to_addr
);
2841 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2842 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2843 align
= MOVE_MAX
* BITS_PER_UNIT
;
2845 /* First store what we can in the largest integer mode, then go to
2846 successively smaller modes. */
2848 while (max_size
> 1)
2850 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2851 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2852 if (GET_MODE_SIZE (tmode
) < max_size
)
2855 if (mode
== VOIDmode
)
2858 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2859 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2860 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2862 max_size
= GET_MODE_SIZE (mode
);
2865 /* The code above should have handled everything. */
2870 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2871 with move instructions for mode MODE. GENFUN is the gen_... function
2872 to make a move insn for that mode. DATA has all the other info. */
2875 store_by_pieces_2 (genfun
, mode
, data
)
2876 rtx (*genfun
) PARAMS ((rtx
, ...));
2877 enum machine_mode mode
;
2878 struct store_by_pieces
*data
;
2880 unsigned int size
= GET_MODE_SIZE (mode
);
2883 while (data
->len
>= size
)
2886 data
->offset
-= size
;
2888 if (data
->autinc_to
)
2889 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2892 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2894 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2895 emit_insn (gen_add2_insn (data
->to_addr
,
2896 GEN_INT (-(HOST_WIDE_INT
) size
)));
2898 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2899 emit_insn ((*genfun
) (to1
, cst
));
2901 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2902 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2904 if (! data
->reverse
)
2905 data
->offset
+= size
;
2911 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2912 its length in bytes. */
2915 clear_storage (object
, size
)
2920 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2921 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2923 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2924 just move a zero. Otherwise, do this a piece at a time. */
2925 if (GET_MODE (object
) != BLKmode
2926 && GET_CODE (size
) == CONST_INT
2927 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2928 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2931 object
= protect_from_queue (object
, 1);
2932 size
= protect_from_queue (size
, 0);
2934 if (GET_CODE (size
) == CONST_INT
2935 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2936 clear_by_pieces (object
, INTVAL (size
), align
);
2937 else if (clear_storage_via_clrstr (object
, size
, align
))
2940 retval
= clear_storage_via_libcall (object
, size
);
2946 /* A subroutine of clear_storage. Expand a clrstr pattern;
2947 return true if successful. */
2950 clear_storage_via_clrstr (object
, size
, align
)
2954 /* Try the most limited insn first, because there's no point
2955 including more than one in the machine description unless
2956 the more limited one has some advantage. */
2958 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2959 enum machine_mode mode
;
2961 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2962 mode
= GET_MODE_WIDER_MODE (mode
))
2964 enum insn_code code
= clrstr_optab
[(int) mode
];
2965 insn_operand_predicate_fn pred
;
2967 if (code
!= CODE_FOR_nothing
2968 /* We don't need MODE to be narrower than
2969 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2970 the mode mask, as it is returned by the macro, it will
2971 definitely be less than the actual mode mask. */
2972 && ((GET_CODE (size
) == CONST_INT
2973 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2974 <= (GET_MODE_MASK (mode
) >> 1)))
2975 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2976 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2977 || (*pred
) (object
, BLKmode
))
2978 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2979 || (*pred
) (opalign
, VOIDmode
)))
2982 rtx last
= get_last_insn ();
2985 op1
= convert_to_mode (mode
, size
, 1);
2986 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2987 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2988 op1
= copy_to_mode_reg (mode
, op1
);
2990 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2997 delete_insns_since (last
);
3004 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3005 Return the return value of memset, 0 otherwise. */
3008 clear_storage_via_libcall (object
, size
)
3011 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
3012 enum machine_mode size_mode
;
3015 /* OBJECT or SIZE may have been passed through protect_from_queue.
3017 It is unsafe to save the value generated by protect_from_queue
3018 and reuse it later. Consider what happens if emit_queue is
3019 called before the return value from protect_from_queue is used.
3021 Expansion of the CALL_EXPR below will call emit_queue before
3022 we are finished emitting RTL for argument setup. So if we are
3023 not careful we could get the wrong value for an argument.
3025 To avoid this problem we go ahead and emit code to copy OBJECT
3026 and SIZE into new pseudos. We can then place those new pseudos
3027 into an RTL_EXPR and use them later, even after a call to
3030 Note this is not strictly needed for library calls since they
3031 do not call emit_queue before loading their arguments. However,
3032 we may need to have library calls call emit_queue in the future
3033 since failing to do so could cause problems for targets which
3034 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3036 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
3038 if (TARGET_MEM_FUNCTIONS
)
3039 size_mode
= TYPE_MODE (sizetype
);
3041 size_mode
= TYPE_MODE (unsigned_type_node
);
3042 size
= convert_to_mode (size_mode
, size
, 1);
3043 size
= copy_to_mode_reg (size_mode
, size
);
3045 /* It is incorrect to use the libcall calling conventions to call
3046 memset in this context. This could be a user call to memset and
3047 the user may wish to examine the return value from memset. For
3048 targets where libcalls and normal calls have different conventions
3049 for returning pointers, we could end up generating incorrect code.
3051 For convenience, we generate the call to bzero this way as well. */
3053 object_tree
= make_tree (ptr_type_node
, object
);
3054 if (TARGET_MEM_FUNCTIONS
)
3055 size_tree
= make_tree (sizetype
, size
);
3057 size_tree
= make_tree (unsigned_type_node
, size
);
3059 fn
= clear_storage_libcall_fn (true);
3060 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
3061 if (TARGET_MEM_FUNCTIONS
)
3062 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
3063 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
3065 /* Now we have to build up the CALL_EXPR itself. */
3066 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
3067 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
3068 call_expr
, arg_list
, NULL_TREE
);
3069 TREE_SIDE_EFFECTS (call_expr
) = 1;
3071 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
3073 /* If we are initializing a readonly value, show the above call
3074 clobbered it. Otherwise, a load from it may erroneously be
3075 hoisted from a loop. */
3076 if (RTX_UNCHANGING_P (object
))
3077 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
3079 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
3082 /* A subroutine of clear_storage_via_libcall. Create the tree node
3083 for the function we use for block clears. The first time FOR_CALL
3084 is true, we call assemble_external. */
3086 static GTY(()) tree block_clear_fn
;
3089 clear_storage_libcall_fn (for_call
)
3092 static bool emitted_extern
;
3093 tree fn
= block_clear_fn
, args
;
3097 if (TARGET_MEM_FUNCTIONS
)
3099 fn
= get_identifier ("memset");
3100 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
3101 integer_type_node
, sizetype
,
3106 fn
= get_identifier ("bzero");
3107 args
= build_function_type_list (void_type_node
, ptr_type_node
,
3108 unsigned_type_node
, NULL_TREE
);
3111 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
3112 DECL_EXTERNAL (fn
) = 1;
3113 TREE_PUBLIC (fn
) = 1;
3114 DECL_ARTIFICIAL (fn
) = 1;
3115 TREE_NOTHROW (fn
) = 1;
3117 block_clear_fn
= fn
;
3120 if (for_call
&& !emitted_extern
)
3122 emitted_extern
= true;
3123 make_decl_rtl (fn
, NULL
);
3124 assemble_external (fn
);
3130 /* Generate code to copy Y into X.
3131 Both Y and X must have the same mode, except that
3132 Y can be a constant with VOIDmode.
3133 This mode cannot be BLKmode; use emit_block_move for that.
3135 Return the last instruction emitted. */
3138 emit_move_insn (x
, y
)
3141 enum machine_mode mode
= GET_MODE (x
);
3142 rtx y_cst
= NULL_RTX
;
3145 x
= protect_from_queue (x
, 1);
3146 y
= protect_from_queue (y
, 0);
3148 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
3151 /* Never force constant_p_rtx to memory. */
3152 if (GET_CODE (y
) == CONSTANT_P_RTX
)
3154 else if (CONSTANT_P (y
))
3157 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3158 && (last_insn
= compress_float_constant (x
, y
)))
3161 if (!LEGITIMATE_CONSTANT_P (y
))
3164 y
= force_const_mem (mode
, y
);
3166 /* If the target's cannot_force_const_mem prevented the spill,
3167 assume that the target's move expanders will also take care
3168 of the non-legitimate constant. */
3174 /* If X or Y are memory references, verify that their addresses are valid
3176 if (GET_CODE (x
) == MEM
3177 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
3178 && ! push_operand (x
, GET_MODE (x
)))
3180 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
3181 x
= validize_mem (x
);
3183 if (GET_CODE (y
) == MEM
3184 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
3186 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
3187 y
= validize_mem (y
);
3189 if (mode
== BLKmode
)
3192 last_insn
= emit_move_insn_1 (x
, y
);
3194 if (y_cst
&& GET_CODE (x
) == REG
)
3195 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
3200 /* Low level part of emit_move_insn.
3201 Called just like emit_move_insn, but assumes X and Y
3202 are basically valid. */
3205 emit_move_insn_1 (x
, y
)
3208 enum machine_mode mode
= GET_MODE (x
);
3209 enum machine_mode submode
;
3210 enum mode_class
class = GET_MODE_CLASS (mode
);
3212 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
3215 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
3217 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
3219 /* Expand complex moves by moving real part and imag part, if possible. */
3220 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
3221 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
3222 && (mov_optab
->handlers
[(int) submode
].insn_code
3223 != CODE_FOR_nothing
))
3225 /* Don't split destination if it is a stack push. */
3226 int stack
= push_operand (x
, GET_MODE (x
));
3228 #ifdef PUSH_ROUNDING
3229 /* In case we output to the stack, but the size is smaller machine can
3230 push exactly, we need to use move instructions. */
3232 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
3233 != GET_MODE_SIZE (submode
)))
3236 HOST_WIDE_INT offset1
, offset2
;
3238 /* Do not use anti_adjust_stack, since we don't want to update
3239 stack_pointer_delta. */
3240 temp
= expand_binop (Pmode
,
3241 #ifdef STACK_GROWS_DOWNWARD
3249 (GET_MODE_SIZE (GET_MODE (x
)))),
3250 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3252 if (temp
!= stack_pointer_rtx
)
3253 emit_move_insn (stack_pointer_rtx
, temp
);
3255 #ifdef STACK_GROWS_DOWNWARD
3257 offset2
= GET_MODE_SIZE (submode
);
3259 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
3260 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
3261 + GET_MODE_SIZE (submode
));
3264 emit_move_insn (change_address (x
, submode
,
3265 gen_rtx_PLUS (Pmode
,
3267 GEN_INT (offset1
))),
3268 gen_realpart (submode
, y
));
3269 emit_move_insn (change_address (x
, submode
,
3270 gen_rtx_PLUS (Pmode
,
3272 GEN_INT (offset2
))),
3273 gen_imagpart (submode
, y
));
3277 /* If this is a stack, push the highpart first, so it
3278 will be in the argument order.
3280 In that case, change_address is used only to convert
3281 the mode, not to change the address. */
3284 /* Note that the real part always precedes the imag part in memory
3285 regardless of machine's endianness. */
3286 #ifdef STACK_GROWS_DOWNWARD
3287 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3288 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3289 gen_imagpart (submode
, y
)));
3290 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3291 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3292 gen_realpart (submode
, y
)));
3294 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3295 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3296 gen_realpart (submode
, y
)));
3297 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3298 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3299 gen_imagpart (submode
, y
)));
3304 rtx realpart_x
, realpart_y
;
3305 rtx imagpart_x
, imagpart_y
;
3307 /* If this is a complex value with each part being smaller than a
3308 word, the usual calling sequence will likely pack the pieces into
3309 a single register. Unfortunately, SUBREG of hard registers only
3310 deals in terms of words, so we have a problem converting input
3311 arguments to the CONCAT of two registers that is used elsewhere
3312 for complex values. If this is before reload, we can copy it into
3313 memory and reload. FIXME, we should see about using extract and
3314 insert on integer registers, but complex short and complex char
3315 variables should be rarely used. */
3316 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
3317 && (reload_in_progress
| reload_completed
) == 0)
3320 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
3322 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
3324 if (packed_dest_p
|| packed_src_p
)
3326 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
3327 ? MODE_FLOAT
: MODE_INT
);
3329 enum machine_mode reg_mode
3330 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
3332 if (reg_mode
!= BLKmode
)
3334 rtx mem
= assign_stack_temp (reg_mode
,
3335 GET_MODE_SIZE (mode
), 0);
3336 rtx cmem
= adjust_address (mem
, mode
, 0);
3339 = N_("function using short complex types cannot be inline");
3343 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
3345 emit_move_insn_1 (cmem
, y
);
3346 return emit_move_insn_1 (sreg
, mem
);
3350 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
3352 emit_move_insn_1 (mem
, sreg
);
3353 return emit_move_insn_1 (x
, cmem
);
3359 realpart_x
= gen_realpart (submode
, x
);
3360 realpart_y
= gen_realpart (submode
, y
);
3361 imagpart_x
= gen_imagpart (submode
, x
);
3362 imagpart_y
= gen_imagpart (submode
, y
);
3364 /* Show the output dies here. This is necessary for SUBREGs
3365 of pseudos since we cannot track their lifetimes correctly;
3366 hard regs shouldn't appear here except as return values.
3367 We never want to emit such a clobber after reload. */
3369 && ! (reload_in_progress
|| reload_completed
)
3370 && (GET_CODE (realpart_x
) == SUBREG
3371 || GET_CODE (imagpart_x
) == SUBREG
))
3372 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3374 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3375 (realpart_x
, realpart_y
));
3376 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
3377 (imagpart_x
, imagpart_y
));
3380 return get_last_insn ();
3383 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3384 find a mode to do it in. If we have a movcc, use it. Otherwise,
3385 find the MODE_INT mode of the same width. */
3386 else if (GET_MODE_CLASS (mode
) == MODE_CC
3387 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3389 enum insn_code insn_code
;
3390 enum machine_mode tmode
= VOIDmode
;
3394 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3397 for (tmode
= QImode
; tmode
!= VOIDmode
;
3398 tmode
= GET_MODE_WIDER_MODE (tmode
))
3399 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3402 if (tmode
== VOIDmode
)
3405 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3406 may call change_address which is not appropriate if we were
3407 called when a reload was in progress. We don't have to worry
3408 about changing the address since the size in bytes is supposed to
3409 be the same. Copy the MEM to change the mode and move any
3410 substitutions from the old MEM to the new one. */
3412 if (reload_in_progress
)
3414 x
= gen_lowpart_common (tmode
, x1
);
3415 if (x
== 0 && GET_CODE (x1
) == MEM
)
3417 x
= adjust_address_nv (x1
, tmode
, 0);
3418 copy_replacements (x1
, x
);
3421 y
= gen_lowpart_common (tmode
, y1
);
3422 if (y
== 0 && GET_CODE (y1
) == MEM
)
3424 y
= adjust_address_nv (y1
, tmode
, 0);
3425 copy_replacements (y1
, y
);
3430 x
= gen_lowpart (tmode
, x
);
3431 y
= gen_lowpart (tmode
, y
);
3434 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3435 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3438 /* This will handle any multi-word or full-word mode that lacks a move_insn
3439 pattern. However, you will get better code if you define such patterns,
3440 even if they must turn into multiple assembler instructions. */
3441 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3448 #ifdef PUSH_ROUNDING
3450 /* If X is a push on the stack, do the push now and replace
3451 X with a reference to the stack pointer. */
3452 if (push_operand (x
, GET_MODE (x
)))
3457 /* Do not use anti_adjust_stack, since we don't want to update
3458 stack_pointer_delta. */
3459 temp
= expand_binop (Pmode
,
3460 #ifdef STACK_GROWS_DOWNWARD
3468 (GET_MODE_SIZE (GET_MODE (x
)))),
3469 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3471 if (temp
!= stack_pointer_rtx
)
3472 emit_move_insn (stack_pointer_rtx
, temp
);
3474 code
= GET_CODE (XEXP (x
, 0));
3476 /* Just hope that small offsets off SP are OK. */
3477 if (code
== POST_INC
)
3478 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3479 GEN_INT (-((HOST_WIDE_INT
)
3480 GET_MODE_SIZE (GET_MODE (x
)))));
3481 else if (code
== POST_DEC
)
3482 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3483 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3485 temp
= stack_pointer_rtx
;
3487 x
= change_address (x
, VOIDmode
, temp
);
3491 /* If we are in reload, see if either operand is a MEM whose address
3492 is scheduled for replacement. */
3493 if (reload_in_progress
&& GET_CODE (x
) == MEM
3494 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3495 x
= replace_equiv_address_nv (x
, inner
);
3496 if (reload_in_progress
&& GET_CODE (y
) == MEM
3497 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3498 y
= replace_equiv_address_nv (y
, inner
);
3504 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3507 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3508 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3510 /* If we can't get a part of Y, put Y into memory if it is a
3511 constant. Otherwise, force it into a register. If we still
3512 can't get a part of Y, abort. */
3513 if (ypart
== 0 && CONSTANT_P (y
))
3515 y
= force_const_mem (mode
, y
);
3516 ypart
= operand_subword (y
, i
, 1, mode
);
3518 else if (ypart
== 0)
3519 ypart
= operand_subword_force (y
, i
, mode
);
3521 if (xpart
== 0 || ypart
== 0)
3524 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3526 last_insn
= emit_move_insn (xpart
, ypart
);
3532 /* Show the output dies here. This is necessary for SUBREGs
3533 of pseudos since we cannot track their lifetimes correctly;
3534 hard regs shouldn't appear here except as return values.
3535 We never want to emit such a clobber after reload. */
3537 && ! (reload_in_progress
|| reload_completed
)
3538 && need_clobber
!= 0)
3539 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3549 /* If Y is representable exactly in a narrower mode, and the target can
3550 perform the extension directly from constant or memory, then emit the
3551 move as an extension. */
3554 compress_float_constant (x
, y
)
3557 enum machine_mode dstmode
= GET_MODE (x
);
3558 enum machine_mode orig_srcmode
= GET_MODE (y
);
3559 enum machine_mode srcmode
;
3562 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3564 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3565 srcmode
!= orig_srcmode
;
3566 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3569 rtx trunc_y
, last_insn
;
3571 /* Skip if the target can't extend this way. */
3572 ic
= can_extend_p (dstmode
, srcmode
, 0);
3573 if (ic
== CODE_FOR_nothing
)
3576 /* Skip if the narrowed value isn't exact. */
3577 if (! exact_real_truncate (srcmode
, &r
))
3580 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3582 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3584 /* Skip if the target needs extra instructions to perform
3586 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3589 else if (float_extend_from_mem
[dstmode
][srcmode
])
3590 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3594 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3595 last_insn
= get_last_insn ();
3597 if (GET_CODE (x
) == REG
)
3598 REG_NOTES (last_insn
)
3599 = gen_rtx_EXPR_LIST (REG_EQUAL
, y
, REG_NOTES (last_insn
));
3607 /* Pushing data onto the stack. */
3609 /* Push a block of length SIZE (perhaps variable)
3610 and return an rtx to address the beginning of the block.
3611 Note that it is not possible for the value returned to be a QUEUED.
3612 The value may be virtual_outgoing_args_rtx.
3614 EXTRA is the number of bytes of padding to push in addition to SIZE.
3615 BELOW nonzero means this padding comes at low addresses;
3616 otherwise, the padding comes at high addresses. */
3619 push_block (size
, extra
, below
)
3625 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3626 if (CONSTANT_P (size
))
3627 anti_adjust_stack (plus_constant (size
, extra
));
3628 else if (GET_CODE (size
) == REG
&& extra
== 0)
3629 anti_adjust_stack (size
);
3632 temp
= copy_to_mode_reg (Pmode
, size
);
3634 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3635 temp
, 0, OPTAB_LIB_WIDEN
);
3636 anti_adjust_stack (temp
);
3639 #ifndef STACK_GROWS_DOWNWARD
3645 temp
= virtual_outgoing_args_rtx
;
3646 if (extra
!= 0 && below
)
3647 temp
= plus_constant (temp
, extra
);
3651 if (GET_CODE (size
) == CONST_INT
)
3652 temp
= plus_constant (virtual_outgoing_args_rtx
,
3653 -INTVAL (size
) - (below
? 0 : extra
));
3654 else if (extra
!= 0 && !below
)
3655 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3656 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3658 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3659 negate_rtx (Pmode
, size
));
3662 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3665 #ifdef PUSH_ROUNDING
3667 /* Emit single push insn. */
/* emit_single_push_insn: push one value X of mode MODE onto the stack.
   TYPE (a tree) is used only to set memory attributes on the stack
   slot.  NOTE(review): garbled extraction -- braces and several lines
   are missing; fragments annotated as-is.  */
3670 emit_single_push_insn (mode
, x
, type
)
3672 enum machine_mode mode
;
/* Stack space actually consumed, after target-specific rounding.  */
3676 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3678 enum insn_code icode
;
3679 insn_operand_predicate_fn pred
;
/* Account for this push in the global stack-pointer tracking.  */
3681 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3682 /* If there is push pattern, use it. Otherwise try old way of throwing
3683 MEM representing push operation to move expander. */
3684 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3685 if (icode
!= CODE_FOR_nothing
)
/* The target has a push pattern for this mode: force X to satisfy the
   operand predicate if needed, then emit the push insn directly.
   (An early return after this emit was dropped by the extraction.)  */
3687 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3688 && !((*pred
) (x
, mode
))))
3689 x
= force_reg (mode
, x
);
3690 emit_insn (GEN_FCN (icode
) (x
));
/* Fallback: build a MEM whose address encodes the push.  If no
   rounding is needed, a simple PRE_DEC/PRE_INC (STACK_PUSH_CODE)
   address suffices.  */
3693 if (GET_MODE_SIZE (mode
) == rounded_size
)
3694 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
/* Otherwise express the rounded adjustment explicitly, signed by the
   stack growth direction, and wrap it in a PRE_MODIFY.  */
3697 #ifdef STACK_GROWS_DOWNWARD
3698 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3699 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3701 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3702 GEN_INT (rounded_size
));
3704 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3707 dest
= gen_rtx_MEM (mode
, dest_addr
);
/* Give the stack slot the alias/alignment attributes of TYPE.  */
3711 set_mem_attributes (dest
, type
, 1);
3713 if (flag_optimize_sibling_calls
)
3714 /* Function incoming arguments may overlap with sibling call
3715 outgoing arguments and we cannot allow reordering of reads
3716 from function arguments with stores to outgoing arguments
3717 of sibling calls. */
3718 set_mem_alias_set (dest
, 0);
/* The store through the pre-modify address performs the push.  */
3720 emit_move_insn (dest
, x
);
3724 /* Generate code to push X onto the stack, assuming it has mode MODE and
3726 MODE is redundant except when X is a CONST_INT (since they don't
3728 SIZE is an rtx for the size of data to be copied (in bytes),
3729 needed only if X is BLKmode.
3731 ALIGN (in bits) is maximum alignment we can assume.
3733 If PARTIAL and REG are both nonzero, then copy that many of the first
3734 words of X into registers starting with REG, and push the rest of X.
3735 The amount of space pushed is decreased by PARTIAL words,
3736 rounded *down* to a multiple of PARM_BOUNDARY.
3737 REG must be a hard register in this case.
3738 If REG is zero but PARTIAL is not, take any all others actions for an
3739 argument partially in registers, but do not actually load any
3742 EXTRA is the amount in bytes of extra space to leave next to this arg.
3743 This is ignored if an argument block has already been allocated.
3745 On a machine that lacks real push insns, ARGS_ADDR is the address of
3746 the bottom of the argument block for this call. We use indexing off there
3747 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3748 argument block has not been preallocated.
3750 ARGS_SO_FAR is the size of args previously pushed for this call.
3752 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3753 for arguments passed in registers. If nonzero, it will be the number
3754 of bytes required. */
/* emit_push_insn: push X (mode MODE, tree type TYPE) onto the stack as
   an outgoing argument, handling BLKmode blocks, scalars partly in
   registers, and plain scalars -- see the header comment above.
   NOTE(review): garbled extraction -- the parameter list is truncated
   (trailing parameters after reg_parm_stack_space were dropped) and
   many interior lines (braces, declarations, else-arms) are missing;
   fragments annotated as-is, not reconstructed.  */
3757 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3758 args_addr
, args_so_far
, reg_parm_stack_space
,
3761 enum machine_mode mode
;
3770 int reg_parm_stack_space
;
3774 enum direction stack_direction
3775 #ifdef STACK_GROWS_DOWNWARD
3781 /* Decide where to pad the argument: `downward' for below,
3782 `upward' for above, or `none' for don't pad it.
3783 Default is below for small data on big-endian machines; else above. */
3784 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3786 /* Invert direction if stack is post-decrement.
3788 if (STACK_PUSH_CODE
== POST_DEC
)
3789 if (where_pad
!= none
)
3790 where_pad
= (where_pad
== downward
? upward
: downward
);
3792 xinner
= x
= protect_from_queue (x
, 0);
/* Case 1: BLKmode -- copy a memory block onto the stack.  */
3794 if (mode
== BLKmode
)
3796 /* Copy a block into the stack, entirely or partially. */
3799 int used
= partial
* UNITS_PER_WORD
;
3800 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3808 /* USED is now the # of bytes we need not copy to the stack
3809 because registers will take care of them. */
3812 xinner
= adjust_address (xinner
, BLKmode
, used
);
3814 /* If the partial register-part of the arg counts in its stack size,
3815 skip the part of stack space corresponding to the registers.
3816 Otherwise, start copying to the beginning of the stack space,
3817 by setting SKIP to 0. */
3818 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3820 #ifdef PUSH_ROUNDING
3821 /* Do it with several push insns if that doesn't take lots of insns
3822 and if there is no difficulty with push insns that skip bytes
3823 on the stack for alignment purposes. */
/* (The leading conjuncts of this condition were dropped by the
   extraction.)  */
3826 && GET_CODE (size
) == CONST_INT
3828 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3829 /* Here we avoid the case of a structure whose weak alignment
3830 forces many pushes of a small amount of data,
3831 and such small pushes do rounding that causes trouble. */
3832 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3833 || align
>= BIGGEST_ALIGNMENT
3834 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3835 == (align
/ BITS_PER_UNIT
)))
3836 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3838 /* Push padding now if padding above and stack grows down,
3839 or if padding below and stack grows up.
3840 But if space already allocated, this has already been done. */
3841 if (extra
&& args_addr
== 0
3842 && where_pad
!= none
&& where_pad
!= stack_direction
)
3843 anti_adjust_stack (GEN_INT (extra
));
3845 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
);
3848 #endif /* PUSH_ROUNDING */
3852 /* Otherwise make space on the stack and copy the data
3853 to the address of that space. */
3855 /* Deduct words put into registers from the size we must copy. */
3858 if (GET_CODE (size
) == CONST_INT
)
3859 size
= GEN_INT (INTVAL (size
) - used
);
3861 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3862 GEN_INT (used
), NULL_RTX
, 0,
3866 /* Get the address of the stack space.
3867 In this case, we do not deal with EXTRA separately.
3868 A single stack adjust will do. */
3871 temp
= push_block (size
, extra
, where_pad
== downward
);
3874 else if (GET_CODE (args_so_far
) == CONST_INT
)
3875 temp
= memory_address (BLKmode
,
3876 plus_constant (args_addr
,
3877 skip
+ INTVAL (args_so_far
)));
3879 temp
= memory_address (BLKmode
,
3880 plus_constant (gen_rtx_PLUS (Pmode
,
3885 if (!ACCUMULATE_OUTGOING_ARGS
)
3887 /* If the source is referenced relative to the stack pointer,
3888 copy it to another register to stabilize it. We do not need
3889 to do this if we know that we won't be changing sp. */
3891 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3892 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3893 temp
= copy_to_reg (temp
);
3896 target
= gen_rtx_MEM (BLKmode
, temp
);
3900 set_mem_attributes (target
, type
, 1);
3901 /* Function incoming arguments may overlap with sibling call
3902 outgoing arguments and we cannot allow reordering of reads
3903 from function arguments with stores to outgoing arguments
3904 of sibling calls. */
3905 set_mem_alias_set (target
, 0);
3908 /* ALIGN may well be better aligned than TYPE, e.g. due to
3909 PARM_BOUNDARY. Assume the caller isn't lying. */
3910 set_mem_align (target
, align
);
3912 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
/* Case 2: scalar argument passed partly in registers, partly on the
   stack; push it word by word.  */
3915 else if (partial
> 0)
3917 /* Scalar partly in registers. */
3919 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3922 /* # words of start of argument
3923 that we must make space for but need not store. */
3924 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3925 int args_offset
= INTVAL (args_so_far
);
3928 /* Push padding now if padding above and stack grows down,
3929 or if padding below and stack grows up.
3930 But if space already allocated, this has already been done. */
3931 if (extra
&& args_addr
== 0
3932 && where_pad
!= none
&& where_pad
!= stack_direction
)
3933 anti_adjust_stack (GEN_INT (extra
));
3935 /* If we make space by pushing it, we might as well push
3936 the real data. Otherwise, we can leave OFFSET nonzero
3937 and leave the space uninitialized. */
3941 /* Now NOT_STACK gets the number of words that we don't need to
3942 allocate on the stack. */
3943 not_stack
= partial
- offset
;
3945 /* If the partial register-part of the arg counts in its stack size,
3946 skip the part of stack space corresponding to the registers.
3947 Otherwise, start copying to the beginning of the stack space,
3948 by setting SKIP to 0. */
3949 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3951 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3952 x
= validize_mem (force_const_mem (mode
, x
));
3954 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3955 SUBREGs of such registers are not allowed. */
3956 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3957 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3958 x
= copy_to_reg (x
);
3960 /* Loop over all the words allocated on the stack for this arg. */
3961 /* We can do it by words, because any scalar bigger than a word
3962 has a size a multiple of a word. */
3963 #ifndef PUSH_ARGS_REVERSED
3964 for (i
= not_stack
; i
< size
; i
++)
3966 for (i
= size
- 1; i
>= not_stack
; i
--)
3968 if (i
>= not_stack
+ offset
)
/* Recursive call pushes each stack-resident word individually.  */
3969 emit_push_insn (operand_subword_force (x
, i
, mode
),
3970 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3972 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3974 reg_parm_stack_space
, alignment_pad
);
/* Case 3 (enclosing else dropped): ordinary scalar push.  */
3981 /* Push padding now if padding above and stack grows down,
3982 or if padding below and stack grows up.
3983 But if space already allocated, this has already been done. */
3984 if (extra
&& args_addr
== 0
3985 && where_pad
!= none
&& where_pad
!= stack_direction
)
3986 anti_adjust_stack (GEN_INT (extra
));
3988 #ifdef PUSH_ROUNDING
3989 if (args_addr
== 0 && PUSH_ARGS
)
3990 emit_single_push_insn (mode
, x
, type
);
/* No push insns available: store into the preallocated argument
   block at ARGS_ADDR + ARGS_SO_FAR.  */
3994 if (GET_CODE (args_so_far
) == CONST_INT
)
3996 = memory_address (mode
,
3997 plus_constant (args_addr
,
3998 INTVAL (args_so_far
)));
4000 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
4002 dest
= gen_rtx_MEM (mode
, addr
);
4005 set_mem_attributes (dest
, type
, 1);
4006 /* Function incoming arguments may overlap with sibling call
4007 outgoing arguments and we cannot allow reordering of reads
4008 from function arguments with stores to outgoing arguments
4009 of sibling calls. */
4010 set_mem_alias_set (dest
, 0);
4013 emit_move_insn (dest
, x
);
4017 /* If part should go in registers, copy that part
4018 into the appropriate registers. Do this now, at the end,
4019 since mem-to-mem copies above may do function calls. */
4020 if (partial
> 0 && reg
!= 0)
4022 /* Handle calls that pass values in multiple non-contiguous locations.
4023 The Irix 6 ABI has examples of this. */
4024 if (GET_CODE (reg
) == PARALLEL
)
4025 emit_group_load (reg
, x
, -1); /* ??? size? */
4027 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
/* Trailing padding on the same side the stack grows.  */
4030 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4031 anti_adjust_stack (GEN_INT (extra
));
4033 if (alignment_pad
&& args_addr
== 0)
4034 anti_adjust_stack (alignment_pad
);
4037 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4045 /* Only registers can be subtargets. */
4046 || GET_CODE (x
) != REG
4047 /* If the register is readonly, it can't be set more than once. */
4048 || RTX_UNCHANGING_P (x
)
4049 /* Don't use hard regs to avoid extending their life. */
4050 || REGNO (x
) < FIRST_PSEUDO_REGISTER
4051 /* Avoid subtargets inside loops,
4052 since they hide some invariant expressions. */
4053 || preserve_subexpressions_p ())
4057 /* Expand an assignment that stores the value of FROM into TO.
4058 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4059 (This may contain a QUEUED rtx;
4060 if the value is constant, this rtx is a constant.)
4061 Otherwise, the returned value is NULL_RTX.
4063 SUGGEST_REG is no longer actually used.
4064 It used to mean, copy the value through a register
4065 and return that register, if that is possible.
4066 We now use WANT_VALUE to decide whether to do this. */
/* expand_assignment: emit RTL that stores the value of tree FROM into
   tree TO; returns an rtx for TO's value when WANT_VALUE is nonzero,
   else NULL_RTX (see header comment above).  NOTE(review): garbled
   extraction -- local declarations, braces and several lines are
   missing; fragments annotated as-is, not reconstructed.  */
4069 expand_assignment (to
, from
, want_value
, suggest_reg
)
4072 int suggest_reg ATTRIBUTE_UNUSED
;
4077 /* Don't crash if the lhs of the assignment was erroneous. */
4079 if (TREE_CODE (to
) == ERROR_MARK
)
4081 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4082 return want_value
? result
: NULL_RTX
;
4085 /* Assignment of a structure component needs special treatment
4086 if the structure component's rtx is not simply a MEM.
4087 Assignment of an array element at a constant index, and assignment of
4088 an array element in an unaligned packed structure field, has the same
4091 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
4092 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
4093 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4095 enum machine_mode mode1
;
4096 HOST_WIDE_INT bitsize
, bitpos
;
/* Decompose the reference into object + bit position/size/offset.  */
4104 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4105 &unsignedp
, &volatilep
);
4107 /* If we are going to use store_bit_field and extract_bit_field,
4108 make sure to_rtx will be safe for multiple use. */
4110 if (mode1
== VOIDmode
&& want_value
)
4111 tem
= stabilize_reference (tem
);
4113 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
/* Apply a variable offset, if the reference has one.  */
4117 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4119 if (GET_CODE (to_rtx
) != MEM
)
4122 #ifdef POINTERS_EXTEND_UNSIGNED
4123 if (GET_MODE (offset_rtx
) != Pmode
)
4124 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4126 if (GET_MODE (offset_rtx
) != ptr_mode
)
4127 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4130 /* A constant address in TO_RTX can have VOIDmode, we must not try
4131 to call force_reg for that case. Avoid that case. */
4132 if (GET_CODE (to_rtx
) == MEM
4133 && GET_MODE (to_rtx
) == BLKmode
4134 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
4136 && (bitpos
% bitsize
) == 0
4137 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4138 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
4140 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4144 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4145 highest_pow2_factor_for_type (TREE_TYPE (to
),
4149 if (GET_CODE (to_rtx
) == MEM
)
4151 /* If the field is at offset zero, we could have been given the
4152 DECL_RTX of the parent struct. Don't munge it. */
4153 to_rtx
= shallow_copy_rtx (to_rtx
);
4155 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
4158 /* Deal with volatile and readonly fields. The former is only done
4159 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4160 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
4162 if (to_rtx
== orig_to_rtx
)
4163 to_rtx
= copy_rtx (to_rtx
);
4164 MEM_VOLATILE_P (to_rtx
) = 1;
4167 if (TREE_CODE (to
) == COMPONENT_REF
4168 && TREE_READONLY (TREE_OPERAND (to
, 1)))
4170 if (to_rtx
== orig_to_rtx
)
4171 to_rtx
= copy_rtx (to_rtx
);
4172 RTX_UNCHANGING_P (to_rtx
) = 1;
4175 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
4177 if (to_rtx
== orig_to_rtx
)
4178 to_rtx
= copy_rtx (to_rtx
);
4179 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
/* Store the value into the (possibly bit-level) field.  */
4182 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
4184 /* Spurious cast for HPUX compiler. */
4185 ? ((enum machine_mode
)
4186 TYPE_MODE (TREE_TYPE (to
)))
4188 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
4190 preserve_temp_slots (result
);
4194 /* If the value is meaningful, convert RESULT to the proper mode.
4195 Otherwise, return nothing. */
4196 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
4197 TYPE_MODE (TREE_TYPE (from
)),
4199 TREE_UNSIGNED (TREE_TYPE (to
)))
4203 /* If the rhs is a function call and its value is not an aggregate,
4204 call the function before we start to compute the lhs.
4205 This is needed for correct code for cases such as
4206 val = setjmp (buf) on machines where reference to val
4207 requires loading up part of an address in a separate insn.
4209 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4210 since it might be a promoted variable where the zero- or sign- extension
4211 needs to be done. Handling this in the normal way is safe because no
4212 computation is done before the call. */
4213 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
4214 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
4215 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
4216 && GET_CODE (DECL_RTL (to
)) == REG
))
4221 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
4223 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4225 /* Handle calls that return values in multiple non-contiguous locations.
4226 The Irix 6 ABI has examples of this. */
4227 if (GET_CODE (to_rtx
) == PARALLEL
)
4228 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)));
4229 else if (GET_MODE (to_rtx
) == BLKmode
)
4230 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
4233 #ifdef POINTERS_EXTEND_UNSIGNED
4234 if (POINTER_TYPE_P (TREE_TYPE (to
))
4235 && GET_MODE (to_rtx
) != GET_MODE (value
))
4236 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
4238 emit_move_insn (to_rtx
, value
);
4240 preserve_temp_slots (to_rtx
);
4243 return want_value
? to_rtx
: NULL_RTX
;
4246 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4247 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4250 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4252 /* Don't move directly into a return register. */
4253 if (TREE_CODE (to
) == RESULT_DECL
4254 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
4259 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
4261 if (GET_CODE (to_rtx
) == PARALLEL
)
4262 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)));
4264 emit_move_insn (to_rtx
, temp
);
4266 preserve_temp_slots (to_rtx
);
4269 return want_value
? to_rtx
: NULL_RTX
;
4272 /* In case we are returning the contents of an object which overlaps
4273 the place the value is being stored, use a safe function when copying
4274 a value through a pointer into a structure value return block. */
4275 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
4276 && current_function_returns_struct
4277 && !current_function_returns_pcc_struct
)
4282 size
= expr_size (from
);
4283 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
/* Overlap-safe copy: memmove when the target has mem functions,
   otherwise BSD bcopy (note the swapped src/dst argument order).  */
4285 if (TARGET_MEM_FUNCTIONS
)
4286 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
4287 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
4288 XEXP (from_rtx
, 0), Pmode
,
4289 convert_to_mode (TYPE_MODE (sizetype
),
4290 size
, TREE_UNSIGNED (sizetype
)),
4291 TYPE_MODE (sizetype
));
4293 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
4294 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
4295 XEXP (to_rtx
, 0), Pmode
,
4296 convert_to_mode (TYPE_MODE (integer_type_node
),
4298 TREE_UNSIGNED (integer_type_node
)),
4299 TYPE_MODE (integer_type_node
));
4301 preserve_temp_slots (to_rtx
);
4304 return want_value
? to_rtx
: NULL_RTX
;
4307 /* Compute FROM and store the value in the rtx we got. */
4310 result
= store_expr (from
, to_rtx
, want_value
);
4311 preserve_temp_slots (result
);
4314 return want_value
? result
: NULL_RTX
;
4317 /* Generate code for computing expression EXP,
4318 and storing the value into TARGET.
4319 TARGET may contain a QUEUED rtx.
4321 If WANT_VALUE & 1 is nonzero, return a copy of the value
4322 not in TARGET, so that we can be sure to use the proper
4323 value in a containing expression even if TARGET has something
4324 else stored in it. If possible, we copy the value through a pseudo
4325 and return that pseudo. Or, if the value is constant, we try to
4326 return the constant. In some cases, we return a pseudo
4327 copied *from* TARGET.
4329 If the mode is BLKmode then we may return TARGET itself.
4330 It turns out that in BLKmode it doesn't cause a problem.
4331 because C has no operators that could combine two different
4332 assignments into the same BLKmode object with different values
4333 with no sequence point. Will other languages need this to
4336 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4337 to catch quickly any cases where the caller uses the value
4338 and fails to set WANT_VALUE.
4340 If WANT_VALUE & 2 is set, this is a store into a call param on the
4341 stack, and block moves may need to be treated specially. */
/* store_expr: generate code computing EXP and storing it into TARGET;
   WANT_VALUE bits control whether/ how a value rtx is returned (see
   header comment above).  NOTE(review): garbled extraction -- local
   declarations, braces and several lines are missing; fragments are
   annotated as-is, not reconstructed.  */
4344 store_expr (exp
, target
, want_value
)
4350 int dont_return_target
= 0;
4351 int dont_store_target
= 0;
/* Void-typed EXP produces no storable value; just expand for effect.  */
4353 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4355 /* C++ can generate ?: expressions with a throw expression in one
4356 branch and an rvalue in the other. Here, we resolve attempts to
4357 store the throw expression's nonexistant result. */
4360 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4363 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4365 /* Perform first part of compound expression, then assign from second
4367 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4368 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4370 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4372 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4374 /* For conditional expression, get safe form of the target. Then
4375 test the condition, doing the appropriate assignment on either
4376 side. This avoids the creation of unnecessary temporaries.
4377 For non-BLKmode, it is more efficient not to do this. */
4379 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4382 target
= protect_from_queue (target
, 1);
4384 do_pending_stack_adjust ();
4386 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4387 start_cleanup_deferral ();
4388 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4389 end_cleanup_deferral ();
4391 emit_jump_insn (gen_jump (lab2
));
4394 start_cleanup_deferral ();
4395 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4396 end_cleanup_deferral ();
4401 return want_value
& 1 ? target
: NULL_RTX
;
4403 else if (queued_subexp_p (target
))
4404 /* If target contains a postincrement, let's not risk
4405 using it as the place to generate the rhs. */
4407 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4409 /* Expand EXP into a new pseudo. */
4410 temp
= gen_reg_rtx (GET_MODE (target
));
4411 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4413 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4416 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4418 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4420 /* If target is volatile, ANSI requires accessing the value
4421 *from* the target, if it is accessed. So make that happen.
4422 In no case return the target itself. */
4423 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4424 dont_return_target
= 1;
4426 else if ((want_value
& 1) != 0
4427 && GET_CODE (target
) == MEM
4428 && ! MEM_VOLATILE_P (target
)
4429 && GET_MODE (target
) != BLKmode
)
4430 /* If target is in memory and caller wants value in a register instead,
4431 arrange that. Pass TARGET as target for expand_expr so that,
4432 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4433 We know expand_expr will not use the target in that case.
4434 Don't do this if TARGET is volatile because we are supposed
4435 to write it and then read it. */
4437 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4438 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4439 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4441 /* If TEMP is already in the desired TARGET, only copy it from
4442 memory and don't store it there again. */
4444 || (rtx_equal_p (temp
, target
)
4445 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4446 dont_store_target
= 1;
4447 temp
= copy_to_reg (temp
);
4449 dont_return_target
= 1;
4451 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4452 /* If this is a scalar in a register that is stored in a wider mode
4453 than the declared mode, compute the result into its declared mode
4454 and then convert to the wider mode. Our value is the computed
4457 rtx inner_target
= 0;
4459 /* If we don't want a value, we can do the conversion inside EXP,
4460 which will often result in some optimizations. Do the conversion
4461 in two steps: first change the signedness, if needed, then
4462 the extend. But don't do this if the type of EXP is a subtype
4463 of something else since then the conversion might involve
4464 more than just converting modes. */
4465 if ((want_value
& 1) == 0
4466 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4467 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4469 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4470 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4472 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4473 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4475 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4476 (GET_MODE (SUBREG_REG (target
)),
4477 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4480 inner_target
= SUBREG_REG (target
);
4483 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4484 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4486 /* If TEMP is a MEM and we want a result value, make the access
4487 now so it gets done only once. Strictly speaking, this is
4488 only necessary if the MEM is volatile, or if the address
4489 overlaps TARGET. But not performing the load twice also
4490 reduces the amount of rtl we generate and then have to CSE. */
4491 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4492 temp
= copy_to_reg (temp
);
4494 /* If TEMP is a VOIDmode constant, use convert_modes to make
4495 sure that we properly convert it. */
4496 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4498 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4499 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4500 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4501 GET_MODE (target
), temp
,
4502 SUBREG_PROMOTED_UNSIGNED_P (target
));
4505 convert_move (SUBREG_REG (target
), temp
,
4506 SUBREG_PROMOTED_UNSIGNED_P (target
));
4508 /* If we promoted a constant, change the mode back down to match
4509 target. Otherwise, the caller might get confused by a result whose
4510 mode is larger than expected. */
4512 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4514 if (GET_MODE (temp
) != VOIDmode
)
4516 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4517 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4518 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4519 SUBREG_PROMOTED_UNSIGNED_P (target
));
4522 temp
= convert_modes (GET_MODE (target
),
4523 GET_MODE (SUBREG_REG (target
)),
4524 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4527 return want_value
& 1 ? temp
: NULL_RTX
;
/* General case (enclosing else dropped): expand EXP into TARGET.  */
4531 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4532 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4533 /* Return TARGET if it's a specified hardware register.
4534 If TARGET is a volatile mem ref, either return TARGET
4535 or return a reg copied *from* TARGET; ANSI requires this.
4537 Otherwise, if TEMP is not TARGET, return TEMP
4538 if it is constant (for efficiency),
4539 or if we really want the correct value. */
4540 if (!(target
&& GET_CODE (target
) == REG
4541 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4542 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4543 && ! rtx_equal_p (temp
, target
)
4544 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4545 dont_return_target
= 1;
4548 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4549 the same as that of TARGET, adjust the constant. This is needed, for
4550 example, in case it is a CONST_DOUBLE and we want only a word-sized
4552 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4553 && TREE_CODE (exp
) != ERROR_MARK
4554 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4555 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4556 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4558 /* If value was not generated in the target, store it there.
4559 Convert the value to TARGET's type first if necessary.
4560 If TEMP and TARGET compare equal according to rtx_equal_p, but
4561 one or both of them are volatile memory refs, we have to distinguish
4563 - expand_expr has used TARGET. In this case, we must not generate
4564 another copy. This can be detected by TARGET being equal according
4566 - expand_expr has not used TARGET - that means that the source just
4567 happens to have the same RTX form. Since temp will have been created
4568 by expand_expr, it will compare unequal according to == .
4569 We must generate a copy in this case, to reach the correct number
4570 of volatile memory references. */
4572 if ((! rtx_equal_p (temp
, target
)
4573 || (temp
!= target
&& (side_effects_p (temp
)
4574 || side_effects_p (target
))))
4575 && TREE_CODE (exp
) != ERROR_MARK
4576 && ! dont_store_target
4577 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4578 but TARGET is not valid memory reference, TEMP will differ
4579 from TARGET although it is really the same location. */
4580 && (TREE_CODE_CLASS (TREE_CODE (exp
)) != 'd'
4581 || target
!= DECL_RTL_IF_SET (exp
))
4582 /* If there's nothing to copy, don't bother. Don't call expr_size
4583 unless necessary, because some front-ends (C++) expr_size-hook
4584 aborts on objects that are not supposed to be bit-copied or
4586 && expr_size (exp
) != const0_rtx
)
4588 target
= protect_from_queue (target
, 1);
4589 if (GET_MODE (temp
) != GET_MODE (target
)
4590 && GET_MODE (temp
) != VOIDmode
)
4592 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4593 if (dont_return_target
)
4595 /* In this case, we will return TEMP,
4596 so make sure it has the proper mode.
4597 But don't forget to store the value into TARGET. */
4598 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4599 emit_move_insn (target
, temp
);
4602 convert_move (target
, temp
, unsignedp
);
4605 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4607 /* Handle copying a string constant into an array. The string
4608 constant may be shorter than the array. So copy just the string's
4609 actual length, and clear the rest. First get the size of the data
4610 type of the string, which is actually the size of the target. */
4611 rtx size
= expr_size (exp
);
4613 if (GET_CODE (size
) == CONST_INT
4614 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4615 emit_block_move (target
, temp
, size
,
4617 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4620 /* Compute the size of the data to copy from the string. */
4622 = size_binop (MIN_EXPR
,
4623 make_tree (sizetype
, size
),
4624 size_int (TREE_STRING_LENGTH (exp
)));
4626 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4628 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4631 /* Copy that much. */
4632 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
, 0);
4633 emit_block_move (target
, temp
, copy_size_rtx
,
4635 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4637 /* Figure out how much is left in TARGET that we have to clear.
4638 Do all calculations in ptr_mode. */
4639 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4641 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4642 target
= adjust_address (target
, BLKmode
,
4643 INTVAL (copy_size_rtx
));
4647 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4648 copy_size_rtx
, NULL_RTX
, 0,
4651 #ifdef POINTERS_EXTEND_UNSIGNED
4652 if (GET_MODE (copy_size_rtx
) != Pmode
)
4653 copy_size_rtx
= convert_memory_address (Pmode
,
4657 target
= offset_address (target
, copy_size_rtx
,
4658 highest_pow2_factor (copy_size
));
4659 label
= gen_label_rtx ();
4660 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4661 GET_MODE (size
), 0, label
);
/* Clear the tail of TARGET beyond the copied string, unless the
   remaining size is known to be zero.  */
4664 if (size
!= const0_rtx
)
4665 clear_storage (target
, size
);
4671 /* Handle calls that return values in multiple non-contiguous locations.
4672 The Irix 6 ABI has examples of this. */
4673 else if (GET_CODE (target
) == PARALLEL
)
4674 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4675 else if (GET_MODE (temp
) == BLKmode
)
4676 emit_block_move (target
, temp
, expr_size (exp
),
4678 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4680 emit_move_insn (target
, temp
);
4683 /* If we don't want a value, return NULL_RTX. */
4684 if ((want_value
& 1) == 0)
4687 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4688 ??? The latter test doesn't seem to make sense. */
4689 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4692 /* Return TARGET itself if it is a hard register. */
4693 else if ((want_value
& 1) != 0
4694 && GET_MODE (target
) != BLKmode
4695 && ! (GET_CODE (target
) == REG
4696 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4697 return copy_to_reg (target
);
4703 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): this chunk is a damaged extraction -- the function header
   (original lines 4704-4710, presumably "static int is_zeros_p (exp)")
   and several case labels, braces and default returns are missing.  The
   surviving tokens below are preserved byte-for-byte; the comments only
   note which tree-code arm each fragment belonged to.  */
/* Wrapper nodes that do not change the value: test the operand instead.
   (CONVERT_EXPR/NOP_EXPR labels presumably preceded these two -- TODO
   confirm against the original file.)  */
4711 switch (TREE_CODE (exp
))
4715 case NON_LVALUE_EXPR
:
4716 case VIEW_CONVERT_EXPR
:
4717 return is_zeros_p (TREE_OPERAND (exp
, 0));
/* Integer constants: zero iff integer_zerop says so.  */
4720 return integer_zerop (exp
);
/* Complex constants: zero iff both real and imaginary parts are.  */
4724 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
/* Floating constants: compare bitwise against dconst0.  */
4727 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
/* Vector constants: every element must itself be all zeros.  */
4730 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4731 elt
= TREE_CHAIN (elt
))
4732 if (!is_zeros_p (TREE_VALUE (elt
)))
/* CONSTRUCTOR: an empty SET_TYPE constructor is all-zero (no true bits);
   otherwise every recorded element value must be all-zero.  */
4738 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4739 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4740 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4741 if (! is_zeros_p (TREE_VALUE (elt
)))
4751 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* NOTE(review): damaged extraction -- the "static int" declaration line,
   the parameter declaration, and the statements that actually increment
   the element/zero counters are missing here.  Surviving tokens are
   preserved byte-for-byte.  */
4754 mostly_zeros_p (exp
)
4757 if (TREE_CODE (exp
) == CONSTRUCTOR
)
/* For a CONSTRUCTOR, count total elements and all-zero elements.  */
4759 int elts
= 0, zeros
= 0;
4760 tree elt
= CONSTRUCTOR_ELTS (exp
);
4761 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4763 /* If there are no ranges of true bits, it is all zero. */
4764 return elt
== NULL_TREE
;
4766 for (; elt
; elt
= TREE_CHAIN (elt
))
4768 /* We do not handle the case where the index is a RANGE_EXPR,
4769 so the statistic will be somewhat inaccurate.
4770 We do make a more accurate count in store_constructor itself,
4771 so since this function is only used for nested array elements,
4772 this should be close enough. */
4773 if (mostly_zeros_p (TREE_VALUE (elt
)))
/* "Mostly" means at least three quarters of the elements are zero.  */
4778 return 4 * zeros
>= 3 * elts
;
/* Non-CONSTRUCTOR nodes: fall back to the exact all-zeros test.  */
4781 return is_zeros_p (exp
);
4784 /* Helper function for store_constructor.
4785 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4786 TYPE is the type of the CONSTRUCTOR, not the element type.
4787 CLEARED is as for store_constructor.
4788 ALIAS_SET is the alias set to use for any stores.
4790 This provides a recursive shortcut back to store_constructor when it isn't
4791 necessary to go through store_field. This is so that we can pass through
4792 the cleared field to let store_constructor know that we may not have to
4793 clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): damaged extraction -- the "static void" line, part of the
   K&R parameter declarations, the alignment condition of the
   adjust_address call (line 4817), and the trailing alias_set argument
   of the store_field call (line 4834) are missing.  Surviving tokens
   are preserved byte-for-byte.  */
4796 store_constructor_field (target
, bitsize
, bitpos
, mode
, exp
, type
, cleared
,
4799 unsigned HOST_WIDE_INT bitsize
;
4800 HOST_WIDE_INT bitpos
;
4801 enum machine_mode mode
;
/* Fast path: a nested CONSTRUCTOR stored at a byte boundary (or into
   memory) can be handed straight back to store_constructor.  */
4806 if (TREE_CODE (exp
) == CONSTRUCTOR
4807 && bitpos
% BITS_PER_UNIT
== 0
4808 /* If we have a nonzero bitpos for a register target, then we just
4809 let store_field do the bitfield handling. This is unlikely to
4810 generate unnecessary clear instructions anyways. */
4811 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
/* For a MEM target, offset it to the field's byte position, using
   BLKmode when the target is BLKmode and under-aligned.  */
4813 if (GET_CODE (target
) == MEM
)
4815 = adjust_address (target
,
4816 GET_MODE (target
) == BLKmode
4818 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4819 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4822 /* Update the alias set, if required. */
4823 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4824 && MEM_ALIAS_SET (target
) != 0)
/* Copy the MEM first so we don't clobber a shared rtx.  */
4826 target
= copy_rtx (target
);
4827 set_mem_alias_set (target
, alias_set
);
/* Recurse: store the nested constructor directly.  */
4830 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
/* Slow path: let store_field handle bit-field positioning.  */
4833 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4837 /* Store the value of constructor EXP into the rtx TARGET.
4838 TARGET is either a REG or a MEM; we know it cannot conflict, since
4839 safe_from_p has been called.
4840 CLEARED is true if TARGET is known to have been zero'd.
4841 SIZE is the number of bytes of TARGET we are allowed to modify: this
4842 may not be the same as the size of EXP if we are assigning to a field
4843 which has been packed to exclude padding bits. */
4846 store_constructor (exp
, target
, cleared
, size
)
4852 tree type
= TREE_TYPE (exp
);
4853 #ifdef WORD_REGISTER_OPERATIONS
4854 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4857 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4858 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4862 /* We either clear the aggregate or indicate the value is dead. */
4863 if ((TREE_CODE (type
) == UNION_TYPE
4864 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4866 && ! CONSTRUCTOR_ELTS (exp
))
4867 /* If the constructor is empty, clear the union. */
4869 clear_storage (target
, expr_size (exp
));
4873 /* If we are building a static constructor into a register,
4874 set the initial value as zero so we can fold the value into
4875 a constant. But if more than one register is involved,
4876 this probably loses. */
4877 else if (! cleared
&& GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4878 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4880 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4884 /* If the constructor has fewer fields than the structure
4885 or if we are initializing the structure to mostly zeros,
4886 clear the whole structure first. Don't do this if TARGET is a
4887 register whose mode size isn't equal to SIZE since clear_storage
4888 can't handle this case. */
4889 else if (! cleared
&& size
> 0
4890 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4891 != fields_length (type
))
4892 || mostly_zeros_p (exp
))
4893 && (GET_CODE (target
) != REG
4894 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4897 clear_storage (target
, GEN_INT (size
));
4902 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4904 /* Store each element of the constructor into
4905 the corresponding field of TARGET. */
4907 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4909 tree field
= TREE_PURPOSE (elt
);
4910 tree value
= TREE_VALUE (elt
);
4911 enum machine_mode mode
;
4912 HOST_WIDE_INT bitsize
;
4913 HOST_WIDE_INT bitpos
= 0;
4915 rtx to_rtx
= target
;
4917 /* Just ignore missing fields.
4918 We cleared the whole structure, above,
4919 if any fields are missing. */
4923 if (cleared
&& is_zeros_p (value
))
4926 if (host_integerp (DECL_SIZE (field
), 1))
4927 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4931 mode
= DECL_MODE (field
);
4932 if (DECL_BIT_FIELD (field
))
4935 offset
= DECL_FIELD_OFFSET (field
);
4936 if (host_integerp (offset
, 0)
4937 && host_integerp (bit_position (field
), 0))
4939 bitpos
= int_bit_position (field
);
4943 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4949 if (contains_placeholder_p (offset
))
4950 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4951 offset
, make_tree (TREE_TYPE (exp
), target
));
4953 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4954 if (GET_CODE (to_rtx
) != MEM
)
4957 #ifdef POINTERS_EXTEND_UNSIGNED
4958 if (GET_MODE (offset_rtx
) != Pmode
)
4959 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
4961 if (GET_MODE (offset_rtx
) != ptr_mode
)
4962 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4965 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4966 highest_pow2_factor (offset
));
4969 if (TREE_READONLY (field
))
4971 if (GET_CODE (to_rtx
) == MEM
)
4972 to_rtx
= copy_rtx (to_rtx
);
4974 RTX_UNCHANGING_P (to_rtx
) = 1;
4977 #ifdef WORD_REGISTER_OPERATIONS
4978 /* If this initializes a field that is smaller than a word, at the
4979 start of a word, try to widen it to a full word.
4980 This special case allows us to output C++ member function
4981 initializations in a form that the optimizers can understand. */
4982 if (GET_CODE (target
) == REG
4983 && bitsize
< BITS_PER_WORD
4984 && bitpos
% BITS_PER_WORD
== 0
4985 && GET_MODE_CLASS (mode
) == MODE_INT
4986 && TREE_CODE (value
) == INTEGER_CST
4988 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4990 tree type
= TREE_TYPE (value
);
4992 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4994 type
= (*lang_hooks
.types
.type_for_size
)
4995 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4996 value
= convert (type
, value
);
4999 if (BYTES_BIG_ENDIAN
)
5001 = fold (build (LSHIFT_EXPR
, type
, value
,
5002 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
5003 bitsize
= BITS_PER_WORD
;
5008 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
5009 && DECL_NONADDRESSABLE_P (field
))
5011 to_rtx
= copy_rtx (to_rtx
);
5012 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
5015 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
5016 value
, type
, cleared
,
5017 get_alias_set (TREE_TYPE (field
)));
5020 else if (TREE_CODE (type
) == ARRAY_TYPE
5021 || TREE_CODE (type
) == VECTOR_TYPE
)
5026 tree domain
= TYPE_DOMAIN (type
);
5027 tree elttype
= TREE_TYPE (type
);
5029 HOST_WIDE_INT minelt
= 0;
5030 HOST_WIDE_INT maxelt
= 0;
5032 /* Vectors are like arrays, but the domain is stored via an array
5034 if (TREE_CODE (type
) == VECTOR_TYPE
)
5036 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5037 the same field as TYPE_DOMAIN, we are not guaranteed that
5039 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
5040 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
5043 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
5044 && TYPE_MAX_VALUE (domain
)
5045 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
5046 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
5048 /* If we have constant bounds for the range of the type, get them. */
5051 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
5052 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
5055 /* If the constructor has fewer elements than the array,
5056 clear the whole array first. Similarly if this is
5057 static constructor of a non-BLKmode object. */
5058 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
5062 HOST_WIDE_INT count
= 0, zero_count
= 0;
5063 need_to_clear
= ! const_bounds_p
;
5065 /* This loop is a more accurate version of the loop in
5066 mostly_zeros_p (it handles RANGE_EXPR in an index).
5067 It is also needed to check for missing elements. */
5068 for (elt
= CONSTRUCTOR_ELTS (exp
);
5069 elt
!= NULL_TREE
&& ! need_to_clear
;
5070 elt
= TREE_CHAIN (elt
))
5072 tree index
= TREE_PURPOSE (elt
);
5073 HOST_WIDE_INT this_node_count
;
5075 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5077 tree lo_index
= TREE_OPERAND (index
, 0);
5078 tree hi_index
= TREE_OPERAND (index
, 1);
5080 if (! host_integerp (lo_index
, 1)
5081 || ! host_integerp (hi_index
, 1))
5087 this_node_count
= (tree_low_cst (hi_index
, 1)
5088 - tree_low_cst (lo_index
, 1) + 1);
5091 this_node_count
= 1;
5093 count
+= this_node_count
;
5094 if (mostly_zeros_p (TREE_VALUE (elt
)))
5095 zero_count
+= this_node_count
;
5098 /* Clear the entire array first if there are any missing elements,
5099 or if the incidence of zero elements is >= 75%. */
5101 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
5105 if (need_to_clear
&& size
> 0)
5110 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5112 clear_storage (target
, GEN_INT (size
));
5116 else if (REG_P (target
))
5117 /* Inform later passes that the old value is dead. */
5118 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
5120 /* Store each element of the constructor into
5121 the corresponding element of TARGET, determined
5122 by counting the elements. */
5123 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
5125 elt
= TREE_CHAIN (elt
), i
++)
5127 enum machine_mode mode
;
5128 HOST_WIDE_INT bitsize
;
5129 HOST_WIDE_INT bitpos
;
5131 tree value
= TREE_VALUE (elt
);
5132 tree index
= TREE_PURPOSE (elt
);
5133 rtx xtarget
= target
;
5135 if (cleared
&& is_zeros_p (value
))
5138 unsignedp
= TREE_UNSIGNED (elttype
);
5139 mode
= TYPE_MODE (elttype
);
5140 if (mode
== BLKmode
)
5141 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
5142 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
5145 bitsize
= GET_MODE_BITSIZE (mode
);
5147 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
5149 tree lo_index
= TREE_OPERAND (index
, 0);
5150 tree hi_index
= TREE_OPERAND (index
, 1);
5151 rtx index_r
, pos_rtx
, loop_end
;
5152 struct nesting
*loop
;
5153 HOST_WIDE_INT lo
, hi
, count
;
5156 /* If the range is constant and "small", unroll the loop. */
5158 && host_integerp (lo_index
, 0)
5159 && host_integerp (hi_index
, 0)
5160 && (lo
= tree_low_cst (lo_index
, 0),
5161 hi
= tree_low_cst (hi_index
, 0),
5162 count
= hi
- lo
+ 1,
5163 (GET_CODE (target
) != MEM
5165 || (host_integerp (TYPE_SIZE (elttype
), 1)
5166 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
5169 lo
-= minelt
; hi
-= minelt
;
5170 for (; lo
<= hi
; lo
++)
5172 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
5174 if (GET_CODE (target
) == MEM
5175 && !MEM_KEEP_ALIAS_SET_P (target
)
5176 && TREE_CODE (type
) == ARRAY_TYPE
5177 && TYPE_NONALIASED_COMPONENT (type
))
5179 target
= copy_rtx (target
);
5180 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5183 store_constructor_field
5184 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
5185 get_alias_set (elttype
));
5190 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
5191 loop_end
= gen_label_rtx ();
5193 unsignedp
= TREE_UNSIGNED (domain
);
5195 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
5198 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
5200 SET_DECL_RTL (index
, index_r
);
5201 if (TREE_CODE (value
) == SAVE_EXPR
5202 && SAVE_EXPR_RTL (value
) == 0)
5204 /* Make sure value gets expanded once before the
5206 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
5209 store_expr (lo_index
, index_r
, 0);
5210 loop
= expand_start_loop (0);
5212 /* Assign value to element index. */
5214 = convert (ssizetype
,
5215 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5216 index
, TYPE_MIN_VALUE (domain
))));
5217 position
= size_binop (MULT_EXPR
, position
,
5219 TYPE_SIZE_UNIT (elttype
)));
5221 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
5222 xtarget
= offset_address (target
, pos_rtx
,
5223 highest_pow2_factor (position
));
5224 xtarget
= adjust_address (xtarget
, mode
, 0);
5225 if (TREE_CODE (value
) == CONSTRUCTOR
)
5226 store_constructor (value
, xtarget
, cleared
,
5227 bitsize
/ BITS_PER_UNIT
);
5229 store_expr (value
, xtarget
, 0);
5231 expand_exit_loop_if_false (loop
,
5232 build (LT_EXPR
, integer_type_node
,
5235 expand_increment (build (PREINCREMENT_EXPR
,
5237 index
, integer_one_node
), 0, 0);
5239 emit_label (loop_end
);
5242 else if ((index
!= 0 && ! host_integerp (index
, 0))
5243 || ! host_integerp (TYPE_SIZE (elttype
), 1))
5248 index
= ssize_int (1);
5251 index
= convert (ssizetype
,
5252 fold (build (MINUS_EXPR
, index
,
5253 TYPE_MIN_VALUE (domain
))));
5255 position
= size_binop (MULT_EXPR
, index
,
5257 TYPE_SIZE_UNIT (elttype
)));
5258 xtarget
= offset_address (target
,
5259 expand_expr (position
, 0, VOIDmode
, 0),
5260 highest_pow2_factor (position
));
5261 xtarget
= adjust_address (xtarget
, mode
, 0);
5262 store_expr (value
, xtarget
, 0);
5267 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
5268 * tree_low_cst (TYPE_SIZE (elttype
), 1));
5270 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
5272 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
5273 && TREE_CODE (type
) == ARRAY_TYPE
5274 && TYPE_NONALIASED_COMPONENT (type
))
5276 target
= copy_rtx (target
);
5277 MEM_KEEP_ALIAS_SET_P (target
) = 1;
5280 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
5281 type
, cleared
, get_alias_set (elttype
));
5287 /* Set constructor assignments. */
5288 else if (TREE_CODE (type
) == SET_TYPE
)
5290 tree elt
= CONSTRUCTOR_ELTS (exp
);
5291 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
5292 tree domain
= TYPE_DOMAIN (type
);
5293 tree domain_min
, domain_max
, bitlength
;
5295 /* The default implementation strategy is to extract the constant
5296 parts of the constructor, use that to initialize the target,
5297 and then "or" in whatever non-constant ranges we need in addition.
5299 If a large set is all zero or all ones, it is
5300 probably better to set it using memset (if available) or bzero.
5301 Also, if a large set has just a single range, it may also be
5302 better to first clear all the first clear the set (using
5303 bzero/memset), and set the bits we want. */
5305 /* Check for all zeros. */
5306 if (elt
== NULL_TREE
&& size
> 0)
5309 clear_storage (target
, GEN_INT (size
));
5313 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5314 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5315 bitlength
= size_binop (PLUS_EXPR
,
5316 size_diffop (domain_max
, domain_min
),
5319 nbits
= tree_low_cst (bitlength
, 1);
5321 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5322 are "complicated" (more than one range), initialize (the
5323 constant parts) by copying from a constant. */
5324 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5325 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5327 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5328 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5329 char *bit_buffer
= (char *) alloca (nbits
);
5330 HOST_WIDE_INT word
= 0;
5331 unsigned int bit_pos
= 0;
5332 unsigned int ibit
= 0;
5333 unsigned int offset
= 0; /* In bytes from beginning of set. */
5335 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5338 if (bit_buffer
[ibit
])
5340 if (BYTES_BIG_ENDIAN
)
5341 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5343 word
|= 1 << bit_pos
;
5347 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5349 if (word
!= 0 || ! cleared
)
5351 rtx datum
= GEN_INT (word
);
5354 /* The assumption here is that it is safe to use
5355 XEXP if the set is multi-word, but not if
5356 it's single-word. */
5357 if (GET_CODE (target
) == MEM
)
5358 to_rtx
= adjust_address (target
, mode
, offset
);
5359 else if (offset
== 0)
5363 emit_move_insn (to_rtx
, datum
);
5370 offset
+= set_word_size
/ BITS_PER_UNIT
;
5375 /* Don't bother clearing storage if the set is all ones. */
5376 if (TREE_CHAIN (elt
) != NULL_TREE
5377 || (TREE_PURPOSE (elt
) == NULL_TREE
5379 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5380 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5381 || (tree_low_cst (TREE_VALUE (elt
), 0)
5382 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5383 != (HOST_WIDE_INT
) nbits
))))
5384 clear_storage (target
, expr_size (exp
));
5386 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5388 /* Start of range of element or NULL. */
5389 tree startbit
= TREE_PURPOSE (elt
);
5390 /* End of range of element, or element value. */
5391 tree endbit
= TREE_VALUE (elt
);
5392 HOST_WIDE_INT startb
, endb
;
5393 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5395 bitlength_rtx
= expand_expr (bitlength
,
5396 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5398 /* Handle non-range tuple element like [ expr ]. */
5399 if (startbit
== NULL_TREE
)
5401 startbit
= save_expr (endbit
);
5405 startbit
= convert (sizetype
, startbit
);
5406 endbit
= convert (sizetype
, endbit
);
5407 if (! integer_zerop (domain_min
))
5409 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5410 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5412 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5413 EXPAND_CONST_ADDRESS
);
5414 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5415 EXPAND_CONST_ADDRESS
);
5421 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5422 (GET_MODE (target
), 0),
5425 emit_move_insn (targetx
, target
);
5428 else if (GET_CODE (target
) == MEM
)
5433 /* Optimization: If startbit and endbit are constants divisible
5434 by BITS_PER_UNIT, call memset instead. */
5435 if (TARGET_MEM_FUNCTIONS
5436 && TREE_CODE (startbit
) == INTEGER_CST
5437 && TREE_CODE (endbit
) == INTEGER_CST
5438 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5439 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5441 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5443 plus_constant (XEXP (targetx
, 0),
5444 startb
/ BITS_PER_UNIT
),
5446 constm1_rtx
, TYPE_MODE (integer_type_node
),
5447 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5448 TYPE_MODE (sizetype
));
5451 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
5452 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
5453 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5454 startbit_rtx
, TYPE_MODE (sizetype
),
5455 endbit_rtx
, TYPE_MODE (sizetype
));
5458 emit_move_insn (target
, targetx
);
5466 /* Store the value of EXP (an expression tree)
5467 into a subfield of TARGET which has mode MODE and occupies
5468 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5469 If MODE is VOIDmode, it means that we are storing into a bit-field.
5471 If VALUE_MODE is VOIDmode, return nothing in particular.
5472 UNSIGNEDP is not used in this case.
5474 Otherwise, return an rtx for the value stored. This rtx
5475 has mode VALUE_MODE if that is convenient to do.
5476 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5478 TYPE is the type of the underlying object,
5480 ALIAS_SET is the alias set for the destination. This value will
5481 (in general) be different from that for TARGET, since TARGET is a
5482 reference to the containing structure. */
5485 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
, unsignedp
, type
,
5488 HOST_WIDE_INT bitsize
;
5489 HOST_WIDE_INT bitpos
;
5490 enum machine_mode mode
;
5492 enum machine_mode value_mode
;
5497 HOST_WIDE_INT width_mask
= 0;
5499 if (TREE_CODE (exp
) == ERROR_MARK
)
5502 /* If we have nothing to store, do nothing unless the expression has
5505 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5506 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5507 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5509 /* If we are storing into an unaligned field of an aligned union that is
5510 in a register, we may have the mode of TARGET being an integer mode but
5511 MODE == BLKmode. In that case, get an aligned object whose size and
5512 alignment are the same as TARGET and store TARGET into it (we can avoid
5513 the store if the field being stored is the entire width of TARGET). Then
5514 call ourselves recursively to store the field into a BLKmode version of
5515 that object. Finally, load from the object into TARGET. This is not
5516 very efficient in general, but should only be slightly more expensive
5517 than the otherwise-required unaligned accesses. Perhaps this can be
5518 cleaned up later. */
5521 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5525 (build_qualified_type (type
, TYPE_QUALS (type
) | TYPE_QUAL_CONST
),
5527 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5529 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5530 emit_move_insn (object
, target
);
5532 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5535 emit_move_insn (target
, object
);
5537 /* We want to return the BLKmode version of the data. */
5541 if (GET_CODE (target
) == CONCAT
)
5543 /* We're storing into a struct containing a single __complex. */
5547 return store_expr (exp
, target
, 0);
5550 /* If the structure is in a register or if the component
5551 is a bit field, we cannot use addressing to access it.
5552 Use bit-field techniques or SUBREG to store in it. */
5554 if (mode
== VOIDmode
5555 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5556 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5557 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5558 || GET_CODE (target
) == REG
5559 || GET_CODE (target
) == SUBREG
5560 /* If the field isn't aligned enough to store as an ordinary memref,
5561 store it as a bit field. */
5562 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
5563 && (MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
)
5564 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5565 /* If the RHS and field are a constant size and the size of the
5566 RHS isn't the same size as the bitfield, we must use bitfield
5569 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5570 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5572 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5574 /* If BITSIZE is narrower than the size of the type of EXP
5575 we will be narrowing TEMP. Normally, what's wanted are the
5576 low-order bits. However, if EXP's type is a record and this is
5577 big-endian machine, we want the upper BITSIZE bits. */
5578 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5579 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5580 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5581 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5582 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5586 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5588 if (mode
!= VOIDmode
&& mode
!= BLKmode
5589 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5590 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5592 /* If the modes of TARGET and TEMP are both BLKmode, both
5593 must be in memory and BITPOS must be aligned on a byte
5594 boundary. If so, we simply do a block copy. */
5595 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5597 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5598 || bitpos
% BITS_PER_UNIT
!= 0)
5601 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5602 emit_block_move (target
, temp
,
5603 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5607 return value_mode
== VOIDmode
? const0_rtx
: target
;
5610 /* Store the value in the bitfield. */
5611 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5612 int_size_in_bytes (type
));
5614 if (value_mode
!= VOIDmode
)
5616 /* The caller wants an rtx for the value.
5617 If possible, avoid refetching from the bitfield itself. */
5619 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5622 enum machine_mode tmode
;
5624 tmode
= GET_MODE (temp
);
5625 if (tmode
== VOIDmode
)
5629 return expand_and (tmode
, temp
,
5630 gen_int_mode (width_mask
, tmode
),
5633 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5634 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5635 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5638 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5639 NULL_RTX
, value_mode
, VOIDmode
,
5640 int_size_in_bytes (type
));
5646 rtx addr
= XEXP (target
, 0);
5647 rtx to_rtx
= target
;
5649 /* If a value is wanted, it must be the lhs;
5650 so make the address stable for multiple use. */
5652 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5653 && ! CONSTANT_ADDRESS_P (addr
)
5654 /* A frame-pointer reference is already stable. */
5655 && ! (GET_CODE (addr
) == PLUS
5656 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5657 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5658 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5659 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5661 /* Now build a reference to just the desired component. */
5663 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5665 if (to_rtx
== target
)
5666 to_rtx
= copy_rtx (to_rtx
);
5668 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5669 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5670 set_mem_alias_set (to_rtx
, alias_set
);
5672 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5676 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5677 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5678 codes and find the ultimate containing object, which we return.
5680 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5681 bit position, and *PUNSIGNEDP to the signedness of the field.
5682 If the position of the field is variable, we store a tree
5683 giving the variable offset (in units) in *POFFSET.
5684 This offset is in addition to the bit position.
5685 If the position is not variable, we store 0 in *POFFSET.
5687 If any of the extraction expressions is volatile,
5688 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5690 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5691 is a mode that can be used to access the field. In that case, *PBITSIZE
5694 If the field describes a variable-sized object, *PMODE is set to
5695 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5696 this case, but the address of the object can be found. */
5699 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5700 punsignedp
, pvolatilep
)
5702 HOST_WIDE_INT
*pbitsize
;
5703 HOST_WIDE_INT
*pbitpos
;
5705 enum machine_mode
*pmode
;
5710 enum machine_mode mode
= VOIDmode
;
5711 tree offset
= size_zero_node
;
5712 tree bit_offset
= bitsize_zero_node
;
5713 tree placeholder_ptr
= 0;
5716 /* First get the mode, signedness, and size. We do this from just the
5717 outermost expression. */
5718 if (TREE_CODE (exp
) == COMPONENT_REF
)
5720 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5721 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5722 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5724 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5726 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5728 size_tree
= TREE_OPERAND (exp
, 1);
5729 *punsignedp
= TREE_UNSIGNED (exp
);
5733 mode
= TYPE_MODE (TREE_TYPE (exp
));
5734 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5736 if (mode
== BLKmode
)
5737 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5739 *pbitsize
= GET_MODE_BITSIZE (mode
);
5744 if (! host_integerp (size_tree
, 1))
5745 mode
= BLKmode
, *pbitsize
= -1;
5747 *pbitsize
= tree_low_cst (size_tree
, 1);
5750 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5751 and find the ultimate containing object. */
5754 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5755 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5756 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5758 tree field
= TREE_OPERAND (exp
, 1);
5759 tree this_offset
= DECL_FIELD_OFFSET (field
);
5761 /* If this field hasn't been filled in yet, don't go
5762 past it. This should only happen when folding expressions
5763 made during type construction. */
5764 if (this_offset
== 0)
5766 else if (! TREE_CONSTANT (this_offset
)
5767 && contains_placeholder_p (this_offset
))
5768 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5770 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5771 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5772 DECL_FIELD_BIT_OFFSET (field
));
5774 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5777 else if (TREE_CODE (exp
) == ARRAY_REF
5778 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5780 tree index
= TREE_OPERAND (exp
, 1);
5781 tree array
= TREE_OPERAND (exp
, 0);
5782 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5783 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5784 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5786 /* We assume all arrays have sizes that are a multiple of a byte.
5787 First subtract the lower bound, if any, in the type of the
5788 index, then convert to sizetype and multiply by the size of the
5790 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5791 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5794 /* If the index has a self-referential type, pass it to a
5795 WITH_RECORD_EXPR; if the component size is, pass our
5796 component to one. */
5797 if (! TREE_CONSTANT (index
)
5798 && contains_placeholder_p (index
))
5799 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5800 if (! TREE_CONSTANT (unit_size
)
5801 && contains_placeholder_p (unit_size
))
5802 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5804 offset
= size_binop (PLUS_EXPR
, offset
,
5805 size_binop (MULT_EXPR
,
5806 convert (sizetype
, index
),
5810 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5812 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5814 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5815 We might have been called from tree optimization where we
5816 haven't set up an object yet. */
5824 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5825 && TREE_CODE (exp
) != VIEW_CONVERT_EXPR
5826 && ! ((TREE_CODE (exp
) == NOP_EXPR
5827 || TREE_CODE (exp
) == CONVERT_EXPR
)
5828 && (TYPE_MODE (TREE_TYPE (exp
))
5829 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5832 /* If any reference in the chain is volatile, the effect is volatile. */
5833 if (TREE_THIS_VOLATILE (exp
))
5836 exp
= TREE_OPERAND (exp
, 0);
5839 /* If OFFSET is constant, see if we can return the whole thing as a
5840 constant bit position. Otherwise, split it up. */
5841 if (host_integerp (offset
, 0)
5842 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5844 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5845 && host_integerp (tem
, 0))
5846 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5848 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5854 /* Return 1 if T is an expression that get_inner_reference handles. */
5857 handled_component_p (t
)
5860 switch (TREE_CODE (t
))
5865 case ARRAY_RANGE_REF
:
5866 case NON_LVALUE_EXPR
:
5867 case VIEW_CONVERT_EXPR
:
5872 return (TYPE_MODE (TREE_TYPE (t
))
5873 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5880 /* Given an rtx VALUE that may contain additions and multiplications, return
5881 an equivalent value that just refers to a register, memory, or constant.
5882 This is done by generating instructions to perform the arithmetic and
5883 returning a pseudo-register containing the value.
5885 The returned value may be a REG, SUBREG, MEM or constant. */
5888 force_operand (value
, target
)
5892 /* Use subtarget as the target for operand 0 of a binary operation. */
5893 rtx subtarget
= get_subtarget (target
);
5894 enum rtx_code code
= GET_CODE (value
);
5896 /* Check for a PIC address load. */
5897 if ((code
== PLUS
|| code
== MINUS
)
5898 && XEXP (value
, 0) == pic_offset_table_rtx
5899 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5900 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5901 || GET_CODE (XEXP (value
, 1)) == CONST
))
5904 subtarget
= gen_reg_rtx (GET_MODE (value
));
5905 emit_move_insn (subtarget
, value
);
5909 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5912 target
= gen_reg_rtx (GET_MODE (value
));
5913 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5914 code
== ZERO_EXTEND
);
5918 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5920 op2
= XEXP (value
, 1);
5921 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5923 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5926 op2
= negate_rtx (GET_MODE (value
), op2
);
5929 /* Check for an addition with OP2 a constant integer and our first
5930 operand a PLUS of a virtual register and something else. In that
5931 case, we want to emit the sum of the virtual register and the
5932 constant first and then add the other value. This allows virtual
5933 register instantiation to simply modify the constant rather than
5934 creating another one around this addition. */
5935 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5936 && GET_CODE (XEXP (value
, 0)) == PLUS
5937 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5938 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5939 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5941 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5942 XEXP (XEXP (value
, 0), 0), op2
,
5943 subtarget
, 0, OPTAB_LIB_WIDEN
);
5944 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5945 force_operand (XEXP (XEXP (value
,
5947 target
, 0, OPTAB_LIB_WIDEN
);
5950 op1
= force_operand (XEXP (value
, 0), subtarget
);
5951 op2
= force_operand (op2
, NULL_RTX
);
5955 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5957 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5958 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5959 target
, 1, OPTAB_LIB_WIDEN
);
5961 return expand_divmod (0,
5962 FLOAT_MODE_P (GET_MODE (value
))
5963 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5964 GET_MODE (value
), op1
, op2
, target
, 0);
5967 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5971 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5975 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5979 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5980 target
, 0, OPTAB_LIB_WIDEN
);
5983 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5984 target
, 1, OPTAB_LIB_WIDEN
);
5987 if (GET_RTX_CLASS (code
) == '1')
5989 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5990 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5993 #ifdef INSN_SCHEDULING
5994 /* On machines that have insn scheduling, we want all memory reference to be
5995 explicit, so we need to deal with such paradoxical SUBREGs. */
5996 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5997 && (GET_MODE_SIZE (GET_MODE (value
))
5998 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
6000 = simplify_gen_subreg (GET_MODE (value
),
6001 force_reg (GET_MODE (SUBREG_REG (value
)),
6002 force_operand (SUBREG_REG (value
),
6004 GET_MODE (SUBREG_REG (value
)),
6005 SUBREG_BYTE (value
));
6011 /* Subroutine of expand_expr: return nonzero iff there is no way that
6012 EXP can reference X, which is being modified. TOP_P is nonzero if this
6013 call is going to be used to determine whether we need a temporary
6014 for EXP, as opposed to a recursive call to this function.
6016 It is always safe for this routine to return zero since it merely
6017 searches for optimization opportunities. */
6020 safe_from_p (x
, exp
, top_p
)
6027 static tree save_expr_list
;
6030 /* If EXP has varying size, we MUST use a target since we currently
6031 have no way of allocating temporaries of variable size
6032 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6033 So we assume here that something at a higher level has prevented a
6034 clash. This is somewhat bogus, but the best we can do. Only
6035 do this when X is BLKmode and when we are at the top level. */
6036 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6037 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
6038 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
6039 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
6040 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
6042 && GET_MODE (x
) == BLKmode
)
6043 /* If X is in the outgoing argument area, it is always safe. */
6044 || (GET_CODE (x
) == MEM
6045 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
6046 || (GET_CODE (XEXP (x
, 0)) == PLUS
6047 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
6050 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6051 find the underlying pseudo. */
6052 if (GET_CODE (x
) == SUBREG
)
6055 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6059 /* A SAVE_EXPR might appear many times in the expression passed to the
6060 top-level safe_from_p call, and if it has a complex subexpression,
6061 examining it multiple times could result in a combinatorial explosion.
6062 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6063 with optimization took about 28 minutes to compile -- even though it was
6064 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6065 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6066 we have processed. Note that the only test of top_p was above. */
6075 rtn
= safe_from_p (x
, exp
, 0);
6077 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
6078 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
6083 /* Now look at our tree code and possibly recurse. */
6084 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
6087 exp_rtl
= DECL_RTL_IF_SET (exp
);
6094 if (TREE_CODE (exp
) == TREE_LIST
)
6095 return ((TREE_VALUE (exp
) == 0
6096 || safe_from_p (x
, TREE_VALUE (exp
), 0))
6097 && (TREE_CHAIN (exp
) == 0
6098 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
6099 else if (TREE_CODE (exp
) == ERROR_MARK
)
6100 return 1; /* An already-visited SAVE_EXPR? */
6105 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6109 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
6110 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
6114 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6115 the expression. If it is set, we conflict iff we are that rtx or
6116 both are in memory. Otherwise, we check all operands of the
6117 expression recursively. */
6119 switch (TREE_CODE (exp
))
6122 /* If the operand is static or we are static, we can't conflict.
6123 Likewise if we don't conflict with the operand at all. */
6124 if (staticp (TREE_OPERAND (exp
, 0))
6125 || TREE_STATIC (exp
)
6126 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6129 /* Otherwise, the only way this can conflict is if we are taking
6130 the address of a DECL a that address if part of X, which is
6132 exp
= TREE_OPERAND (exp
, 0);
6135 if (!DECL_RTL_SET_P (exp
)
6136 || GET_CODE (DECL_RTL (exp
)) != MEM
)
6139 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
6144 if (GET_CODE (x
) == MEM
6145 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
6146 get_alias_set (exp
)))
6151 /* Assume that the call will clobber all hard registers and
6153 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6154 || GET_CODE (x
) == MEM
)
6159 /* If a sequence exists, we would have to scan every instruction
6160 in the sequence to see if it was safe. This is probably not
6162 if (RTL_EXPR_SEQUENCE (exp
))
6165 exp_rtl
= RTL_EXPR_RTL (exp
);
6168 case WITH_CLEANUP_EXPR
:
6169 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
6172 case CLEANUP_POINT_EXPR
:
6173 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6176 exp_rtl
= SAVE_EXPR_RTL (exp
);
6180 /* If we've already scanned this, don't do it again. Otherwise,
6181 show we've scanned it and record for clearing the flag if we're
6183 if (TREE_PRIVATE (exp
))
6186 TREE_PRIVATE (exp
) = 1;
6187 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6189 TREE_PRIVATE (exp
) = 0;
6193 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
6197 /* The only operand we look at is operand 1. The rest aren't
6198 part of the expression. */
6199 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
6201 case METHOD_CALL_EXPR
:
6202 /* This takes an rtx argument, but shouldn't appear here. */
6209 /* If we have an rtx, we do not need to scan our operands. */
6213 nops
= first_rtl_op (TREE_CODE (exp
));
6214 for (i
= 0; i
< nops
; i
++)
6215 if (TREE_OPERAND (exp
, i
) != 0
6216 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
6219 /* If this is a language-specific tree code, it may require
6220 special handling. */
6221 if ((unsigned int) TREE_CODE (exp
)
6222 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6223 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
6227 /* If we have an rtl, find any enclosed object. Then see if we conflict
6231 if (GET_CODE (exp_rtl
) == SUBREG
)
6233 exp_rtl
= SUBREG_REG (exp_rtl
);
6234 if (GET_CODE (exp_rtl
) == REG
6235 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6239 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6240 are memory and they conflict. */
6241 return ! (rtx_equal_p (x
, exp_rtl
)
6242 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
6243 && true_dependence (exp_rtl
, VOIDmode
, x
,
6244 rtx_addr_varies_p
)));
6247 /* If we reach here, it is safe. */
6251 /* Subroutine of expand_expr: return rtx if EXP is a
6252 variable or parameter; else return 0. */
6259 switch (TREE_CODE (exp
))
6263 return DECL_RTL (exp
);
#ifdef MAX_INTEGER_COMPUTATION_MODE

/* Issue a fatal diagnostic if EXP performs integer arithmetic in a mode
   wider than MAX_INTEGER_COMPUTATION_MODE.  Checks the overall type of
   the operation and the types of its operands for unary, binary and
   relational operations.  */

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.   We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
6324 /* Return the highest power of two that EXP is known to be a multiple of.
6325 This is used in updating alignment of MEMs in array references. */
6327 static HOST_WIDE_INT
6328 highest_pow2_factor (exp
)
6331 HOST_WIDE_INT c0
, c1
;
6333 switch (TREE_CODE (exp
))
6336 /* We can find the lowest bit that's a one. If the low
6337 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6338 We need to handle this case since we can find it in a COND_EXPR,
6339 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
6340 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6342 if (TREE_CONSTANT_OVERFLOW (exp
))
6343 return BIGGEST_ALIGNMENT
;
6346 /* Note: tree_low_cst is intentionally not used here,
6347 we don't care about the upper bits. */
6348 c0
= TREE_INT_CST_LOW (exp
);
6350 return c0
? c0
: BIGGEST_ALIGNMENT
;
6354 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6355 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6356 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6357 return MIN (c0
, c1
);
6360 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6361 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6364 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6366 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6367 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6369 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6370 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6371 return MAX (1, c0
/ c1
);
6375 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6376 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6377 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6380 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6383 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6384 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6385 return MIN (c0
, c1
);
6394 /* Similar, except that it is known that the expression must be a multiple
6395 of the alignment of TYPE. */
6397 static HOST_WIDE_INT
6398 highest_pow2_factor_for_type (type
, exp
)
6402 HOST_WIDE_INT type_align
, factor
;
6404 factor
= highest_pow2_factor (exp
);
6405 type_align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
6406 return MAX (factor
, type_align
);
6409 /* Return an object on the placeholder list that matches EXP, a
6410 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6411 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6412 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6413 is a location which initially points to a starting location in the
6414 placeholder list (zero means start of the list) and where a pointer into
6415 the placeholder list at which the object is found is placed. */
6418 find_placeholder (exp
, plist
)
6422 tree type
= TREE_TYPE (exp
);
6423 tree placeholder_expr
;
6425 for (placeholder_expr
6426 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6427 placeholder_expr
!= 0;
6428 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6430 tree need_type
= TYPE_MAIN_VARIANT (type
);
6433 /* Find the outermost reference that is of the type we want. If none,
6434 see if any object has a type that is a pointer to the type we
6436 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6437 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6438 || TREE_CODE (elt
) == COND_EXPR
)
6439 ? TREE_OPERAND (elt
, 1)
6440 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6441 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6442 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6443 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6444 ? TREE_OPERAND (elt
, 0) : 0))
6445 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6448 *plist
= placeholder_expr
;
6452 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6454 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6455 || TREE_CODE (elt
) == COND_EXPR
)
6456 ? TREE_OPERAND (elt
, 1)
6457 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6458 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6459 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6460 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6461 ? TREE_OPERAND (elt
, 0) : 0))
6462 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6463 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6467 *plist
= placeholder_expr
;
6468 return build1 (INDIRECT_REF
, need_type
, elt
);
6475 /* expand_expr: generate code for computing expression EXP.
6476 An rtx for the computed value is returned. The value is never null.
6477 In the case of a void EXP, const0_rtx is returned.
6479 The value may be stored in TARGET if TARGET is nonzero.
6480 TARGET is just a suggestion; callers must assume that
6481 the rtx returned may not be the same as TARGET.
6483 If TARGET is CONST0_RTX, it means that the value will be ignored.
6485 If TMODE is not VOIDmode, it suggests generating the
6486 result in mode TMODE. But this is done only when convenient.
6487 Otherwise, TMODE is ignored and the value generated in its natural mode.
6488 TMODE is just a suggestion; callers must assume that
6489 the rtx returned may not have mode TMODE.
6491 Note that TARGET may have neither TMODE nor MODE. In that case, it
6492 probably will not be used.
6494 If MODIFIER is EXPAND_SUM then when EXP is an addition
6495 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6496 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6497 products as above, or REG or MEM, or constant.
6498 Ordinarily in such cases we would output mul or add instructions
6499 and then return a pseudo reg containing the sum.
6501 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6502 it also marks a label as absolutely required (it can't be dead).
6503 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6504 This is used for outputting expressions used in initializers.
6506 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6507 with a constant address even if that address is not normally legitimate.
6508 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6510 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6511 a call parameter. Such targets require special care as we haven't yet
6512 marked TARGET so that it's safe from being trashed by libcalls. We
6513 don't want to use TARGET for anything but the final result;
6514 Intermediate values must go elsewhere. Additionally, calls to
6515 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6518 expand_expr (exp
, target
, tmode
, modifier
)
6521 enum machine_mode tmode
;
6522 enum expand_modifier modifier
;
6525 tree type
= TREE_TYPE (exp
);
6526 int unsignedp
= TREE_UNSIGNED (type
);
6527 enum machine_mode mode
;
6528 enum tree_code code
= TREE_CODE (exp
);
6530 rtx subtarget
, original_target
;
6534 /* Handle ERROR_MARK before anybody tries to access its type. */
6535 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6537 op0
= CONST0_RTX (tmode
);
6543 mode
= TYPE_MODE (type
);
6544 /* Use subtarget as the target for operand 0 of a binary operation. */
6545 subtarget
= get_subtarget (target
);
6546 original_target
= target
;
6547 ignore
= (target
== const0_rtx
6548 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6549 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6550 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
6551 && TREE_CODE (type
) == VOID_TYPE
));
6553 /* If we are going to ignore this result, we need only do something
6554 if there is a side-effect somewhere in the expression. If there
6555 is, short-circuit the most common cases here. Note that we must
6556 not call expand_expr with anything but const0_rtx in case this
6557 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6561 if (! TREE_SIDE_EFFECTS (exp
))
6564 /* Ensure we reference a volatile object even if value is ignored, but
6565 don't do this if all we are doing is taking its address. */
6566 if (TREE_THIS_VOLATILE (exp
)
6567 && TREE_CODE (exp
) != FUNCTION_DECL
6568 && mode
!= VOIDmode
&& mode
!= BLKmode
6569 && modifier
!= EXPAND_CONST_ADDRESS
)
6571 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6572 if (GET_CODE (temp
) == MEM
)
6573 temp
= copy_to_reg (temp
);
6577 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6578 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6579 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6582 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6583 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6585 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6586 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6589 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6590 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6591 /* If the second operand has no side effects, just evaluate
6593 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6595 else if (code
== BIT_FIELD_REF
)
6597 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6598 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6599 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6606 #ifdef MAX_INTEGER_COMPUTATION_MODE
6607 /* Only check stuff here if the mode we want is different from the mode
6608 of the expression; if it's the same, check_max_integer_computation_mode
6609 will handle it. Do we really need to check this stuff at all? */
6612 && GET_MODE (target
) != mode
6613 && TREE_CODE (exp
) != INTEGER_CST
6614 && TREE_CODE (exp
) != PARM_DECL
6615 && TREE_CODE (exp
) != ARRAY_REF
6616 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6617 && TREE_CODE (exp
) != COMPONENT_REF
6618 && TREE_CODE (exp
) != BIT_FIELD_REF
6619 && TREE_CODE (exp
) != INDIRECT_REF
6620 && TREE_CODE (exp
) != CALL_EXPR
6621 && TREE_CODE (exp
) != VAR_DECL
6622 && TREE_CODE (exp
) != RTL_EXPR
)
6624 enum machine_mode mode
= GET_MODE (target
);
6626 if (GET_MODE_CLASS (mode
) == MODE_INT
6627 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6628 internal_error ("unsupported wide integer operation");
6632 && TREE_CODE (exp
) != INTEGER_CST
6633 && TREE_CODE (exp
) != PARM_DECL
6634 && TREE_CODE (exp
) != ARRAY_REF
6635 && TREE_CODE (exp
) != ARRAY_RANGE_REF
6636 && TREE_CODE (exp
) != COMPONENT_REF
6637 && TREE_CODE (exp
) != BIT_FIELD_REF
6638 && TREE_CODE (exp
) != INDIRECT_REF
6639 && TREE_CODE (exp
) != VAR_DECL
6640 && TREE_CODE (exp
) != CALL_EXPR
6641 && TREE_CODE (exp
) != RTL_EXPR
6642 && GET_MODE_CLASS (tmode
) == MODE_INT
6643 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
6644 internal_error ("unsupported wide integer operation");
6646 check_max_integer_computation_mode (exp
);
6649 /* If will do cse, generate all results into pseudo registers
6650 since 1) that allows cse to find more things
6651 and 2) otherwise cse could produce an insn the machine
6652 cannot support. An exception is a CONSTRUCTOR into a multi-word
6653 MEM: that's much more likely to be most efficient into the MEM.
6654 Another is a CALL_EXPR which must return in memory. */
6656 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6657 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6658 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6659 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
)))
6666 tree function
= decl_function_context (exp
);
6667 /* Handle using a label in a containing function. */
6668 if (function
!= current_function_decl
6669 && function
!= inline_function_decl
&& function
!= 0)
6671 struct function
*p
= find_function_data (function
);
6672 p
->expr
->x_forced_labels
6673 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
6674 p
->expr
->x_forced_labels
);
6678 if (modifier
== EXPAND_INITIALIZER
)
6679 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
6684 temp
= gen_rtx_MEM (FUNCTION_MODE
,
6685 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
6686 if (function
!= current_function_decl
6687 && function
!= inline_function_decl
&& function
!= 0)
6688 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6693 if (!DECL_RTL_SET_P (exp
))
6695 error_with_decl (exp
, "prior parameter's size depends on `%s'");
6696 return CONST0_RTX (mode
);
6699 /* ... fall through ... */
6702 /* If a static var's type was incomplete when the decl was written,
6703 but the type is complete now, lay out the decl now. */
6704 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6705 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6707 rtx value
= DECL_RTL_IF_SET (exp
);
6709 layout_decl (exp
, 0);
6711 /* If the RTL was already set, update its mode and memory
6715 PUT_MODE (value
, DECL_MODE (exp
));
6716 SET_DECL_RTL (exp
, 0);
6717 set_mem_attributes (value
, exp
, 1);
6718 SET_DECL_RTL (exp
, value
);
6722 /* ... fall through ... */
6726 if (DECL_RTL (exp
) == 0)
6729 /* Ensure variable marked as used even if it doesn't go through
6730 a parser. If it hasn't be used yet, write out an external
6732 if (! TREE_USED (exp
))
6734 assemble_external (exp
);
6735 TREE_USED (exp
) = 1;
6738 /* Show we haven't gotten RTL for this yet. */
6741 /* Handle variables inherited from containing functions. */
6742 context
= decl_function_context (exp
);
6744 /* We treat inline_function_decl as an alias for the current function
6745 because that is the inline function whose vars, types, etc.
6746 are being merged into the current function.
6747 See expand_inline_function. */
6749 if (context
!= 0 && context
!= current_function_decl
6750 && context
!= inline_function_decl
6751 /* If var is static, we don't need a static chain to access it. */
6752 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6753 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6757 /* Mark as non-local and addressable. */
6758 DECL_NONLOCAL (exp
) = 1;
6759 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6761 (*lang_hooks
.mark_addressable
) (exp
);
6762 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6764 addr
= XEXP (DECL_RTL (exp
), 0);
6765 if (GET_CODE (addr
) == MEM
)
6767 = replace_equiv_address (addr
,
6768 fix_lexical_addr (XEXP (addr
, 0), exp
));
6770 addr
= fix_lexical_addr (addr
, exp
);
6772 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6775 /* This is the case of an array whose size is to be determined
6776 from its initializer, while the initializer is still being parsed.
6779 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6780 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6781 temp
= validize_mem (DECL_RTL (exp
));
6783 /* If DECL_RTL is memory, we are in the normal case and either
6784 the address is not valid or it is not a register and -fforce-addr
6785 is specified, get the address into a register. */
6787 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6788 && modifier
!= EXPAND_CONST_ADDRESS
6789 && modifier
!= EXPAND_SUM
6790 && modifier
!= EXPAND_INITIALIZER
6791 && (! memory_address_p (DECL_MODE (exp
),
6792 XEXP (DECL_RTL (exp
), 0))
6794 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6795 temp
= replace_equiv_address (DECL_RTL (exp
),
6796 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6798 /* If we got something, return it. But first, set the alignment
6799 if the address is a register. */
6802 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6803 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6808 /* If the mode of DECL_RTL does not match that of the decl, it
6809 must be a promoted value. We return a SUBREG of the wanted mode,
6810 but mark it so that we know that it was already extended. */
6812 if (GET_CODE (DECL_RTL (exp
)) == REG
6813 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6815 /* Get the signedness used for this variable. Ensure we get the
6816 same mode we got when the variable was declared. */
6817 if (GET_MODE (DECL_RTL (exp
))
6818 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6819 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6822 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6823 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6824 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6828 return DECL_RTL (exp
);
6831 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6832 TREE_INT_CST_HIGH (exp
), mode
);
6834 /* ??? If overflow is set, fold will have done an incomplete job,
6835 which can result in (plus xx (const_int 0)), which can get
6836 simplified by validate_replace_rtx during virtual register
6837 instantiation, which can result in unrecognizable insns.
6838 Avoid this by forcing all overflows into registers. */
6839 if (TREE_CONSTANT_OVERFLOW (exp
)
6840 && modifier
!= EXPAND_INITIALIZER
)
6841 temp
= force_reg (mode
, temp
);
6846 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6849 /* If optimized, generate immediate CONST_DOUBLE
6850 which will be turned into memory by reload if necessary.
6852 We used to force a register so that loop.c could see it. But
6853 this does not allow gen_* patterns to perform optimizations with
6854 the constants. It also produces two insns in cases like "x = 1.0;".
6855 On most machines, floating-point constants are not permitted in
6856 many insns, so we'd end up copying it to a register in any case.
6858 Now, we do the copying in expand_binop, if appropriate. */
6859 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6860 TYPE_MODE (TREE_TYPE (exp
)));
6864 if (! TREE_CST_RTL (exp
))
6865 output_constant_def (exp
, 1);
6867 /* TREE_CST_RTL probably contains a constant address.
6868 On RISC machines where a constant address isn't valid,
6869 make some insns to get that address into a register. */
6870 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6871 && modifier
!= EXPAND_CONST_ADDRESS
6872 && modifier
!= EXPAND_INITIALIZER
6873 && modifier
!= EXPAND_SUM
6874 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6876 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6877 return replace_equiv_address (TREE_CST_RTL (exp
),
6878 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6879 return TREE_CST_RTL (exp
);
6881 case EXPR_WITH_FILE_LOCATION
:
6884 const char *saved_input_filename
= input_filename
;
6885 int saved_lineno
= lineno
;
6886 input_filename
= EXPR_WFL_FILENAME (exp
);
6887 lineno
= EXPR_WFL_LINENO (exp
);
6888 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6889 emit_line_note (input_filename
, lineno
);
6890 /* Possibly avoid switching back and forth here. */
6891 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6892 input_filename
= saved_input_filename
;
6893 lineno
= saved_lineno
;
6898 context
= decl_function_context (exp
);
6900 /* If this SAVE_EXPR was at global context, assume we are an
6901 initialization function and move it into our context. */
6903 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6905 /* We treat inline_function_decl as an alias for the current function
6906 because that is the inline function whose vars, types, etc.
6907 are being merged into the current function.
6908 See expand_inline_function. */
6909 if (context
== current_function_decl
|| context
== inline_function_decl
)
6912 /* If this is non-local, handle it. */
6915 /* The following call just exists to abort if the context is
6916 not of a containing function. */
6917 find_function_data (context
);
6919 temp
= SAVE_EXPR_RTL (exp
);
6920 if (temp
&& GET_CODE (temp
) == REG
)
6922 put_var_into_stack (exp
, /*rescan=*/true);
6923 temp
= SAVE_EXPR_RTL (exp
);
6925 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6928 replace_equiv_address (temp
,
6929 fix_lexical_addr (XEXP (temp
, 0), exp
));
6931 if (SAVE_EXPR_RTL (exp
) == 0)
6933 if (mode
== VOIDmode
)
6936 temp
= assign_temp (build_qualified_type (type
,
6938 | TYPE_QUAL_CONST
)),
6941 SAVE_EXPR_RTL (exp
) = temp
;
6942 if (!optimize
&& GET_CODE (temp
) == REG
)
6943 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6946 /* If the mode of TEMP does not match that of the expression, it
6947 must be a promoted value. We pass store_expr a SUBREG of the
6948 wanted mode but mark it so that we know that it was already
6951 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6953 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6954 promote_mode (type
, mode
, &unsignedp
, 0);
6955 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6956 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6959 if (temp
== const0_rtx
)
6960 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6962 store_expr (TREE_OPERAND (exp
, 0), temp
,
6963 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6965 TREE_USED (exp
) = 1;
6968 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6969 must be a promoted value. We return a SUBREG of the wanted mode,
6970 but mark it so that we know that it was already extended. */
6972 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6973 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6975 /* Compute the signedness and make the proper SUBREG. */
6976 promote_mode (type
, mode
, &unsignedp
, 0);
6977 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6978 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6979 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6983 return SAVE_EXPR_RTL (exp
);
6988 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6989 TREE_OPERAND (exp
, 0)
6990 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6994 case PLACEHOLDER_EXPR
:
6996 tree old_list
= placeholder_list
;
6997 tree placeholder_expr
= 0;
6999 exp
= find_placeholder (exp
, &placeholder_expr
);
7003 placeholder_list
= TREE_CHAIN (placeholder_expr
);
7004 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
7005 placeholder_list
= old_list
;
7009 case WITH_RECORD_EXPR
:
7010 /* Put the object on the placeholder list, expand our first operand,
7011 and pop the list. */
7012 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
7014 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
7016 placeholder_list
= TREE_CHAIN (placeholder_list
);
7020 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
7021 expand_goto (TREE_OPERAND (exp
, 0));
7023 expand_computed_goto (TREE_OPERAND (exp
, 0));
7027 expand_exit_loop_if_false (NULL
,
7028 invert_truthvalue (TREE_OPERAND (exp
, 0)));
7031 case LABELED_BLOCK_EXPR
:
7032 if (LABELED_BLOCK_BODY (exp
))
7033 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
7034 /* Should perhaps use expand_label, but this is simpler and safer. */
7035 do_pending_stack_adjust ();
7036 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
7039 case EXIT_BLOCK_EXPR
:
7040 if (EXIT_BLOCK_RETURN (exp
))
7041 sorry ("returned value in block_exit_expr");
7042 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
7047 expand_start_loop (1);
7048 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
7056 tree vars
= TREE_OPERAND (exp
, 0);
7058 /* Need to open a binding contour here because
7059 if there are any cleanups they must be contained here. */
7060 expand_start_bindings (2);
7062 /* Mark the corresponding BLOCK for output in its proper place. */
7063 if (TREE_OPERAND (exp
, 2) != 0
7064 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
7065 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
7067 /* If VARS have not yet been expanded, expand them now. */
7070 if (!DECL_RTL_SET_P (vars
))
7072 expand_decl_init (vars
);
7073 vars
= TREE_CHAIN (vars
);
7076 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
7078 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
7084 if (RTL_EXPR_SEQUENCE (exp
))
7086 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
7088 emit_insn (RTL_EXPR_SEQUENCE (exp
));
7089 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
7091 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
7092 free_temps_for_rtl_expr (exp
);
7093 return RTL_EXPR_RTL (exp
);
7096 /* If we don't need the result, just ensure we evaluate any
7102 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
7103 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
7108 /* All elts simple constants => refer to a constant in memory. But
7109 if this is a non-BLKmode mode, let it store a field at a time
7110 since that should make a CONST_INT or CONST_DOUBLE when we
7111 fold. Likewise, if we have a target we can use, it is best to
7112 store directly into the target unless the type is large enough
7113 that memcpy will be used. If we are making an initializer and
7114 all operands are constant, put it in memory as well.
7116 FIXME: Avoid trying to fill vector constructors piece-meal.
7117 Output them with output_constant_def below unless we're sure
7118 they're zeros. This should go away when vector initializers
7119 are treated like VECTOR_CST instead of arrays.
7121 else if ((TREE_STATIC (exp
)
7122 && ((mode
== BLKmode
7123 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7124 || TREE_ADDRESSABLE (exp
)
7125 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
7126 && (! MOVE_BY_PIECES_P
7127 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
7129 && ((TREE_CODE (type
) == VECTOR_TYPE
7130 && !is_zeros_p (exp
))
7131 || ! mostly_zeros_p (exp
)))))
7132 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
7134 rtx constructor
= output_constant_def (exp
, 1);
7136 if (modifier
!= EXPAND_CONST_ADDRESS
7137 && modifier
!= EXPAND_INITIALIZER
7138 && modifier
!= EXPAND_SUM
)
7139 constructor
= validize_mem (constructor
);
7145 /* Handle calls that pass values in multiple non-contiguous
7146 locations. The Irix 6 ABI has examples of this. */
7147 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7148 || GET_CODE (target
) == PARALLEL
7149 || modifier
== EXPAND_STACK_PARM
)
7151 = assign_temp (build_qualified_type (type
,
7153 | (TREE_READONLY (exp
)
7154 * TYPE_QUAL_CONST
))),
7155 0, TREE_ADDRESSABLE (exp
), 1);
7157 store_constructor (exp
, target
, 0, int_expr_size (exp
));
7163 tree exp1
= TREE_OPERAND (exp
, 0);
7165 tree string
= string_constant (exp1
, &index
);
7167 /* Try to optimize reads from const strings. */
7169 && TREE_CODE (string
) == STRING_CST
7170 && TREE_CODE (index
) == INTEGER_CST
7171 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
7172 && GET_MODE_CLASS (mode
) == MODE_INT
7173 && GET_MODE_SIZE (mode
) == 1
7174 && modifier
!= EXPAND_WRITE
)
7175 return gen_int_mode (TREE_STRING_POINTER (string
)
7176 [TREE_INT_CST_LOW (index
)], mode
);
7178 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7179 op0
= memory_address (mode
, op0
);
7180 temp
= gen_rtx_MEM (mode
, op0
);
7181 set_mem_attributes (temp
, exp
, 0);
7183 /* If we are writing to this object and its type is a record with
7184 readonly fields, we must mark it as readonly so it will
7185 conflict with readonly references to those fields. */
7186 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
7187 RTX_UNCHANGING_P (temp
) = 1;
7193 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
7197 tree array
= TREE_OPERAND (exp
, 0);
7198 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
7199 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
7200 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
7203 /* Optimize the special-case of a zero lower bound.
7205 We convert the low_bound to sizetype to avoid some problems
7206 with constant folding. (E.g. suppose the lower bound is 1,
7207 and its mode is QI. Without the conversion, (ARRAY
7208 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7209 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7211 if (! integer_zerop (low_bound
))
7212 index
= size_diffop (index
, convert (sizetype
, low_bound
));
7214 /* Fold an expression like: "foo"[2].
7215 This is not done in fold so it won't happen inside &.
7216 Don't fold if this is for wide characters since it's too
7217 difficult to do correctly and this is a very rare case. */
7219 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7220 && TREE_CODE (array
) == STRING_CST
7221 && TREE_CODE (index
) == INTEGER_CST
7222 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
7223 && GET_MODE_CLASS (mode
) == MODE_INT
7224 && GET_MODE_SIZE (mode
) == 1)
7225 return gen_int_mode (TREE_STRING_POINTER (array
)
7226 [TREE_INT_CST_LOW (index
)], mode
);
7228 /* If this is a constant index into a constant array,
7229 just get the value from the array. Handle both the cases when
7230 we have an explicit constructor and when our operand is a variable
7231 that was declared const. */
7233 if (modifier
!= EXPAND_CONST_ADDRESS
&& modifier
!= EXPAND_INITIALIZER
7234 && TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
7235 && TREE_CODE (index
) == INTEGER_CST
7236 && 0 > compare_tree_int (index
,
7237 list_length (CONSTRUCTOR_ELTS
7238 (TREE_OPERAND (exp
, 0)))))
7242 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7243 i
= TREE_INT_CST_LOW (index
);
7244 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
7248 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
7252 else if (optimize
>= 1
7253 && modifier
!= EXPAND_CONST_ADDRESS
7254 && modifier
!= EXPAND_INITIALIZER
7255 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7256 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7257 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
7259 if (TREE_CODE (index
) == INTEGER_CST
)
7261 tree init
= DECL_INITIAL (array
);
7263 if (TREE_CODE (init
) == CONSTRUCTOR
)
7267 for (elem
= CONSTRUCTOR_ELTS (init
);
7269 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
7270 elem
= TREE_CHAIN (elem
))
7273 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
7274 return expand_expr (fold (TREE_VALUE (elem
)), target
,
7277 else if (TREE_CODE (init
) == STRING_CST
7278 && 0 > compare_tree_int (index
,
7279 TREE_STRING_LENGTH (init
)))
7281 tree type
= TREE_TYPE (TREE_TYPE (init
));
7282 enum machine_mode mode
= TYPE_MODE (type
);
7284 if (GET_MODE_CLASS (mode
) == MODE_INT
7285 && GET_MODE_SIZE (mode
) == 1)
7286 return gen_int_mode (TREE_STRING_POINTER (init
)
7287 [TREE_INT_CST_LOW (index
)], mode
);
7296 case ARRAY_RANGE_REF
:
7297 /* If the operand is a CONSTRUCTOR, we can just extract the
7298 appropriate field if it is present. Don't do this if we have
7299 already written the data since we want to refer to that copy
7300 and varasm.c assumes that's what we'll do. */
7301 if (code
== COMPONENT_REF
7302 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
7303 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
7307 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
7308 elt
= TREE_CHAIN (elt
))
7309 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
7310 /* We can normally use the value of the field in the
7311 CONSTRUCTOR. However, if this is a bitfield in
7312 an integral mode that we can fit in a HOST_WIDE_INT,
7313 we must mask only the number of bits in the bitfield,
7314 since this is done implicitly by the constructor. If
7315 the bitfield does not meet either of those conditions,
7316 we can't do this optimization. */
7317 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7318 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
7320 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
7321 <= HOST_BITS_PER_WIDE_INT
))))
7323 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
7324 && modifier
== EXPAND_STACK_PARM
)
7326 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
7327 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
7329 HOST_WIDE_INT bitsize
7330 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
7331 enum machine_mode imode
7332 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
7334 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
7336 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7337 op0
= expand_and (imode
, op0
, op1
, target
);
7342 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7345 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7347 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7357 enum machine_mode mode1
;
7358 HOST_WIDE_INT bitsize
, bitpos
;
7361 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7362 &mode1
, &unsignedp
, &volatilep
);
7365 /* If we got back the original object, something is wrong. Perhaps
7366 we are evaluating an expression too early. In any event, don't
7367 infinitely recurse. */
7371 /* If TEM's type is a union of variable size, pass TARGET to the inner
7372 computation, since it will need a temporary and TARGET is known
7373 to have to do. This occurs in unchecked conversion in Ada. */
7377 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7378 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7380 && modifier
!= EXPAND_STACK_PARM
7381 ? target
: NULL_RTX
),
7383 (modifier
== EXPAND_INITIALIZER
7384 || modifier
== EXPAND_CONST_ADDRESS
7385 || modifier
== EXPAND_STACK_PARM
)
7386 ? modifier
: EXPAND_NORMAL
);
7388 /* If this is a constant, put it into a register if it is a
7389 legitimate constant and OFFSET is 0 and memory if it isn't. */
7390 if (CONSTANT_P (op0
))
7392 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7393 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7395 op0
= force_reg (mode
, op0
);
7397 op0
= validize_mem (force_const_mem (mode
, op0
));
7402 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7405 /* If this object is in a register, put it into memory.
7406 This case can't occur in C, but can in Ada if we have
7407 unchecked conversion of an expression from a scalar type to
7408 an array or record type. */
7409 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7410 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7412 /* If the operand is a SAVE_EXPR, we can deal with this by
7413 forcing the SAVE_EXPR into memory. */
7414 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7416 put_var_into_stack (TREE_OPERAND (exp
, 0),
7418 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7423 = build_qualified_type (TREE_TYPE (tem
),
7424 (TYPE_QUALS (TREE_TYPE (tem
))
7425 | TYPE_QUAL_CONST
));
7426 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7428 emit_move_insn (memloc
, op0
);
7433 if (GET_CODE (op0
) != MEM
)
7436 #ifdef POINTERS_EXTEND_UNSIGNED
7437 if (GET_MODE (offset_rtx
) != Pmode
)
7438 offset_rtx
= convert_memory_address (Pmode
, offset_rtx
);
7440 if (GET_MODE (offset_rtx
) != ptr_mode
)
7441 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7444 /* A constant address in OP0 can have VOIDmode, we must not try
7445 to call force_reg for that case. Avoid that case. */
7446 if (GET_CODE (op0
) == MEM
7447 && GET_MODE (op0
) == BLKmode
7448 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7450 && (bitpos
% bitsize
) == 0
7451 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7452 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7454 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7458 op0
= offset_address (op0
, offset_rtx
,
7459 highest_pow2_factor (offset
));
7462 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7463 record its alignment as BIGGEST_ALIGNMENT. */
7464 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7465 && is_aligning_offset (offset
, tem
))
7466 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7468 /* Don't forget about volatility even if this is a bitfield. */
7469 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7471 if (op0
== orig_op0
)
7472 op0
= copy_rtx (op0
);
7474 MEM_VOLATILE_P (op0
) = 1;
7477 /* The following code doesn't handle CONCAT.
7478 Assume only bitpos == 0 can be used for CONCAT, due to
7479 one element arrays having the same mode as its element. */
7480 if (GET_CODE (op0
) == CONCAT
)
7482 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7487 /* In cases where an aligned union has an unaligned object
7488 as a field, we might be extracting a BLKmode value from
7489 an integer-mode (e.g., SImode) object. Handle this case
7490 by doing the extract into an object as wide as the field
7491 (which we know to be the width of a basic mode), then
7492 storing into memory, and changing the mode to BLKmode. */
7493 if (mode1
== VOIDmode
7494 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7495 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7496 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7497 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7498 && modifier
!= EXPAND_CONST_ADDRESS
7499 && modifier
!= EXPAND_INITIALIZER
)
7500 /* If the field isn't aligned enough to fetch as a memref,
7501 fetch it as a bit field. */
7502 || (mode1
!= BLKmode
7503 && SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))
7504 && ((TYPE_ALIGN (TREE_TYPE (tem
))
7505 < GET_MODE_ALIGNMENT (mode
))
7506 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
7507 /* If the type and the field are a constant size and the
7508 size of the type isn't the same size as the bitfield,
7509 we must use bitfield operations. */
7511 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7513 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7516 enum machine_mode ext_mode
= mode
;
7518 if (ext_mode
== BLKmode
7519 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7520 && GET_CODE (target
) == MEM
7521 && bitpos
% BITS_PER_UNIT
== 0))
7522 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7524 if (ext_mode
== BLKmode
)
7526 /* In this case, BITPOS must start at a byte boundary and
7527 TARGET, if specified, must be a MEM. */
7528 if (GET_CODE (op0
) != MEM
7529 || (target
!= 0 && GET_CODE (target
) != MEM
)
7530 || bitpos
% BITS_PER_UNIT
!= 0)
7533 op0
= adjust_address (op0
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
7535 target
= assign_temp (type
, 0, 1, 1);
7537 emit_block_move (target
, op0
,
7538 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7540 (modifier
== EXPAND_STACK_PARM
7541 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7546 op0
= validize_mem (op0
);
7548 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7549 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7551 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7552 (modifier
== EXPAND_STACK_PARM
7553 ? NULL_RTX
: target
),
7555 int_size_in_bytes (TREE_TYPE (tem
)));
7557 /* If the result is a record type and BITSIZE is narrower than
7558 the mode of OP0, an integral mode, and this is a big endian
7559 machine, we must put the field into the high-order bits. */
7560 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7561 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7562 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7563 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7564 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7568 if (mode
== BLKmode
)
7570 rtx
new = assign_temp (build_qualified_type
7571 ((*lang_hooks
.types
.type_for_mode
)
7573 TYPE_QUAL_CONST
), 0, 1, 1);
7575 emit_move_insn (new, op0
);
7576 op0
= copy_rtx (new);
7577 PUT_MODE (op0
, BLKmode
);
7578 set_mem_attributes (op0
, exp
, 1);
7584 /* If the result is BLKmode, use that to access the object
7586 if (mode
== BLKmode
)
7589 /* Get a reference to just this component. */
7590 if (modifier
== EXPAND_CONST_ADDRESS
7591 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7592 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7594 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7596 if (op0
== orig_op0
)
7597 op0
= copy_rtx (op0
);
7599 set_mem_attributes (op0
, exp
, 0);
7600 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7601 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7603 MEM_VOLATILE_P (op0
) |= volatilep
;
7604 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7605 || modifier
== EXPAND_CONST_ADDRESS
7606 || modifier
== EXPAND_INITIALIZER
)
7608 else if (target
== 0)
7609 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7611 convert_move (target
, op0
, unsignedp
);
7617 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7619 /* Evaluate the interior expression. */
7620 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7623 /* Get or create an instruction off which to hang a note. */
7624 if (REG_P (subtarget
))
7627 insn
= get_last_insn ();
7630 if (! INSN_P (insn
))
7631 insn
= prev_nonnote_insn (insn
);
7635 target
= gen_reg_rtx (GET_MODE (subtarget
));
7636 insn
= emit_move_insn (target
, subtarget
);
7639 /* Collect the data for the note. */
7640 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7641 vtbl_ref
= plus_constant (vtbl_ref
,
7642 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7643 /* Discard the initial CONST that was added. */
7644 vtbl_ref
= XEXP (vtbl_ref
, 0);
7647 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7652 /* Intended for a reference to a buffer of a file-object in Pascal.
7653 But it's not certain that a special tree code will really be
7654 necessary for these. INDIRECT_REF might work for them. */
7660 /* Pascal set IN expression.
7663 rlo = set_low - (set_low%bits_per_word);
7664 the_word = set [ (index - rlo)/bits_per_word ];
7665 bit_index = index % bits_per_word;
7666 bitmask = 1 << bit_index;
7667 return !!(the_word & bitmask); */
7669 tree set
= TREE_OPERAND (exp
, 0);
7670 tree index
= TREE_OPERAND (exp
, 1);
7671 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7672 tree set_type
= TREE_TYPE (set
);
7673 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7674 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7675 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7676 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7677 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7678 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7679 rtx setaddr
= XEXP (setval
, 0);
7680 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7682 rtx diff
, quo
, rem
, addr
, bit
, result
;
7684 /* If domain is empty, answer is no. Likewise if index is constant
7685 and out of bounds. */
7686 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7687 && TREE_CODE (set_low_bound
) == INTEGER_CST
7688 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7689 || (TREE_CODE (index
) == INTEGER_CST
7690 && TREE_CODE (set_low_bound
) == INTEGER_CST
7691 && tree_int_cst_lt (index
, set_low_bound
))
7692 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7693 && TREE_CODE (index
) == INTEGER_CST
7694 && tree_int_cst_lt (set_high_bound
, index
))))
7698 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7700 /* If we get here, we have to generate the code for both cases
7701 (in range and out of range). */
7703 op0
= gen_label_rtx ();
7704 op1
= gen_label_rtx ();
7706 if (! (GET_CODE (index_val
) == CONST_INT
7707 && GET_CODE (lo_r
) == CONST_INT
))
7708 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7709 GET_MODE (index_val
), iunsignedp
, op1
);
7711 if (! (GET_CODE (index_val
) == CONST_INT
7712 && GET_CODE (hi_r
) == CONST_INT
))
7713 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7714 GET_MODE (index_val
), iunsignedp
, op1
);
7716 /* Calculate the element number of bit zero in the first word
7718 if (GET_CODE (lo_r
) == CONST_INT
)
7719 rlow
= GEN_INT (INTVAL (lo_r
)
7720 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7722 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7723 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7724 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7726 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7727 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7729 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7730 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7731 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7732 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7734 addr
= memory_address (byte_mode
,
7735 expand_binop (index_mode
, add_optab
, diff
,
7736 setaddr
, NULL_RTX
, iunsignedp
,
7739 /* Extract the bit we want to examine. */
7740 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7741 gen_rtx_MEM (byte_mode
, addr
),
7742 make_tree (TREE_TYPE (index
), rem
),
7744 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7745 GET_MODE (target
) == byte_mode
? target
: 0,
7746 1, OPTAB_LIB_WIDEN
);
7748 if (result
!= target
)
7749 convert_move (target
, result
, 1);
7751 /* Output the code to handle the out-of-range case. */
7754 emit_move_insn (target
, const0_rtx
);
7759 case WITH_CLEANUP_EXPR
:
7760 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7762 WITH_CLEANUP_EXPR_RTL (exp
)
7763 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7764 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7765 CLEANUP_EH_ONLY (exp
));
7767 /* That's it for this cleanup. */
7768 TREE_OPERAND (exp
, 1) = 0;
7770 return WITH_CLEANUP_EXPR_RTL (exp
);
7772 case CLEANUP_POINT_EXPR
:
7774 /* Start a new binding layer that will keep track of all cleanup
7775 actions to be performed. */
7776 expand_start_bindings (2);
7778 target_temp_slot_level
= temp_slot_level
;
7780 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7781 /* If we're going to use this value, load it up now. */
7783 op0
= force_not_mem (op0
);
7784 preserve_temp_slots (op0
);
7785 expand_end_bindings (NULL_TREE
, 0, 0);
7790 /* Check for a built-in function. */
7791 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7792 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7794 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7796 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7797 == BUILT_IN_FRONTEND
)
7798 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7801 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7804 return expand_call (exp
, target
, ignore
);
7806 case NON_LVALUE_EXPR
:
7809 case REFERENCE_EXPR
:
7810 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7813 if (TREE_CODE (type
) == UNION_TYPE
)
7815 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7817 /* If both input and output are BLKmode, this conversion isn't doing
7818 anything except possibly changing memory attribute. */
7819 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7821 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7824 result
= copy_rtx (result
);
7825 set_mem_attributes (result
, exp
, 0);
7830 target
= assign_temp (type
, 0, 1, 1);
7832 if (GET_CODE (target
) == MEM
)
7833 /* Store data into beginning of memory target. */
7834 store_expr (TREE_OPERAND (exp
, 0),
7835 adjust_address (target
, TYPE_MODE (valtype
), 0),
7836 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7838 else if (GET_CODE (target
) == REG
)
7839 /* Store this field into a union of the proper type. */
7840 store_field (target
,
7841 MIN ((int_size_in_bytes (TREE_TYPE
7842 (TREE_OPERAND (exp
, 0)))
7844 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7845 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7846 VOIDmode
, 0, type
, 0);
7850 /* Return the entire union. */
7854 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7856 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7859 /* If the signedness of the conversion differs and OP0 is
7860 a promoted SUBREG, clear that indication since we now
7861 have to do the proper extension. */
7862 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7863 && GET_CODE (op0
) == SUBREG
)
7864 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7869 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7870 if (GET_MODE (op0
) == mode
)
7873 /* If OP0 is a constant, just convert it into the proper mode. */
7874 if (CONSTANT_P (op0
))
7876 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7877 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7879 if (modifier
== EXPAND_INITIALIZER
)
7880 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7881 subreg_lowpart_offset (mode
,
7884 return convert_modes (mode
, inner_mode
, op0
,
7885 TREE_UNSIGNED (inner_type
));
7888 if (modifier
== EXPAND_INITIALIZER
)
7889 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7893 convert_to_mode (mode
, op0
,
7894 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7896 convert_move (target
, op0
,
7897 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7900 case VIEW_CONVERT_EXPR
:
7901 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7903 /* If the input and output modes are both the same, we are done.
7904 Otherwise, if neither mode is BLKmode and both are within a word, we
7905 can use gen_lowpart. If neither is true, make sure the operand is
7906 in memory and convert the MEM to the new mode. */
7907 if (TYPE_MODE (type
) == GET_MODE (op0
))
7909 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7910 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7911 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7912 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7913 else if (GET_CODE (op0
) != MEM
)
7915 /* If the operand is not a MEM, force it into memory. Since we
7916 are going to be be changing the mode of the MEM, don't call
7917 force_const_mem for constants because we don't allow pool
7918 constants to change mode. */
7919 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7921 if (TREE_ADDRESSABLE (exp
))
7924 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7926 = assign_stack_temp_for_type
7927 (TYPE_MODE (inner_type
),
7928 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7930 emit_move_insn (target
, op0
);
7934 /* At this point, OP0 is in the correct mode. If the output type is such
7935 that the operand is known to be aligned, indicate that it is.
7936 Otherwise, we need only be concerned about alignment for non-BLKmode
7938 if (GET_CODE (op0
) == MEM
)
7940 op0
= copy_rtx (op0
);
7942 if (TYPE_ALIGN_OK (type
))
7943 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7944 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7945 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7947 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7948 HOST_WIDE_INT temp_size
7949 = MAX (int_size_in_bytes (inner_type
),
7950 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7951 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7952 temp_size
, 0, type
);
7953 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7955 if (TREE_ADDRESSABLE (exp
))
7958 if (GET_MODE (op0
) == BLKmode
)
7959 emit_block_move (new_with_op0_mode
, op0
,
7960 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7961 (modifier
== EXPAND_STACK_PARM
7962 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7964 emit_move_insn (new_with_op0_mode
, op0
);
7969 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7975 this_optab
= ! unsignedp
&& flag_trapv
7976 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7977 ? addv_optab
: add_optab
;
7979 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7980 something else, make sure we add the register to the constant and
7981 then to the other thing. This case can occur during strength
7982 reduction and doing it this way will produce better code if the
7983 frame pointer or argument pointer is eliminated.
7985 fold-const.c will ensure that the constant is always in the inner
7986 PLUS_EXPR, so the only case we need to do anything about is if
7987 sp, ap, or fp is our second argument, in which case we must swap
7988 the innermost first argument and our second argument. */
7990 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7991 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7992 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7993 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7994 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7995 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7997 tree t
= TREE_OPERAND (exp
, 1);
7999 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
8000 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
8003 /* If the result is to be ptr_mode and we are adding an integer to
8004 something, we might be forming a constant. So try to use
8005 plus_constant. If it produces a sum and we can't accept it,
8006 use force_operand. This allows P = &ARR[const] to generate
8007 efficient code on machines where a SYMBOL_REF is not a valid
8010 If this is an EXPAND_SUM call, always return the sum. */
8011 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8012 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8014 if (modifier
== EXPAND_STACK_PARM
)
8016 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
8017 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
8018 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
8022 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
8024 /* Use immed_double_const to ensure that the constant is
8025 truncated according to the mode of OP1, then sign extended
8026 to a HOST_WIDE_INT. Using the constant directly can result
8027 in non-canonical RTL in a 64x32 cross compile. */
8029 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
8031 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
8032 op1
= plus_constant (op1
, INTVAL (constant_part
));
8033 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8034 op1
= force_operand (op1
, target
);
8038 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8039 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
8040 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
8044 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
8045 (modifier
== EXPAND_INITIALIZER
8046 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8047 if (! CONSTANT_P (op0
))
8049 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
8050 VOIDmode
, modifier
);
8051 /* Don't go to both_summands if modifier
8052 says it's not right to return a PLUS. */
8053 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8057 /* Use immed_double_const to ensure that the constant is
8058 truncated according to the mode of OP1, then sign extended
8059 to a HOST_WIDE_INT. Using the constant directly can result
8060 in non-canonical RTL in a 64x32 cross compile. */
8062 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
8064 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8065 op0
= plus_constant (op0
, INTVAL (constant_part
));
8066 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8067 op0
= force_operand (op0
, target
);
8072 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8075 /* No sense saving up arithmetic to be done
8076 if it's all in the wrong mode to form part of an address.
8077 And force_operand won't know whether to sign-extend or
8079 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8080 || mode
!= ptr_mode
)
8082 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8083 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8084 if (op0
== const0_rtx
)
8086 if (op1
== const0_rtx
)
8091 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
8092 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
8094 /* We come here from MINUS_EXPR when the second operand is a
8097 /* Make sure any term that's a sum with a constant comes last. */
8098 if (GET_CODE (op0
) == PLUS
8099 && CONSTANT_P (XEXP (op0
, 1)))
8105 /* If adding to a sum including a constant,
8106 associate it to put the constant outside. */
8107 if (GET_CODE (op1
) == PLUS
8108 && CONSTANT_P (XEXP (op1
, 1)))
8110 rtx constant_term
= const0_rtx
;
8112 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
8115 /* Ensure that MULT comes first if there is one. */
8116 else if (GET_CODE (op0
) == MULT
)
8117 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
8119 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
8121 /* Let's also eliminate constants from op0 if possible. */
8122 op0
= eliminate_constant_term (op0
, &constant_term
);
8124 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8125 their sum should be a constant. Form it into OP1, since the
8126 result we want will then be OP0 + OP1. */
8128 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
8133 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
8136 /* Put a constant term last and put a multiplication first. */
8137 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
8138 temp
= op1
, op1
= op0
, op0
= temp
;
8140 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
8141 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
8144 /* For initializers, we are allowed to return a MINUS of two
8145 symbolic constants. Here we handle all cases when both operands
8147 /* Handle difference of two symbolic constants,
8148 for the sake of an initializer. */
8149 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8150 && really_constant_p (TREE_OPERAND (exp
, 0))
8151 && really_constant_p (TREE_OPERAND (exp
, 1)))
8153 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
8155 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8158 /* If the last operand is a CONST_INT, use plus_constant of
8159 the negated constant. Else make the MINUS. */
8160 if (GET_CODE (op1
) == CONST_INT
)
8161 return plus_constant (op0
, - INTVAL (op1
));
8163 return gen_rtx_MINUS (mode
, op0
, op1
);
8166 this_optab
= ! unsignedp
&& flag_trapv
8167 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8168 ? subv_optab
: sub_optab
;
8170 /* No sense saving up arithmetic to be done
8171 if it's all in the wrong mode to form part of an address.
8172 And force_operand won't know whether to sign-extend or
8174 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8175 || mode
!= ptr_mode
)
8178 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8181 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
8182 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
8184 /* Convert A - const to A + (-const). */
8185 if (GET_CODE (op1
) == CONST_INT
)
8187 op1
= negate_rtx (mode
, op1
);
8194 /* If first operand is constant, swap them.
8195 Thus the following special case checks need only
8196 check the second operand. */
8197 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
8199 tree t1
= TREE_OPERAND (exp
, 0);
8200 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
8201 TREE_OPERAND (exp
, 1) = t1
;
8204 /* Attempt to return something suitable for generating an
8205 indexed address, for machines that support that. */
8207 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8208 && host_integerp (TREE_OPERAND (exp
, 1), 0))
8210 tree exp1
= TREE_OPERAND (exp
, 1);
8212 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
8215 /* If we knew for certain that this is arithmetic for an array
8216 reference, and we knew the bounds of the array, then we could
8217 apply the distributive law across (PLUS X C) for constant C.
8218 Without such knowledge, we risk overflowing the computation
8219 when both X and C are large, but X+C isn't. */
8220 /* ??? Could perhaps special-case EXP being unsigned and C being
8221 positive. In that case we are certain that X+C is no smaller
8222 than X and so the transformed expression will overflow iff the
8223 original would have. */
8225 if (GET_CODE (op0
) != REG
)
8226 op0
= force_operand (op0
, NULL_RTX
);
8227 if (GET_CODE (op0
) != REG
)
8228 op0
= copy_to_mode_reg (mode
, op0
);
8230 return gen_rtx_MULT (mode
, op0
,
8231 gen_int_mode (tree_low_cst (exp1
, 0),
8232 TYPE_MODE (TREE_TYPE (exp1
))));
8235 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8238 if (modifier
== EXPAND_STACK_PARM
)
8241 /* Check for multiplying things that have been extended
8242 from a narrower type. If this machine supports multiplying
8243 in that narrower type with a result in the desired type,
8244 do it that way, and avoid the explicit type-conversion. */
8245 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
8246 && TREE_CODE (type
) == INTEGER_TYPE
8247 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8248 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
8249 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
8250 && int_fits_type_p (TREE_OPERAND (exp
, 1),
8251 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8252 /* Don't use a widening multiply if a shift will do. */
8253 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
8254 > HOST_BITS_PER_WIDE_INT
)
8255 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
8257 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8258 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8260 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
8261 /* If both operands are extended, they must either both
8262 be zero-extended or both be sign-extended. */
8263 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
8265 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
8267 enum machine_mode innermode
8268 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
8269 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8270 ? smul_widen_optab
: umul_widen_optab
);
8271 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
8272 ? umul_widen_optab
: smul_widen_optab
);
8273 if (mode
== GET_MODE_WIDER_MODE (innermode
))
8275 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
8277 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8278 NULL_RTX
, VOIDmode
, 0);
8279 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8280 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
8283 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8284 NULL_RTX
, VOIDmode
, 0);
8287 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
8288 && innermode
== word_mode
)
8291 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8292 NULL_RTX
, VOIDmode
, 0);
8293 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
8294 op1
= convert_modes (innermode
, mode
,
8295 expand_expr (TREE_OPERAND (exp
, 1),
8296 NULL_RTX
, VOIDmode
, 0),
8299 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
8300 NULL_RTX
, VOIDmode
, 0);
8301 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8302 unsignedp
, OPTAB_LIB_WIDEN
);
8303 htem
= expand_mult_highpart_adjust (innermode
,
8304 gen_highpart (innermode
, temp
),
8306 gen_highpart (innermode
, temp
),
8308 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
8313 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8314 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8315 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
8317 case TRUNC_DIV_EXPR
:
8318 case FLOOR_DIV_EXPR
:
8320 case ROUND_DIV_EXPR
:
8321 case EXACT_DIV_EXPR
:
8322 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8324 if (modifier
== EXPAND_STACK_PARM
)
8326 /* Possible optimization: compute the dividend with EXPAND_SUM
8327 then if the divisor is constant can optimize the case
8328 where some terms of the dividend have coeffs divisible by it. */
8329 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8330 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8331 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
8334 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8335 expensive divide. If not, combine will rebuild the original
8337 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
8338 && TREE_CODE (type
) == REAL_TYPE
8339 && !real_onep (TREE_OPERAND (exp
, 0)))
8340 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
8341 build (RDIV_EXPR
, type
,
8342 build_real (type
, dconst1
),
8343 TREE_OPERAND (exp
, 1))),
8344 target
, tmode
, modifier
);
8345 this_optab
= sdiv_optab
;
8348 case TRUNC_MOD_EXPR
:
8349 case FLOOR_MOD_EXPR
:
8351 case ROUND_MOD_EXPR
:
8352 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8354 if (modifier
== EXPAND_STACK_PARM
)
8356 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8357 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8358 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8360 case FIX_ROUND_EXPR
:
8361 case FIX_FLOOR_EXPR
:
8363 abort (); /* Not used for C. */
8365 case FIX_TRUNC_EXPR
:
8366 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8367 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8368 target
= gen_reg_rtx (mode
);
8369 expand_fix (target
, op0
, unsignedp
);
8373 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8374 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8375 target
= gen_reg_rtx (mode
);
8376 /* expand_float can't figure out what to do if FROM has VOIDmode.
8377 So give it the correct mode. With -O, cse will optimize this. */
8378 if (GET_MODE (op0
) == VOIDmode
)
8379 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8381 expand_float (target
, op0
,
8382 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
8386 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8387 if (modifier
== EXPAND_STACK_PARM
)
8389 temp
= expand_unop (mode
,
8390 ! unsignedp
&& flag_trapv
8391 && (GET_MODE_CLASS(mode
) == MODE_INT
)
8392 ? negv_optab
: neg_optab
, op0
, target
, 0);
8398 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8399 if (modifier
== EXPAND_STACK_PARM
)
8402 /* Handle complex values specially. */
8403 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8404 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
8405 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
8407 /* Unsigned abs is simply the operand. Testing here means we don't
8408 risk generating incorrect code below. */
8409 if (TREE_UNSIGNED (type
))
8412 return expand_abs (mode
, op0
, target
, unsignedp
,
8413 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8417 target
= original_target
;
8419 || modifier
== EXPAND_STACK_PARM
8420 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
8421 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8422 || GET_MODE (target
) != mode
8423 || (GET_CODE (target
) == REG
8424 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8425 target
= gen_reg_rtx (mode
);
8426 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8427 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8429 /* First try to do it with a special MIN or MAX instruction.
8430 If that does not win, use a conditional jump to select the proper
8432 this_optab
= (TREE_UNSIGNED (type
)
8433 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8434 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8436 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8441 /* At this point, a MEM target is no longer useful; we will get better
8444 if (GET_CODE (target
) == MEM
)
8445 target
= gen_reg_rtx (mode
);
8448 emit_move_insn (target
, op0
);
8450 op0
= gen_label_rtx ();
8452 /* If this mode is an integer too wide to compare properly,
8453 compare word by word. Rely on cse to optimize constant cases. */
8454 if (GET_MODE_CLASS (mode
) == MODE_INT
8455 && ! can_compare_p (GE
, mode
, ccp_jump
))
8457 if (code
== MAX_EXPR
)
8458 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8459 target
, op1
, NULL_RTX
, op0
);
8461 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8462 op1
, target
, NULL_RTX
, op0
);
8466 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8467 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8468 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8471 emit_move_insn (target
, op1
);
8476 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8477 if (modifier
== EXPAND_STACK_PARM
)
8479 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8485 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8486 if (modifier
== EXPAND_STACK_PARM
)
8488 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
8494 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8495 temp
= expand_unop (mode
, clz_optab
, op0
, target
, 1);
8501 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8502 temp
= expand_unop (mode
, ctz_optab
, op0
, target
, 1);
8508 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8509 temp
= expand_unop (mode
, popcount_optab
, op0
, target
, 1);
8515 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8516 temp
= expand_unop (mode
, parity_optab
, op0
, target
, 1);
8521 /* ??? Can optimize bitwise operations with one arg constant.
8522 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8523 and (a bitwise1 b) bitwise2 b (etc)
8524 but that is probably not worth while. */
8526 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8527 boolean values when we want in all cases to compute both of them. In
8528 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8529 as actual zero-or-1 values and then bitwise anding. In cases where
8530 there cannot be any side effects, better code would be made by
8531 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8532 how to recognize those cases. */
8534 case TRUTH_AND_EXPR
:
8536 this_optab
= and_optab
;
8541 this_optab
= ior_optab
;
8544 case TRUTH_XOR_EXPR
:
8546 this_optab
= xor_optab
;
8553 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8555 if (modifier
== EXPAND_STACK_PARM
)
8557 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8558 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8561 /* Could determine the answer when only additive constants differ. Also,
8562 the addition of one can be handled by changing the condition. */
8569 case UNORDERED_EXPR
:
8576 temp
= do_store_flag (exp
,
8577 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8578 tmode
!= VOIDmode
? tmode
: mode
, 0);
8582 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8583 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8585 && GET_CODE (original_target
) == REG
8586 && (GET_MODE (original_target
)
8587 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8589 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8592 /* If temp is constant, we can just compute the result. */
8593 if (GET_CODE (temp
) == CONST_INT
)
8595 if (INTVAL (temp
) != 0)
8596 emit_move_insn (target
, const1_rtx
);
8598 emit_move_insn (target
, const0_rtx
);
8603 if (temp
!= original_target
)
8605 enum machine_mode mode1
= GET_MODE (temp
);
8606 if (mode1
== VOIDmode
)
8607 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8609 temp
= copy_to_mode_reg (mode1
, temp
);
8612 op1
= gen_label_rtx ();
8613 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8614 GET_MODE (temp
), unsignedp
, op1
);
8615 emit_move_insn (temp
, const1_rtx
);
8620 /* If no set-flag instruction, must generate a conditional
8621 store into a temporary variable. Drop through
8622 and handle this like && and ||. */
8624 case TRUTH_ANDIF_EXPR
:
8625 case TRUTH_ORIF_EXPR
:
8628 || modifier
== EXPAND_STACK_PARM
8629 || ! safe_from_p (target
, exp
, 1)
8630 /* Make sure we don't have a hard reg (such as function's return
8631 value) live across basic blocks, if not optimizing. */
8632 || (!optimize
&& GET_CODE (target
) == REG
8633 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8634 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8637 emit_clr_insn (target
);
8639 op1
= gen_label_rtx ();
8640 jumpifnot (exp
, op1
);
8643 emit_0_to_1_insn (target
);
8646 return ignore
? const0_rtx
: target
;
8648 case TRUTH_NOT_EXPR
:
8649 if (modifier
== EXPAND_STACK_PARM
)
8651 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8652 /* The parser is careful to generate TRUTH_NOT_EXPR
8653 only with operands that are always zero or one. */
8654 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8655 target
, 1, OPTAB_LIB_WIDEN
);
8661 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8663 return expand_expr (TREE_OPERAND (exp
, 1),
8664 (ignore
? const0_rtx
: target
),
8665 VOIDmode
, modifier
);
8668 /* If we would have a "singleton" (see below) were it not for a
8669 conversion in each arm, bring that conversion back out. */
8670 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8671 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8672 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8673 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8675 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8676 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8678 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8679 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8680 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8681 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8682 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8683 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8684 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8685 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8686 return expand_expr (build1 (NOP_EXPR
, type
,
8687 build (COND_EXPR
, TREE_TYPE (iftrue
),
8688 TREE_OPERAND (exp
, 0),
8690 target
, tmode
, modifier
);
8694 /* Note that COND_EXPRs whose type is a structure or union
8695 are required to be constructed to contain assignments of
8696 a temporary variable, so that we can evaluate them here
8697 for side effect only. If type is void, we must do likewise. */
8699 /* If an arm of the branch requires a cleanup,
8700 only that cleanup is performed. */
8703 tree binary_op
= 0, unary_op
= 0;
8705 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8706 convert it to our mode, if necessary. */
8707 if (integer_onep (TREE_OPERAND (exp
, 1))
8708 && integer_zerop (TREE_OPERAND (exp
, 2))
8709 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8713 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8718 if (modifier
== EXPAND_STACK_PARM
)
8720 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8721 if (GET_MODE (op0
) == mode
)
8725 target
= gen_reg_rtx (mode
);
8726 convert_move (target
, op0
, unsignedp
);
8730 /* Check for X ? A + B : A. If we have this, we can copy A to the
8731 output and conditionally add B. Similarly for unary operations.
8732 Don't do this if X has side-effects because those side effects
8733 might affect A or B and the "?" operation is a sequence point in
8734 ANSI. (operand_equal_p tests for side effects.) */
8736 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8737 && operand_equal_p (TREE_OPERAND (exp
, 2),
8738 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8739 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8740 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8741 && operand_equal_p (TREE_OPERAND (exp
, 1),
8742 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8743 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8744 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8745 && operand_equal_p (TREE_OPERAND (exp
, 2),
8746 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8747 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8748 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8749 && operand_equal_p (TREE_OPERAND (exp
, 1),
8750 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8751 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8753 /* If we are not to produce a result, we have no target. Otherwise,
8754 if a target was specified use it; it will not be used as an
8755 intermediate target unless it is safe. If no target, use a
8760 else if (modifier
== EXPAND_STACK_PARM
)
8761 temp
= assign_temp (type
, 0, 0, 1);
8762 else if (original_target
8763 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8764 || (singleton
&& GET_CODE (original_target
) == REG
8765 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8766 && original_target
== var_rtx (singleton
)))
8767 && GET_MODE (original_target
) == mode
8768 #ifdef HAVE_conditional_move
8769 && (! can_conditionally_move_p (mode
)
8770 || GET_CODE (original_target
) == REG
8771 || TREE_ADDRESSABLE (type
))
8773 && (GET_CODE (original_target
) != MEM
8774 || TREE_ADDRESSABLE (type
)))
8775 temp
= original_target
;
8776 else if (TREE_ADDRESSABLE (type
))
8779 temp
= assign_temp (type
, 0, 0, 1);
8781 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8782 do the test of X as a store-flag operation, do this as
8783 A + ((X != 0) << log C). Similarly for other simple binary
8784 operators. Only do for C == 1 if BRANCH_COST is low. */
8785 if (temp
&& singleton
&& binary_op
8786 && (TREE_CODE (binary_op
) == PLUS_EXPR
8787 || TREE_CODE (binary_op
) == MINUS_EXPR
8788 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8789 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8790 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8791 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8792 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8796 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8797 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8798 ? addv_optab
: add_optab
)
8799 : TREE_CODE (binary_op
) == MINUS_EXPR
8800 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8801 ? subv_optab
: sub_optab
)
8802 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8805 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8806 if (singleton
== TREE_OPERAND (exp
, 1))
8807 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8809 cond
= TREE_OPERAND (exp
, 0);
8811 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8813 mode
, BRANCH_COST
<= 1);
8815 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8816 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8817 build_int_2 (tree_log2
8821 (safe_from_p (temp
, singleton
, 1)
8822 ? temp
: NULL_RTX
), 0);
8826 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8827 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8828 unsignedp
, OPTAB_LIB_WIDEN
);
8832 do_pending_stack_adjust ();
8834 op0
= gen_label_rtx ();
8836 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8840 /* If the target conflicts with the other operand of the
8841 binary op, we can't use it. Also, we can't use the target
8842 if it is a hard register, because evaluating the condition
8843 might clobber it. */
8845 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8846 || (GET_CODE (temp
) == REG
8847 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8848 temp
= gen_reg_rtx (mode
);
8849 store_expr (singleton
, temp
,
8850 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8853 expand_expr (singleton
,
8854 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8855 if (singleton
== TREE_OPERAND (exp
, 1))
8856 jumpif (TREE_OPERAND (exp
, 0), op0
);
8858 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8860 start_cleanup_deferral ();
8861 if (binary_op
&& temp
== 0)
8862 /* Just touch the other operand. */
8863 expand_expr (TREE_OPERAND (binary_op
, 1),
8864 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8866 store_expr (build (TREE_CODE (binary_op
), type
,
8867 make_tree (type
, temp
),
8868 TREE_OPERAND (binary_op
, 1)),
8869 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8871 store_expr (build1 (TREE_CODE (unary_op
), type
,
8872 make_tree (type
, temp
)),
8873 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8876 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8877 comparison operator. If we have one of these cases, set the
8878 output to A, branch on A (cse will merge these two references),
8879 then set the output to FOO. */
8881 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8882 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8883 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8884 TREE_OPERAND (exp
, 1), 0)
8885 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8886 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8887 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8889 if (GET_CODE (temp
) == REG
8890 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8891 temp
= gen_reg_rtx (mode
);
8892 store_expr (TREE_OPERAND (exp
, 1), temp
,
8893 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8894 jumpif (TREE_OPERAND (exp
, 0), op0
);
8896 start_cleanup_deferral ();
8897 store_expr (TREE_OPERAND (exp
, 2), temp
,
8898 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8902 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8903 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8905 TREE_OPERAND (exp
, 2), 0)
8906 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8907 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8908 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8910 if (GET_CODE (temp
) == REG
8911 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8912 temp
= gen_reg_rtx (mode
);
8913 store_expr (TREE_OPERAND (exp
, 2), temp
,
8914 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8915 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8917 start_cleanup_deferral ();
8918 store_expr (TREE_OPERAND (exp
, 1), temp
,
8919 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8924 op1
= gen_label_rtx ();
8925 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8927 start_cleanup_deferral ();
8929 /* One branch of the cond can be void, if it never returns. For
8930 example A ? throw : E */
8932 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8933 store_expr (TREE_OPERAND (exp
, 1), temp
,
8934 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8936 expand_expr (TREE_OPERAND (exp
, 1),
8937 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8938 end_cleanup_deferral ();
8940 emit_jump_insn (gen_jump (op1
));
8943 start_cleanup_deferral ();
8945 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8946 store_expr (TREE_OPERAND (exp
, 2), temp
,
8947 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8949 expand_expr (TREE_OPERAND (exp
, 2),
8950 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8953 end_cleanup_deferral ();
8964 /* Something needs to be initialized, but we didn't know
8965 where that thing was when building the tree. For example,
8966 it could be the return value of a function, or a parameter
8967 to a function which lays down in the stack, or a temporary
8968 variable which must be passed by reference.
8970 We guarantee that the expression will either be constructed
8971 or copied into our original target. */
8973 tree slot
= TREE_OPERAND (exp
, 0);
8974 tree cleanups
= NULL_TREE
;
8977 if (TREE_CODE (slot
) != VAR_DECL
)
8981 target
= original_target
;
8983 /* Set this here so that if we get a target that refers to a
8984 register variable that's already been used, put_reg_into_stack
8985 knows that it should fix up those uses. */
8986 TREE_USED (slot
) = 1;
8990 if (DECL_RTL_SET_P (slot
))
8992 target
= DECL_RTL (slot
);
8993 /* If we have already expanded the slot, so don't do
8995 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
9000 target
= assign_temp (type
, 2, 0, 1);
9001 /* All temp slots at this level must not conflict. */
9002 preserve_temp_slots (target
);
9003 SET_DECL_RTL (slot
, target
);
9004 if (TREE_ADDRESSABLE (slot
))
9005 put_var_into_stack (slot
, /*rescan=*/false);
9007 /* Since SLOT is not known to the called function
9008 to belong to its stack frame, we must build an explicit
9009 cleanup. This case occurs when we must build up a reference
9010 to pass the reference as an argument. In this case,
9011 it is very likely that such a reference need not be
9014 if (TREE_OPERAND (exp
, 2) == 0)
9015 TREE_OPERAND (exp
, 2)
9016 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
9017 cleanups
= TREE_OPERAND (exp
, 2);
9022 /* This case does occur, when expanding a parameter which
9023 needs to be constructed on the stack. The target
9024 is the actual stack address that we want to initialize.
9025 The function we call will perform the cleanup in this case. */
9027 /* If we have already assigned it space, use that space,
9028 not target that we were passed in, as our target
9029 parameter is only a hint. */
9030 if (DECL_RTL_SET_P (slot
))
9032 target
= DECL_RTL (slot
);
9033 /* If we have already expanded the slot, so don't do
9035 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
9040 SET_DECL_RTL (slot
, target
);
9041 /* If we must have an addressable slot, then make sure that
9042 the RTL that we just stored in slot is OK. */
9043 if (TREE_ADDRESSABLE (slot
))
9044 put_var_into_stack (slot
, /*rescan=*/true);
9048 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
9049 /* Mark it as expanded. */
9050 TREE_OPERAND (exp
, 1) = NULL_TREE
;
9052 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
9054 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
9061 tree lhs
= TREE_OPERAND (exp
, 0);
9062 tree rhs
= TREE_OPERAND (exp
, 1);
9064 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
9070 /* If lhs is complex, expand calls in rhs before computing it.
9071 That's so we don't compute a pointer and save it over a
9072 call. If lhs is simple, compute it first so we can give it
9073 as a target if the rhs is just a call. This avoids an
9074 extra temp and copy and that prevents a partial-subsumption
9075 which makes bad code. Actually we could treat
9076 component_ref's of vars like vars. */
9078 tree lhs
= TREE_OPERAND (exp
, 0);
9079 tree rhs
= TREE_OPERAND (exp
, 1);
9083 /* Check for |= or &= of a bitfield of size one into another bitfield
9084 of size 1. In this case, (unless we need the result of the
9085 assignment) we can do this more efficiently with a
9086 test followed by an assignment, if necessary.
9088 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9089 things change so we do, this code should be enhanced to
9092 && TREE_CODE (lhs
) == COMPONENT_REF
9093 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
9094 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
9095 && TREE_OPERAND (rhs
, 0) == lhs
9096 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
9097 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
9098 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
9100 rtx label
= gen_label_rtx ();
9102 do_jump (TREE_OPERAND (rhs
, 1),
9103 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
9104 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
9105 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
9106 (TREE_CODE (rhs
) == BIT_IOR_EXPR
9108 : integer_zero_node
)),
9110 do_pending_stack_adjust ();
9115 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
9121 if (!TREE_OPERAND (exp
, 0))
9122 expand_null_return ();
9124 expand_return (TREE_OPERAND (exp
, 0));
9127 case PREINCREMENT_EXPR
:
9128 case PREDECREMENT_EXPR
:
9129 return expand_increment (exp
, 0, ignore
);
9131 case POSTINCREMENT_EXPR
:
9132 case POSTDECREMENT_EXPR
:
9133 /* Faster to treat as pre-increment if result is not used. */
9134 return expand_increment (exp
, ! ignore
, ignore
);
9137 if (modifier
== EXPAND_STACK_PARM
)
9139 /* Are we taking the address of a nested function? */
9140 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
9141 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
9142 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
9143 && ! TREE_STATIC (exp
))
9145 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
9146 op0
= force_operand (op0
, target
);
9148 /* If we are taking the address of something erroneous, just
9150 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
9152 /* If we are taking the address of a constant and are at the
9153 top level, we have to use output_constant_def since we can't
9154 call force_const_mem at top level. */
9156 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
9157 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
9159 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
9162 /* We make sure to pass const0_rtx down if we came in with
9163 ignore set, to avoid doing the cleanups twice for something. */
9164 op0
= expand_expr (TREE_OPERAND (exp
, 0),
9165 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
9166 (modifier
== EXPAND_INITIALIZER
9167 ? modifier
: EXPAND_CONST_ADDRESS
));
9169 /* If we are going to ignore the result, OP0 will have been set
9170 to const0_rtx, so just return it. Don't get confused and
9171 think we are taking the address of the constant. */
9175 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9176 clever and returns a REG when given a MEM. */
9177 op0
= protect_from_queue (op0
, 1);
9179 /* We would like the object in memory. If it is a constant, we can
9180 have it be statically allocated into memory. For a non-constant,
9181 we need to allocate some memory and store the value into it. */
9183 if (CONSTANT_P (op0
))
9184 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
9186 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
9187 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
9188 || GET_CODE (op0
) == PARALLEL
)
9190 /* If the operand is a SAVE_EXPR, we can deal with this by
9191 forcing the SAVE_EXPR into memory. */
9192 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
9194 put_var_into_stack (TREE_OPERAND (exp
, 0),
9196 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
9200 /* If this object is in a register, it can't be BLKmode. */
9201 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9202 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
9204 if (GET_CODE (op0
) == PARALLEL
)
9205 /* Handle calls that pass values in multiple
9206 non-contiguous locations. The Irix 6 ABI has examples
9208 emit_group_store (memloc
, op0
,
9209 int_size_in_bytes (inner_type
));
9211 emit_move_insn (memloc
, op0
);
9217 if (GET_CODE (op0
) != MEM
)
9220 mark_temp_addr_taken (op0
);
9221 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
9223 op0
= XEXP (op0
, 0);
9224 #ifdef POINTERS_EXTEND_UNSIGNED
9225 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9226 && mode
== ptr_mode
)
9227 op0
= convert_memory_address (ptr_mode
, op0
);
9232 /* If OP0 is not aligned as least as much as the type requires, we
9233 need to make a temporary, copy OP0 to it, and take the address of
9234 the temporary. We want to use the alignment of the type, not of
9235 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9236 the test for BLKmode means that can't happen. The test for
9237 BLKmode is because we never make mis-aligned MEMs with
9240 We don't need to do this at all if the machine doesn't have
9241 strict alignment. */
9242 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
9243 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
9245 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
9247 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9250 if (TYPE_ALIGN_OK (inner_type
))
9253 if (TREE_ADDRESSABLE (inner_type
))
9255 /* We can't make a bitwise copy of this object, so fail. */
9256 error ("cannot take the address of an unaligned member");
9260 new = assign_stack_temp_for_type
9261 (TYPE_MODE (inner_type
),
9262 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
9263 : int_size_in_bytes (inner_type
),
9264 1, build_qualified_type (inner_type
,
9265 (TYPE_QUALS (inner_type
)
9266 | TYPE_QUAL_CONST
)));
9268 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
9269 (modifier
== EXPAND_STACK_PARM
9270 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
9275 op0
= force_operand (XEXP (op0
, 0), target
);
9279 && GET_CODE (op0
) != REG
9280 && modifier
!= EXPAND_CONST_ADDRESS
9281 && modifier
!= EXPAND_INITIALIZER
9282 && modifier
!= EXPAND_SUM
)
9283 op0
= force_reg (Pmode
, op0
);
9285 if (GET_CODE (op0
) == REG
9286 && ! REG_USERVAR_P (op0
))
9287 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
9289 #ifdef POINTERS_EXTEND_UNSIGNED
9290 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
9291 && mode
== ptr_mode
)
9292 op0
= convert_memory_address (ptr_mode
, op0
);
9297 case ENTRY_VALUE_EXPR
:
9300 /* COMPLEX type for Extended Pascal & Fortran */
9303 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9306 /* Get the rtx code of the operands. */
9307 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9308 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
9311 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
9315 /* Move the real (op0) and imaginary (op1) parts to their location. */
9316 emit_move_insn (gen_realpart (mode
, target
), op0
);
9317 emit_move_insn (gen_imagpart (mode
, target
), op1
);
9319 insns
= get_insns ();
9322 /* Complex construction should appear as a single unit. */
9323 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9324 each with a separate pseudo as destination.
9325 It's not correct for flow to treat them as a unit. */
9326 if (GET_CODE (target
) != CONCAT
)
9327 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
9335 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9336 return gen_realpart (mode
, op0
);
9339 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9340 return gen_imagpart (mode
, op0
);
9344 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9348 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9351 target
= gen_reg_rtx (mode
);
9355 /* Store the realpart and the negated imagpart to target. */
9356 emit_move_insn (gen_realpart (partmode
, target
),
9357 gen_realpart (partmode
, op0
));
9359 imag_t
= gen_imagpart (partmode
, target
);
9360 temp
= expand_unop (partmode
,
9361 ! unsignedp
&& flag_trapv
9362 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
9363 ? negv_optab
: neg_optab
,
9364 gen_imagpart (partmode
, op0
), imag_t
, 0);
9366 emit_move_insn (imag_t
, temp
);
9368 insns
= get_insns ();
9371 /* Conjugate should appear as a single unit
9372 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9373 each with a separate pseudo as destination.
9374 It's not correct for flow to treat them as a unit. */
9375 if (GET_CODE (target
) != CONCAT
)
9376 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
9383 case TRY_CATCH_EXPR
:
9385 tree handler
= TREE_OPERAND (exp
, 1);
9387 expand_eh_region_start ();
9389 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
9391 expand_eh_region_end_cleanup (handler
);
9396 case TRY_FINALLY_EXPR
:
9398 tree try_block
= TREE_OPERAND (exp
, 0);
9399 tree finally_block
= TREE_OPERAND (exp
, 1);
9401 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
9403 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9404 is not sufficient, so we cannot expand the block twice.
9405 So we play games with GOTO_SUBROUTINE_EXPR to let us
9406 expand the thing only once. */
9407 /* When not optimizing, we go ahead with this form since
9408 (1) user breakpoints operate more predictably without
9409 code duplication, and
9410 (2) we're not running any of the global optimizers
9411 that would explode in time/space with the highly
9412 connected CFG created by the indirect branching. */
9414 rtx finally_label
= gen_label_rtx ();
9415 rtx done_label
= gen_label_rtx ();
9416 rtx return_link
= gen_reg_rtx (Pmode
);
9417 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
9418 (tree
) finally_label
, (tree
) return_link
);
9419 TREE_SIDE_EFFECTS (cleanup
) = 1;
9421 /* Start a new binding layer that will keep track of all cleanup
9422 actions to be performed. */
9423 expand_start_bindings (2);
9424 target_temp_slot_level
= temp_slot_level
;
9426 expand_decl_cleanup (NULL_TREE
, cleanup
);
9427 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9429 preserve_temp_slots (op0
);
9430 expand_end_bindings (NULL_TREE
, 0, 0);
9431 emit_jump (done_label
);
9432 emit_label (finally_label
);
9433 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9434 emit_indirect_jump (return_link
);
9435 emit_label (done_label
);
9439 expand_start_bindings (2);
9440 target_temp_slot_level
= temp_slot_level
;
9442 expand_decl_cleanup (NULL_TREE
, finally_block
);
9443 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9445 preserve_temp_slots (op0
);
9446 expand_end_bindings (NULL_TREE
, 0, 0);
9452 case GOTO_SUBROUTINE_EXPR
:
9454 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9455 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9456 rtx return_address
= gen_label_rtx ();
9457 emit_move_insn (return_link
,
9458 gen_rtx_LABEL_REF (Pmode
, return_address
));
9460 emit_label (return_address
);
9465 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9468 return get_exception_pointer (cfun
);
9471 /* Function descriptors are not valid except for as
9472 initialization constants, and should not be expanded. */
9476 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
9479 /* Here to do an ordinary binary operator, generating an instruction
9480 from the optab already placed in `this_optab'. */
9482 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
9484 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
9485 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9487 if (modifier
== EXPAND_STACK_PARM
)
9489 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9490 unsignedp
, OPTAB_LIB_WIDEN
);
9496 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9497 when applied to the address of EXP produces an address known to be
9498 aligned more than BIGGEST_ALIGNMENT. */
9501 is_aligning_offset (offset
, exp
)
9505 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9506 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9507 || TREE_CODE (offset
) == NOP_EXPR
9508 || TREE_CODE (offset
) == CONVERT_EXPR
9509 || TREE_CODE (offset
) == WITH_RECORD_EXPR
)
9510 offset
= TREE_OPERAND (offset
, 0);
9512 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9513 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9514 if (TREE_CODE (offset
) != BIT_AND_EXPR
9515 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9516 || compare_tree_int (TREE_OPERAND (offset
, 1), BIGGEST_ALIGNMENT
) <= 0
9517 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9520 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9521 It must be NEGATE_EXPR. Then strip any more conversions. */
9522 offset
= TREE_OPERAND (offset
, 0);
9523 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9524 || TREE_CODE (offset
) == NOP_EXPR
9525 || TREE_CODE (offset
) == CONVERT_EXPR
)
9526 offset
= TREE_OPERAND (offset
, 0);
9528 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9531 offset
= TREE_OPERAND (offset
, 0);
9532 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9533 || TREE_CODE (offset
) == NOP_EXPR
9534 || TREE_CODE (offset
) == CONVERT_EXPR
)
9535 offset
= TREE_OPERAND (offset
, 0);
9537 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9538 whose type is the same as EXP. */
9539 return (TREE_CODE (offset
) == ADDR_EXPR
9540 && (TREE_OPERAND (offset
, 0) == exp
9541 || (TREE_CODE (TREE_OPERAND (offset
, 0)) == PLACEHOLDER_EXPR
9542 && (TREE_TYPE (TREE_OPERAND (offset
, 0))
9543 == TREE_TYPE (exp
)))));
9546 /* Return the tree node if an ARG corresponds to a string constant or zero
9547 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9548 in bytes within the string that ARG is accessing. The type of the
9549 offset will be `sizetype'. */
9552 string_constant (arg
, ptr_offset
)
9558 if (TREE_CODE (arg
) == ADDR_EXPR
9559 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9561 *ptr_offset
= size_zero_node
;
9562 return TREE_OPERAND (arg
, 0);
9564 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9566 tree arg0
= TREE_OPERAND (arg
, 0);
9567 tree arg1
= TREE_OPERAND (arg
, 1);
9572 if (TREE_CODE (arg0
) == ADDR_EXPR
9573 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9575 *ptr_offset
= convert (sizetype
, arg1
);
9576 return TREE_OPERAND (arg0
, 0);
9578 else if (TREE_CODE (arg1
) == ADDR_EXPR
9579 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9581 *ptr_offset
= convert (sizetype
, arg0
);
9582 return TREE_OPERAND (arg1
, 0);
9589 /* Expand code for a post- or pre- increment or decrement
9590 and return the RTX for the result.
9591 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9594 expand_increment (exp
, post
, ignore
)
9600 tree incremented
= TREE_OPERAND (exp
, 0);
9601 optab this_optab
= add_optab
;
9603 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9604 int op0_is_copy
= 0;
9605 int single_insn
= 0;
9606 /* 1 means we can't store into OP0 directly,
9607 because it is a subreg narrower than a word,
9608 and we don't dare clobber the rest of the word. */
9611 /* Stabilize any component ref that might need to be
9612 evaluated more than once below. */
9614 || TREE_CODE (incremented
) == BIT_FIELD_REF
9615 || (TREE_CODE (incremented
) == COMPONENT_REF
9616 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9617 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9618 incremented
= stabilize_reference (incremented
);
9619 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9620 ones into save exprs so that they don't accidentally get evaluated
9621 more than once by the code below. */
9622 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9623 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9624 incremented
= save_expr (incremented
);
9626 /* Compute the operands as RTX.
9627 Note whether OP0 is the actual lvalue or a copy of it:
9628 I believe it is a copy iff it is a register or subreg
9629 and insns were generated in computing it. */
9631 temp
= get_last_insn ();
9632 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9634 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9635 in place but instead must do sign- or zero-extension during assignment,
9636 so we copy it into a new register and let the code below use it as
9639 Note that we can safely modify this SUBREG since it is know not to be
9640 shared (it was made by the expand_expr call above). */
9642 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9645 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9649 else if (GET_CODE (op0
) == SUBREG
9650 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9652 /* We cannot increment this SUBREG in place. If we are
9653 post-incrementing, get a copy of the old value. Otherwise,
9654 just mark that we cannot increment in place. */
9656 op0
= copy_to_reg (op0
);
9661 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9662 && temp
!= get_last_insn ());
9663 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9665 /* Decide whether incrementing or decrementing. */
9666 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9667 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9668 this_optab
= sub_optab
;
9670 /* Convert decrement by a constant into a negative increment. */
9671 if (this_optab
== sub_optab
9672 && GET_CODE (op1
) == CONST_INT
)
9674 op1
= GEN_INT (-INTVAL (op1
));
9675 this_optab
= add_optab
;
9678 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9679 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9681 /* For a preincrement, see if we can do this with a single instruction. */
9684 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9685 if (icode
!= (int) CODE_FOR_nothing
9686 /* Make sure that OP0 is valid for operands 0 and 1
9687 of the insn we want to queue. */
9688 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9689 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9690 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9694 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9695 then we cannot just increment OP0. We must therefore contrive to
9696 increment the original value. Then, for postincrement, we can return
9697 OP0 since it is a copy of the old value. For preincrement, expand here
9698 unless we can do it with a single insn.
9700 Likewise if storing directly into OP0 would clobber high bits
9701 we need to preserve (bad_subreg). */
9702 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9704 /* This is the easiest way to increment the value wherever it is.
9705 Problems with multiple evaluation of INCREMENTED are prevented
9706 because either (1) it is a component_ref or preincrement,
9707 in which case it was stabilized above, or (2) it is an array_ref
9708 with constant index in an array in a register, which is
9709 safe to reevaluate. */
9710 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9711 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9712 ? MINUS_EXPR
: PLUS_EXPR
),
9715 TREE_OPERAND (exp
, 1));
9717 while (TREE_CODE (incremented
) == NOP_EXPR
9718 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9720 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9721 incremented
= TREE_OPERAND (incremented
, 0);
9724 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9725 return post
? op0
: temp
;
9730 /* We have a true reference to the value in OP0.
9731 If there is an insn to add or subtract in this mode, queue it.
9732 Queueing the increment insn avoids the register shuffling
9733 that often results if we must increment now and first save
9734 the old value for subsequent use. */
9736 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9737 op0
= stabilize (op0
);
9740 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9741 if (icode
!= (int) CODE_FOR_nothing
9742 /* Make sure that OP0 is valid for operands 0 and 1
9743 of the insn we want to queue. */
9744 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9745 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9747 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9748 op1
= force_reg (mode
, op1
);
9750 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9752 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9754 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9755 ? force_reg (Pmode
, XEXP (op0
, 0))
9756 : copy_to_reg (XEXP (op0
, 0)));
9759 op0
= replace_equiv_address (op0
, addr
);
9760 temp
= force_reg (GET_MODE (op0
), op0
);
9761 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9762 op1
= force_reg (mode
, op1
);
9764 /* The increment queue is LIFO, thus we have to `queue'
9765 the instructions in reverse order. */
9766 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9767 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9772 /* Preincrement, or we can't increment with one simple insn. */
9774 /* Save a copy of the value before inc or dec, to return it later. */
9775 temp
= value
= copy_to_reg (op0
);
9777 /* Arrange to return the incremented value. */
9778 /* Copy the rtx because expand_binop will protect from the queue,
9779 and the results of that would be invalid for us to return
9780 if our caller does emit_queue before using our result. */
9781 temp
= copy_rtx (value
= op0
);
9783 /* Increment however we can. */
9784 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9785 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9787 /* Make sure the value is stored into OP0. */
9789 emit_move_insn (op0
, op1
);
9794 /* Generate code to calculate EXP using a store-flag instruction
9795 and return an rtx for the result. EXP is either a comparison
9796 or a TRUTH_NOT_EXPR whose operand is a comparison.
9798 If TARGET is nonzero, store the result there if convenient.
9800 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9803 Return zero if there is no suitable set-flag instruction
9804 available on this machine.
9806 Once expand_expr has been called on the arguments of the comparison,
9807 we are committed to doing the store flag, since it is not safe to
9808 re-evaluate the expression. We emit the store-flag insn by calling
9809 emit_store_flag, but only expand the arguments if we have a reason
9810 to believe that emit_store_flag will be successful. If we think that
9811 it will, but it isn't, we have to simulate the store-flag with a
9812 set/jump/set sequence. */
9815 do_store_flag (exp
, target
, mode
, only_cheap
)
9818 enum machine_mode mode
;
9822 tree arg0
, arg1
, type
;
9824 enum machine_mode operand_mode
;
9828 enum insn_code icode
;
9829 rtx subtarget
= target
;
9832 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9833 result at the end. We can't simply invert the test since it would
9834 have already been inverted if it were valid. This case occurs for
9835 some floating-point comparisons. */
9837 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9838 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9840 arg0
= TREE_OPERAND (exp
, 0);
9841 arg1
= TREE_OPERAND (exp
, 1);
9843 /* Don't crash if the comparison was erroneous. */
9844 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9847 type
= TREE_TYPE (arg0
);
9848 operand_mode
= TYPE_MODE (type
);
9849 unsignedp
= TREE_UNSIGNED (type
);
9851 /* We won't bother with BLKmode store-flag operations because it would mean
9852 passing a lot of information to emit_store_flag. */
9853 if (operand_mode
== BLKmode
)
9856 /* We won't bother with store-flag operations involving function pointers
9857 when function pointers must be canonicalized before comparisons. */
9858 #ifdef HAVE_canonicalize_funcptr_for_compare
9859 if (HAVE_canonicalize_funcptr_for_compare
9860 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9861 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9863 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9864 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9865 == FUNCTION_TYPE
))))
9872 /* Get the rtx comparison code to use. We know that EXP is a comparison
9873 operation of some type. Some comparisons against 1 and -1 can be
9874 converted to comparisons with zero. Do so here so that the tests
9875 below will be aware that we have a comparison with zero. These
9876 tests will not catch constants in the first operand, but constants
9877 are rarely passed as the first operand. */
9879 switch (TREE_CODE (exp
))
9888 if (integer_onep (arg1
))
9889 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9891 code
= unsignedp
? LTU
: LT
;
9894 if (! unsignedp
&& integer_all_onesp (arg1
))
9895 arg1
= integer_zero_node
, code
= LT
;
9897 code
= unsignedp
? LEU
: LE
;
9900 if (! unsignedp
&& integer_all_onesp (arg1
))
9901 arg1
= integer_zero_node
, code
= GE
;
9903 code
= unsignedp
? GTU
: GT
;
9906 if (integer_onep (arg1
))
9907 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9909 code
= unsignedp
? GEU
: GE
;
9912 case UNORDERED_EXPR
:
9938 /* Put a constant second. */
9939 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9941 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9942 code
= swap_condition (code
);
9945 /* If this is an equality or inequality test of a single bit, we can
9946 do this by shifting the bit being tested to the low-order bit and
9947 masking the result with the constant 1. If the condition was EQ,
9948 we xor it with 1. This does not require an scc insn and is faster
9949 than an scc insn even if we have it. */
9951 if ((code
== NE
|| code
== EQ
)
9952 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9953 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9955 tree inner
= TREE_OPERAND (arg0
, 0);
9956 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
9959 /* If INNER is a right shift of a constant and it plus BITNUM does
9960 not overflow, adjust BITNUM and INNER. */
9962 if (TREE_CODE (inner
) == RSHIFT_EXPR
9963 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
9964 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
9965 && bitnum
< TYPE_PRECISION (type
)
9966 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
9967 bitnum
- TYPE_PRECISION (type
)))
9969 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
9970 inner
= TREE_OPERAND (inner
, 0);
9973 /* If we are going to be able to omit the AND below, we must do our
9974 operations as unsigned. If we must use the AND, we have a choice.
9975 Normally unsigned is faster, but for some machines signed is. */
9976 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
9977 #ifdef LOAD_EXTEND_OP
9978 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
9984 if (! get_subtarget (subtarget
)
9985 || GET_MODE (subtarget
) != operand_mode
9986 || ! safe_from_p (subtarget
, inner
, 1))
9989 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
9992 op0
= expand_shift (RSHIFT_EXPR
, operand_mode
, op0
,
9993 size_int (bitnum
), subtarget
, ops_unsignedp
);
9995 if (GET_MODE (op0
) != mode
)
9996 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
9998 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
9999 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
10000 ops_unsignedp
, OPTAB_LIB_WIDEN
);
10002 /* Put the AND last so it can combine with more things. */
10003 if (bitnum
!= TYPE_PRECISION (type
) - 1)
10004 op0
= expand_and (mode
, op0
, const1_rtx
, subtarget
);
10009 /* Now see if we are likely to be able to do this. Return if not. */
10010 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
10013 icode
= setcc_gen_code
[(int) code
];
10014 if (icode
== CODE_FOR_nothing
10015 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
10017 /* We can only do this if it is one of the special cases that
10018 can be handled without an scc insn. */
10019 if ((code
== LT
&& integer_zerop (arg1
))
10020 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
10022 else if (BRANCH_COST
>= 0
10023 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
10024 && TREE_CODE (type
) != REAL_TYPE
10025 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
10026 != CODE_FOR_nothing
)
10027 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
10028 != CODE_FOR_nothing
)))
10034 if (! get_subtarget (target
)
10035 || GET_MODE (subtarget
) != operand_mode
10036 || ! safe_from_p (subtarget
, arg1
, 1))
10039 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
10040 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
10043 target
= gen_reg_rtx (mode
);
10045 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10046 because, if the emit_store_flag does anything it will succeed and
10047 OP0 and OP1 will not be used subsequently. */
10049 result
= emit_store_flag (target
, code
,
10050 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
10051 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
10052 operand_mode
, unsignedp
, 1);
10057 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
10058 result
, 0, OPTAB_LIB_WIDEN
);
10062 /* If this failed, we have to do this with set/compare/jump/set code. */
10063 if (GET_CODE (target
) != REG
10064 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
10065 target
= gen_reg_rtx (GET_MODE (target
));
10067 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
10068 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
10069 operand_mode
, NULL_RTX
);
10070 if (GET_CODE (result
) == CONST_INT
)
10071 return (((result
== const0_rtx
&& ! invert
)
10072 || (result
!= const0_rtx
&& invert
))
10073 ? const0_rtx
: const1_rtx
);
10075 /* The code of RESULT may not match CODE if compare_from_rtx
10076 decided to swap its operands and reverse the original code.
10078 We know that compare_from_rtx returns either a CONST_INT or
10079 a new comparison code, so it is safe to just extract the
10080 code from RESULT. */
10081 code
= GET_CODE (result
);
10083 label
= gen_label_rtx ();
10084 if (bcc_gen_fctn
[(int) code
] == 0)
10087 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
10088 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
10089 emit_label (label
);
10095 /* Stubs in case we haven't got a casesi insn. */
10096 #ifndef HAVE_casesi
10097 # define HAVE_casesi 0
10098 # define gen_casesi(a, b, c, d, e) (0)
10099 # define CODE_FOR_casesi CODE_FOR_nothing
10102 /* If the machine does not have a case insn that compares the bounds,
10103 this means extra overhead for dispatch tables, which raises the
10104 threshold for using them. */
10105 #ifndef CASE_VALUES_THRESHOLD
10106 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10107 #endif /* CASE_VALUES_THRESHOLD */
10110 case_values_threshold ()
10112 return CASE_VALUES_THRESHOLD
;
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   INDEX_TYPE is the type of the switch index expression, INDEX_EXPR the
   expression itself, MINVAL the lowest case value, and RANGE the span
   of case values (max - min).  TABLE_LABEL and DEFAULT_LABEL are the
   jump-table label and the out-of-range target, respectively.  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      /* The index is wider than SImode, so truncation could lose bits.
	 Rebase it to zero and range-check it in the original mode
	 before truncating.  */
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      /* Jump to DEFAULT_LABEL if RANGE < INDEX, i.e. INDEX is out of
	 bounds (unsigned compare also catches a negative rebased index).  */
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      /* The index is no wider than SImode; widen it if necessary.  */
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  /* Force each operand into a form accepted by the casesi pattern's
     operand predicates, copying into a register of the right mode
     when the predicate rejects the expanded rtx.  */
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
10191 /* Attempt to generate a tablejump instruction; same concept. */
10192 #ifndef HAVE_tablejump
10193 #define HAVE_tablejump 0
10194 #define gen_tablejump(x, y) (0)
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Track the largest jump table emitted in this function; other
     passes consult cfun->max_jumptable_ents.  */
  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  /* The jump table is read-only, so mark the load as unchanging.  */
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
/* Attempt to generate a tablejump for a switch on INDEX_EXPR (of type
   INDEX_TYPE) over case values MINVAL..MINVAL+RANGE, dispatching
   through TABLE_LABEL and falling back to DEFAULT_LABEL when out of
   range.  Returns 1 if successful, 0 if the target has no tablejump
   pattern.  */

int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  /* Rebase the index so the lowest case value maps to zero; the
     folded tree form lets constant indices collapse at compile time.  */
  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  /* RANGE is passed to do_tablejump converted to the index's mode so
     the bounds comparison happens in a single consistent mode.  */
  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  Only vector modes make sense here.  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
10332 #include "gt-expr.h"