/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
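/* Illustrative note (not part of the original source): on a target
   whose stack grows downward, STACK_PUSH_CODE is PRE_DEC, so pushing
   an SImode value is expanded roughly as

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI src))

   i.e. the stack pointer is decremented by the operand size before the
   store; on an upward-growing stack the same push uses PRE_INC.  */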
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((void *, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx,
					 rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
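/* Worked example (illustrative, not in the original source): on a
   target with 4-byte word_mode, MOVE_MAX 4, no HAVE_movstr* pattern
   and not optimizing for size, MOVE_RATIO is 15.  Copying a 16-byte
   struct aligned to 32 bits costs move_by_pieces_ninsns (16, 32) == 4
   word moves, and 4 < 15, so MOVE_BY_PIECES_P is true and the copy is
   expanded as inline moves rather than a movstr insn or a memcpy
   libcall.  */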
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
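/* Illustrative note (assumed target behavior, not from the original
   source): on a strict-alignment target such as classic SPARC,
   SLOW_UNALIGNED_ACCESS (SImode, 8) is nonzero, so the by-pieces code
   below caps the usable alignment and falls back to narrower moves;
   on x86 it evaluates to 0 and full-word unaligned moves are used.  */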
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
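/* Usage sketch (illustrative only; the variable names are invented):
   to expand `v++' for a variable living in V_RTX, an expander can do

       rtx queued = enqueue_insn (v_rtx,
				  gen_add2_insn (v_rtx, const1_rtx));

   and hand QUEUED to consumers that need the pre-increment value; the
   addition itself is emitted later, when emit_queue flushes
   pending_chain.  */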
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
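/* Worked example (illustrative only): for `a = b++;' the expander
   queues B's increment with enqueue_insn and uses the QUEUED rtx for
   the right-hand side.  When the assignment is emitted,
   protect_from_queue returns B itself (copied to a pseudo) if the add
   has not been emitted yet, or the QUEUED_COPY pseudo capturing B's
   old value if it has, so the pre-increment value is preserved either
   way.  */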
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real != from_real)
    abort ();
  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
873 /* Handle expanding beyond a word. */
874 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
875 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
882 enum machine_mode lowpart_mode
;
883 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
885 /* Try converting directly if the insn is supported. */
886 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
889 /* If FROM is a SUBREG, put it into a register. Do this
890 so that we always generate the same set of insns for
891 better cse'ing; if an intermediate assignment occurred,
892 we won't be doing the operation directly on the SUBREG. */
893 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
894 from
= force_reg (from_mode
, from
);
895 emit_unop_insn (code
, to
, from
, equiv_code
);
898 /* Next, try converting via full word. */
899 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
900 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
901 != CODE_FOR_nothing
))
903 if (GET_CODE (to
) == REG
)
904 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
905 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
906 emit_unop_insn (code
, to
,
907 gen_lowpart (word_mode
, to
), equiv_code
);
911 /* No special multiword conversion insn; do it by hand. */
914 /* Since we will turn this into a no conflict block, we must ensure
915 that the source does not overlap the target. */
917 if (reg_overlap_mentioned_p (to
, from
))
918 from
= force_reg (from_mode
, from
);
920 /* Get a copy of FROM widened to a word, if necessary. */
921 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
922 lowpart_mode
= word_mode
;
924 lowpart_mode
= from_mode
;
926 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
928 lowpart
= gen_lowpart (lowpart_mode
, to
);
929 emit_move_insn (lowpart
, lowfrom
);
931 /* Compute the value to put in each remaining word. */
933 fill_value
= const0_rtx
;
938 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
939 && STORE_FLAG_VALUE
== -1)
941 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
943 fill_value
= gen_reg_rtx (word_mode
);
944 emit_insn (gen_slt (fill_value
));
950 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
951 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
953 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
957 /* Fill the remaining words. */
958 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
960 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
961 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
966 if (fill_value
!= subword
)
967 emit_move_insn (subword
, fill_value
);
970 insns
= get_insns ();
973 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
974 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
978 /* Truncating multi-word to a word or less. */
979 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
980 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
982 if (!((GET_CODE (from
) == MEM
983 && ! MEM_VOLATILE_P (from
)
984 && direct_load
[(int) to_mode
]
985 && ! mode_dependent_address_p (XEXP (from
, 0)))
986 || GET_CODE (from
) == REG
987 || GET_CODE (from
) == SUBREG
))
988 from
= force_reg (from_mode
, from
);
989 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
1129 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1131 /* Convert directly if that works. */
1132 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1133 != CODE_FOR_nothing
)
1136 from
= force_not_mem (from
);
1138 emit_unop_insn (code
, to
, from
, equiv_code
);
1143 enum machine_mode intermediate
;
1147 /* Search for a mode to convert via. */
1148 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1149 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1150 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1151 != CODE_FOR_nothing
)
1152 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1153 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1154 GET_MODE_BITSIZE (intermediate
))))
1155 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1156 != CODE_FOR_nothing
))
1158 convert_move (to
, convert_to_mode (intermediate
, from
,
1159 unsignedp
), unsignedp
);
1163 /* No suitable intermediate mode.
1164 Generate what we need with shifts. */
1165 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1166 - GET_MODE_BITSIZE (from_mode
), 0);
1167 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1168 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1170 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1173 emit_move_insn (to
, tmp
);
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
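/* Usage sketch (illustrative, not from the original source): to widen
   a QImode pseudo QI_REG into SImode with zero extension, a caller
   would do

       rtx dst = gen_reg_rtx (SImode);
       convert_move (dst, qi_reg, 1);

   and the dispatch above either emits a direct extension insn, goes
   through an intermediate mode, or falls back to a shift pair.  */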
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
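/* Worked example (illustrative, not from the original source):
   converting (const_int 0x80) from QImode to HImode signed, WIDTH is
   8, so

       val &= ((HOST_WIDE_INT) 1 << 8) - 1;	   =>  val == 0x80
       val & ((HOST_WIDE_INT) 1 << 7)		   =>  nonzero, hence
       val |= (HOST_WIDE_INT) (-1) << 8;	   =>  val == ...fff80

   and gen_int_mode yields (const_int -128), the correct HImode sign
   extension.  With UNSIGNEDP set the last step is skipped and the
   result is (const_int 128).  */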
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
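/* Worked example (illustrative, assumed values): with MOVE_MAX_PIECES 8
   and an 8-byte HOST_WIDE_INT, STORE_MAX_PIECES is MIN (8, 16) == 8,
   so store_by_pieces can emit at most 8-byte immediate stores; the
   2 * sizeof (HOST_WIDE_INT) term exists because GCC represents wider
   immediates with two host words.  */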
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (len, align)
     unsigned HOST_WIDE_INT len;
     unsigned int align ATTRIBUTE_UNUSED;
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (to, from, len, align, endp)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
     int endp;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
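/* Worked example (illustrative, assumed values): with MOVE_MAX 4 and
   full word alignment, a 7-byte block is counted as 7/4 = 1 SImode
   move (3 bytes left), then 3/2 = 1 HImode move (1 byte left), then
   1 QImode move, so move_by_pieces_ninsns returns 3.  */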
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
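/* Usage sketch (illustrative, not from the original source): a caller
   expanding a plain 16-byte structure assignment would write

       emit_block_move (dst_mem, src_mem, GEN_INT (16), BLOCK_OP_NORMAL);

   and let the dispatch above pick move_by_pieces, a movstr pattern, a
   memcpy/bcopy libcall, or the fallback byte loop, in that order.  */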
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (asmspec)
     const char *asmspec;
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
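/* Illustrative note (not in the original source): a two-word value
   split between registers might be represented as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
		  (expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx clones that shape, replacing each hard register with a
   fresh pseudo of the same mode while keeping the byte offsets.  */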
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && GET_CODE (src) == REG)
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (dst, src)
     rtx dst, src;
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
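
/* Sketch: given two matching groups

	dst = (parallel [(expr_list (reg:SI 10) (const_int 0))])
	src = (parallel [(expr_list (reg:SI 99) (const_int 0))])

   emit_group_move emits the single insn (set (reg:SI 10) (reg:SI 99)).
   The register numbers are invented for the example; the offsets are
   assumed to match pairwise, which the XVECLEN check above only
   partially enforces.  */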
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]),
					      bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
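
/* Worked example of the big-endian correction above: for a 6-byte
   structure on a 32-bit BYTES_BIG_ENDIAN target (UNITS_PER_WORD == 4,
   BITS_PER_WORD == 32),

	bytes % UNITS_PER_WORD = 2
	big_endian_correction  = 32 - 2 * 8 = 16

   so the first extraction starts at bit 16 of the source word, skipping
   the two unused high-order bytes of the partial word.  */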
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
     void *constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
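
/* A minimal caller sketch.  The shape of any CONSTFUN callback is

	static rtx
	my_constfun (data, offset, mode)
	     void *data;
	     HOST_WIDE_INT offset;
	     enum machine_mode mode;
	{
	  return const0_rtx;
	}

   returning the constant for the piece at OFFSET in mode MODE.  A
   caller checks can_store_by_pieces (len, my_constfun, NULL, align)
   before committing to store_by_pieces.  my_constfun is a made-up
   name used only for this illustration; builtin_memset_read_str in
   builtins.c is a real example of such a callback.  */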
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (to, len, constfun, constfundata, align, endp)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
     void *constfundata;
     unsigned int align;
     int endp;
{
  struct store_by_pieces data;

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     void *data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr,
							   data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
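
/* For illustration, clearing LEN == 7 bytes at 4-byte alignment on a
   32-bit target runs this routine once with SImode (one 4-byte store),
   then once each with HImode and QImode (2- and 1-byte stores), giving
   roughly

	(set (mem:SI ...) (const_int 0))
	(set (mem:HI ...) (const_int 0))
	(set (mem:QI ...) (const_int 0))

   store_by_pieces_1 drives the mode selection; this routine only emits
   the stores for one mode.  */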
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && CLEAR_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
	;
      else
	retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
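
/* Typical use, as elsewhere in the compiler: zeroing a BLKmode MEM
   OBJECT whose size is known at compile time looks like

	clear_storage (object, GEN_INT (int_size_in_bytes (type)));

   The three strategies above are tried in order: inline stores
   (clear_by_pieces), a clrstr expander, then a memset/bzero libcall.  */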
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (object, size, align)
     rtx object, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (object, BLKmode))
	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op1;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op1 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[1].predicate;
	  if (pred != 0 && ! (*pred) (op1, mode))
	    op1 = copy_to_mode_reg (mode, op1);

	  pat = GEN_FCN ((int) code) (object, op1, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset or bzero.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (object, size)
     rtx object, size;
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bzero this way as well.  */

  object_tree = make_tree (ptr_type_node, object);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (asmspec)
     const char *asmspec;
{
  if (!block_clear_fn)
    {
      tree fn, args;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memset");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   integer_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bzero");
	  args = build_function_type_list (void_type_node, ptr_type_node,
					   unsigned_type_node, NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}

static tree
clear_storage_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
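
/* The CALL_EXPR built by clear_storage_via_libcall corresponds to the
   source-level call memset (object, 0, size) when TARGET_MEM_FUNCTIONS,
   and to bzero (object, size) otherwise, with OBJECT and SIZE already
   copied into fresh pseudos as described above.  */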
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
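
/* Usage sketch: loading the constant 42 into a fresh SImode pseudo is

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   If the constant were not LEGITIMATE_CONSTANT_P for the target, the
   code above would have spilled it to the constant pool and loaded it
   from memory instead.  */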
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = GET_MODE_INNER (mode))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
      if (stack
	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
	      != GET_MODE_SIZE (submode)))
	{
	  rtx temp;
	  HOST_WIDE_INT offset1, offset2;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT (PUSH_ROUNDING
					(GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
	  offset1 = 0;
	  offset2 = GET_MODE_SIZE (submode);
#else
	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
		     + GET_MODE_SIZE (submode));
#endif

	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset1))),
			  gen_realpart (submode, y));
	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset2))),
			  gen_imagpart (submode, y));
	}
      else
#endif
      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_imagpart (submode, y));
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_realpart (submode, y));
#else
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_realpart (submode, y));
	  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			  gen_imagpart (submode, y));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p
		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p
		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode
		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);
		      rtx cmem = adjust_address (mem, mode, 0);

		      cfun->cannot_inline
			= N_("function using short complex types cannot be inline");

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

	  emit_move_insn (realpart_x, realpart_y);
	  emit_move_insn (imagpart_x, imagpart_y);
	}

      return get_last_insn ();
    }
  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
	   && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
	  && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
	tmode = CCmode;
      else
	for (tmode = QImode; tmode != VOIDmode;
	     tmode = GET_MODE_WIDER_MODE (tmode))
	  if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
	    break;

      if (tmode == VOIDmode)
	abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
	 may call change_address which is not appropriate if we were
	 called when a reload was in progress.  We don't have to worry
	 about changing the address since the size in bytes is supposed to
	 be the same.  Copy the MEM to change the mode and move any
	 substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
	{
	  x = gen_lowpart_common (tmode, x1);
	  if (x == 0 && GET_CODE (x1) == MEM)
	    {
	      x = adjust_address_nv (x1, tmode, 0);
	      copy_replacements (x1, x);
	    }

	  y = gen_lowpart_common (tmode, y1);
	  if (y == 0 && GET_CODE (y1) == MEM)
	    {
	      y = adjust_address_nv (y1, tmode, 0);
	      copy_replacements (y1, y);
	    }
	}
      else
	{
	  x = gen_lowpart (tmode, x);
	  y = gen_lowpart (tmode, y);
	}

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  rtx temp;
	  enum rtx_code code;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT (PUSH_ROUNDING
					(GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

	  code = GET_CODE (XEXP (x, 0));

	  /* Just hope that small offsets off SP are OK.  */
	  if (code == POST_INC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (-((HOST_WIDE_INT)
					    GET_MODE_SIZE (GET_MODE (x)))));
	  else if (code == POST_DEC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  else
	    temp = stack_pointer_rtx;

	  x = change_address (x, VOIDmode, temp);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
	continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
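
/* Example of the transformation above: a DFmode move of the constant
   2.5, which is exactly representable in SFmode, can become

	(set (reg:DF ...) (float_extend:DF (mem:SF ...)))

   i.e. an extension from a narrower constant-pool entry, provided the
   target has a suitable extendsfdf2 pattern.  A constant like 0.1,
   which is not exact in SFmode, is left alone.  */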
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
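
/* Worked example: push_block (GEN_INT (16), 0, 0) emits
   anti_adjust_stack (GEN_INT (16)) and then returns the block address.
   When STACK_GROWS_DOWNWARD, the result is virtual_outgoing_args_rtx
   itself; otherwise it is virtual_outgoing_args_rtx - 16, since the
   new block then sits below the args pointer, per the else arm above.  */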
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     enum machine_mode mode;
     rtx x;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space,
		alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (target, 0);
	    }

	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);
	  if (type != 0)
	    {
	      set_mem_attributes (dest, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (dest, 0);
	    }

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_type (TREE_TYPE (to),
								 offset));
	}

      if (GET_CODE (to_rtx) == MEM)
	{
	  /* If the field is at offset zero, we could have been given the
	     DECL_RTX of the parent struct.  Don't munge it.  */
	  to_rtx = shallow_copy_rtx (to_rtx);

	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	}

      /* Deal with volatile and readonly fields.  The former is only done
	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_VOLATILE_P (to_rtx) = 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast for HPUX compiler.  */
			     ? ((enum machine_mode)
				TYPE_MODE (TREE_TYPE (to)))
			     : VOIDmode),
			    unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (POINTER_TYPE_P (TREE_TYPE (to))
	      && GET_MODE (to_rtx) != GET_MODE (value))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
	emit_library_call (memmove_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));
      else
	emit_library_call (bcopy_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			   XEXP (to_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (integer_type_node),
					    size,
					    TREE_UNSIGNED (integer_type_node)),
			   TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx
);
4451 return want_value
? to_rtx
: NULL_RTX
;
4454 /* Compute FROM and store the value in the rtx we got. */
4457 result
= store_expr (from
, to_rtx
, want_value
);
4458 preserve_temp_slots (result
);
4461 return want_value
? result
: NULL_RTX
;
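
/* Illustrative sketch, not part of the compiler proper: a typical
   caller (for instance the MODIFY_EXPR handling in expand_expr) hands
   expand_assignment the two operand trees.  LHS and RHS below are
   hypothetical placeholders; only the expand_assignment signature
   above is taken from this file.

	tree lhs, rhs;	-- operands of the assignment

	-- expand for side effects only; WANT_VALUE == 0 makes the
	-- call return NULL_RTX, and SUGGEST_REG is ignored:
	expand_assignment (lhs, rhs, 0, 0);

	-- expand and keep the stored value as an rtx:
	rtx result = expand_assignment (lhs, rhs, 1, 0);  */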
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */
rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      if (want_value)
	abort ();
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target),
			      (want_value & 2
			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
			    (want_value & 2
			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
	dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
	   && GET_CODE (target) == MEM
	   && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if ((want_value & 1) == 0
	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      ((*lang_hooks.types.signed_or_unsigned_type)
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert ((*lang_hooks.types.type_for_mode)
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
	 now so it gets done only once.  Strictly speaking, this is
	 only necessary if the MEM is volatile, or if the address
	 overlaps TARGET.  But not performing the load twice also
	 reduces the amount of rtl we generate and then have to CSE.  */
      if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
	{
	  if (GET_MODE (temp) != VOIDmode)
	    {
	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
		SUBREG_PROMOTED_UNSIGNED_P (target));
	    }
	  else
	    temp = convert_modes (GET_MODE (target),
				  GET_MODE (SUBREG_REG (target)),
				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target),
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	    but TARGET is not valid memory reference, TEMP will differ
	    from TARGET although it is really the same location.  */
      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
	  || target != DECL_RTL_IF_SET (exp))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front ends' (C++) expr_size hook
	 aborts on objects that are not supposed to be bit-copied or
	 copied at all.  */
      && expr_size (exp) != const0_rtx)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (want_value & 2
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (want_value & 2
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TREE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (want_value & 2
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TREE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (want_value & 2
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
	   && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
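
/* The STRING_CST path above implements the familiar C semantics for
   initializing a char array from a shorter string constant.  The
   following standalone sketch (plain ISO C, not compiler code) shows
   the equivalent effect of emit_block_move plus clear_storage:  */
#if 0
#include <string.h>

static void
init_like_store_expr (char *target, size_t target_size,
		      const char *str, size_t str_size)
{
  /* Copy MIN (target size, string length) bytes...  */
  size_t copy = target_size < str_size ? target_size : str_size;

  memcpy (target, str, copy);			/* emit_block_move */
  /* ...then clear whatever is left of the target.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);  /* clear_storage */
}
#endif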
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
	   elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
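
/* For example, is_zeros_p returns 1 for the constants 0 and 0.0, for a
   complex constant with zero real and imaginary parts, and for the
   constructor { 0, { 0, 0.0 } }; an empty SET_TYPE constructor also
   counts as all zeros.  It returns 0 for { 0, 1 }.  */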
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
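
/* Worked example: for the constructor { 0, 0, 0, 5 } we count
   elts = 4 and zeros = 3; since 4 * 3 >= 3 * 4 holds, the initializer
   is "mostly zeros", and callers prefer to clear the whole object
   first and then store only the nonzero elements.  */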
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zeroed.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared
	  && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (! cleared && size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  rtx xtarget = target;

	  if (readonly_fields_p (type))
	    {
	      xtarget = copy_rtx (xtarget);
	      RTX_UNCHANGING_P (xtarget) = 1;
	    }

	  clear_storage (xtarget, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (CONTAINS_PLACEHOLDER_P (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	      to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = (*lang_hooks.types.type_for_size)
		    (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif

	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
	      to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
	   || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	{
	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
	}

      const_bounds_p = (TYPE_MIN_VALUE (domain)
			&& TYPE_MAX_VALUE (domain)
			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
			&& host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is
	 a static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  unsignedp = TREE_UNSIGNED (elttype);
	  mode = TYPE_MODE (elttype);
	  if (mode == BLKmode)
	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

		      if (GET_CODE (target) == MEM
			  && !MEM_KEEP_ALIAS_SET_P (target)
			  && TREE_CODE (type) == ARRAY_TYPE
			  && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
		  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  xtarget = offset_address (target, pos_rtx,
					    highest_pow2_factor (position));
		  xtarget = adjust_address (xtarget, mode, 0);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      xtarget = offset_address (target,
					expand_expr (position, 0, VOIDmode, 0),
					highest_pow2_factor (position));
	      xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
		  && TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}

	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared, get_alias_set (elttype));
	    }
	}
    }

  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (!cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
		   || ! host_integerp (TREE_PURPOSE (elt), 0)
		   || (tree_low_cst (TREE_VALUE (elt), 0)
		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
		       != (HOST_WIDE_INT) nbits))))
	  clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (GET_CODE (target) == REG)
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

	  /* Optimization:  If startbit and endbit are constants divisible
	     by BITS_PER_UNIT, call memset instead.  */
	  if (TARGET_MEM_FUNCTIONS
	      && TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
	    emit_library_call (setbits_libfunc, LCT_NORMAL,
			       VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (GET_CODE (target) == REG)
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
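
/* Source-level view of the clearing heuristics above: for
     int v[100] = { 1 };
   the constructor supplies 1 of 100 elements, so the whole array is
   cleared with a single clear_storage call and then only the store of
   1 into v[0] is emitted, instead of 100 individual element stores.  */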
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);

	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
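
/* For example, an assignment to the bit-field F in
     struct { unsigned int f : 3; } s;
   reaches store_field with MODE == VOIDmode, BITSIZE == 3 and BITPOS
   at the field's position, and is carried out by store_bit_field.
   An aligned, byte-addressable field instead takes the final branch
   above and ends up as an ordinary store through adjust_address.  */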
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = DECL_FIELD_OFFSET (field);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;
	  else if (CONTAINS_PLACEHOLDER_P (this_offset))
	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree array = TREE_OPERAND (exp, 0);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is, pass our
	     component to one.  */
	  if (CONTAINS_PLACEHOLDER_P (index))
	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
	  if (CONTAINS_PLACEHOLDER_P (unit_size))
	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
	{
	  tree new = find_placeholder (exp, &placeholder_ptr);

	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
	     We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;
	  else
	    exp = new;

	  continue;
	}

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
	 conversions that don't change the mode, and all view conversions
	 except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
			    > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
			   && STRICT_ALIGNMENT
			   && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       < BIGGEST_ALIGNMENT)
			   && (TYPE_ALIGN_OK (TREE_TYPE (exp))
			       || TYPE_ALIGN_OK (TREE_TYPE
						 (TREE_OPERAND (exp, 0))))))
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
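
/* Worked example: for the reference a.b[i].c, get_inner_reference
   peels the COMPONENT_REFs and the ARRAY_REF and returns the VAR_DECL
   for `a'; *PBITSIZE and *PBITPOS describe the field `c' relative to
   `a', the variable part of the array index ends up in *POFFSET (in
   units), and *PVOLATILEP is set if any level of the reference was
   volatile.  */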
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }

  if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
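
/* E.g. handed (plus:SI (reg:SI 60) (const_int 4)), force_operand
   emits the addition through expand_simple_binop and returns a pseudo
   holding the sum, so the caller always gets back a REG, SUBREG, MEM
   or constant it can use directly as an instruction operand.  */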
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '2':
    case '<':
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
      break;

    default:
      break;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
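/* A sketch of the intended use (hypothetical caller): before reusing TARGET
   to hold an intermediate result, a caller typically checks
       if (target == 0 || ! safe_from_p (target, exp, 1))
	 target = gen_reg_rtx (mode);
   so a zero return merely forces a fresh temporary; it never produces wrong
   code.  */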
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR: case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
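/* Worked example (illustrative): for the tree (i * 12) + 8, the MULT_EXPR
   contributes 1 * 4 = 4 (4 being the largest power of two dividing 12), the
   constant 8 contributes 8, and the PLUS_EXPR returns MIN (4, 8) = 4, so the
   whole expression is known to be a multiple of 4.  */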
/* Similar, except that it is known that the expression must be a multiple
   of the alignment of TYPE.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (type, exp)
     tree type;
     tree exp;
{
  unsigned HOST_WIDE_INT type_align, factor;

  factor = highest_pow2_factor (exp);
  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  return MAX (factor, type_align);
}
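/* E.g. (illustrative): if EXP is only known to be a multiple of 2 but TYPE
   is aligned to 8 bytes, the result is MAX (2, 8) = 8, since a valid object
   of TYPE cannot sit at a smaller alignment.  */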
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
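/* Illustrative use: Ada-style self-referential types embed a
   PLACEHOLDER_EXPR inside a size or bound expression; when such an
   expression is expanded under a WITH_RECORD_EXPR, the record object pushed
   on placeholder_list is located here and substituted for the placeholder,
   possibly via an INDIRECT_REF when only a pointer to it is on the list.  */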
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.  */
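/* A typical call (illustrative only) is
       temp = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
   which lets expand_expr pick both the location and the mode; callers that
   care about either must move or convert TEMP themselves afterward.  */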
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
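  /* For instance (illustrative): a discarded expression like
     "(void) (a + b);" with no side effects returns const0_rtx immediately
     above, while "(void) (a + f ());" still expands both operands for their
     side effects and then throws the value away.  */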
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Labels in containing functions, or labels used from initializers,
	   must be forced.  */
	if (modifier == EXPAND_INITIALIZER
	    || (function != current_function_decl
		&& function != inline_function_decl
		&& function != 0))
	  temp = force_label_rtx (exp);
	else
	  temp = label_rtx (exp);

	temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }
    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  (*lang_hooks.mark_addressable) (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
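      /* E.g. (illustrative): on a RISC target a bare SYMBOL_REF for a string
	 literal may not be a legitimate memory address, so the MEM returned
	 by output_constant_def is rewritten above so its address is first
	 loaded into a register -- unless the caller explicitly asked for a
	 constant address.  */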
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	location_t saved_loc = input_location;
	input_filename = EXPR_WFL_FILENAME (exp);
	input_line = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, input_line);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_location = saved_loc;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp, /*rescan=*/true);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      promote_mode (type, mode, &unsignedp, 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
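      /* Illustrative effect: for a tree like SAVE_EXPR <n * 4> used twice,
	 the first expansion stores n * 4 into SAVE_EXPR_RTL and every later
	 expansion simply returns that same rtx, so the multiplication is
	 evaluated only once.  */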
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0)
	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
	tree placeholder_expr = 0;

	exp = find_placeholder (exp, &placeholder_expr);
	if (exp == 0)
	  abort ();

	placeholder_list = TREE_CHAIN (placeholder_expr);
	temp = expand_expr (exp, original_target, tmode, modifier);
	placeholder_list = old_list;
	return temp;
      }

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);

	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piece-meal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ((TREE_CODE (type) == VECTOR_TYPE
			     && !is_zeros_p (exp))
			    || ! mostly_zeros_p (exp)))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_WRITE)
	  return gen_int_mode (TREE_STRING_POINTER (string)
			       [TREE_INT_CST_LOW (index)], mode);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return gen_int_mode (TREE_STRING_POINTER (array)
			       [TREE_INT_CST_LOW (index)], mode);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    int i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
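      /* Worked example (illustrative): with optimization enabled and
	     static const int tbl[3] = {10, 20, 30};
	 a read of tbl[1] satisfies the VAR_DECL/DECL_INITIAL tests above and
	 is folded at expansion time to the constant 20, so no memory load is
	 emitted.  */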
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
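      /* Worked example (illustrative): extracting an unsigned 3-bit field
	 from a CONSTRUCTOR value masks with (1 << 3) - 1 = 7; a signed field
	 is instead shifted left and then arithmetically right by
	 GET_MODE_BITSIZE (imode) - 3 so the sign bit gets replicated.  */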
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0),
					/*rescan=*/true);
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one element arrays having the same mode as its element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
		     && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       ((*lang_hooks.types.type_for_mode)
					(ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;

	/* Evaluate the interior expression.  */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }

	/* Collect the data for the note.  */
	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
	vtbl_ref = plus_constant (vtbl_ref,
				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
	/* Discard the initial CONST that was added.  */
	vtbl_ref = XEXP (vtbl_ref, 0);

	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */

	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	     && TREE_CODE (set_low_bound) == INTEGER_CST
	     && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
				  CLEANUP_EH_ONLY (exp));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_hooks.expand_expr) (exp, original_target,
					      tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are integral and within
         a word, we can use gen_lowpart.  If neither is true, make sure the
         operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
               && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          if (TREE_ADDRESSABLE (exp))
            abort ();

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (GET_CODE (op0) == MEM)
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              if (TREE_ADDRESSABLE (exp))
                abort ();

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;
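      /* Illustrative note (example, not from the surrounding code):
         reinterpreting a float as a 32-bit int via VIEW_CONVERT_EXPR takes
         the MEM path above, since the gen_lowpart shortcut requires both
         modes to be MODE_INT; either way the bits are left untouched.  */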
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;
      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }
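          /* Illustrative note (example, not from the surrounding code): for
             P = &ARR[5] with an int ARR on a 32-bit target, plus_constant
             above folds the scaled offset into the symbol, yielding
             (const (plus (symbol_ref "arr") (const_int 20))).  */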
          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
          if (! operand_equal_p (TREE_OPERAND (exp, 0),
                                 TREE_OPERAND (exp, 1), 0))
            op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
          else
            op1 = op0;
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
                             TREE_OPERAND (exp, 1), 0))
        op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                           VOIDmode, modifier);
      else
        op1 = op0;
    both_summands:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */

      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
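      /* Illustrative note (example, not from the surrounding code): given
         OP0 = (reg R) and OP1 = (plus (reg S) (const_int 4)), the
         reassociation above yields OP0 = (plus (reg S) (reg R)) and
         OP1 = (const_int 4), so the result is the canonical
         (plus (plus S R) (const_int 4)) suited to indexed addressing.  */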
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                                 modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                                 modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
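      /* Illustrative note (example, not from the surrounding code): this is
         what lets a static initializer built from two symbolic addresses,
         such as the byte difference of two static objects, expand to a
         (minus (symbol_ref ...) (symbol_ref ...)) the assembler resolves.  */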
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
        {
          op1 = negate_rtx (mode, op1);
          goto both_summands;
        }

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && host_integerp (TREE_OPERAND (exp, 1), 0))
        {
          tree exp1 = TREE_OPERAND (exp, 1);

          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* If we knew for certain that this is arithmetic for an array
             reference, and we knew the bounds of the array, then we could
             apply the distributive law across (PLUS X C) for constant C.
             Without such knowledge, we risk overflowing the computation
             when both X and C are large, but X+C isn't.  */
          /* ??? Could perhaps special-case EXP being unsigned and C being
             positive.  In that case we are certain that X+C is no smaller
             than X and so the transformed expression will overflow iff the
             original would have.  */

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_low_cst (exp1, 0),
                                             TYPE_MODE (TREE_TYPE (exp1))));
        }
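      /* Illustrative note (example, not from the surrounding code): under
         EXPAND_SUM, `i * 4' used in an address returns the unreduced
         (mult (reg I) (const_int 4)) so the caller can fold it into a
         base + index*scale addressing mode where the target has one.  */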
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (! operand_equal_p (TREE_OPERAND (exp, 0),
                             TREE_OPERAND (exp, 1), 0))
        op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      else
        op1 = op0;
      return expand_mult (mode, op0, op1, target, unsignedp);
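      /* Illustrative note (example, not from the surrounding code):
         `(int) a * (int) b' with A and B unsigned short can use
         umul_widen_optab (HImode operands, SImode result) when the target
         provides such a pattern, instead of extending both operands and
         performing a full SImode multiply.  */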
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
         saving the expensive divide.  If not, combine will rebuild the
         original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && TREE_CODE (type) == REAL_TYPE
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
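      /* Illustrative note (example, not from the surrounding code): with
         -funsafe-math-optimizations, `x / y' becomes `x * (1.0 / y)'; if
         `1.0 / y' appears repeatedly, CSE can reuse the reciprocal and
         replace several divides with one divide and cheap multiplies.  */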
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
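      /* Illustrative note (example, not from the surrounding code): on a
         machine with no smax pattern, MAX_EXPR expands to the branchy form

             target = op0;
             if (target >= op1) goto lab;
             target = op1;
           lab:

         where the comparison is the GE/LE jump emitted just above.  */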
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;
    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;
    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
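      /* Illustrative note (example, not from the surrounding code): for
         `i != 0' with I in a register, the sequence above is

             temp = i;
             if (temp == 0) goto lab;
             temp = 1;
           lab:

         which leaves temp equal to (i != 0) with no store-flag insn.  */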
      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0
              || modifier == EXPAND_STACK_PARM
              || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
          && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
              == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
        {
          tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
          tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

          if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
               && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
                  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
            return expand_expr (build1 (NOP_EXPR, type,
                                        build (COND_EXPR, TREE_TYPE (iftrue),
                                               TREE_OPERAND (exp, 0),
                                               iftrue, iffalse)),
                                target, tmode, modifier);
        }
      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
         only that cleanup is performed.  */

      {
        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             modifier);
                return const0_rtx;
              }

            if (modifier == EXPAND_STACK_PARM)
              target = 0;
            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;

            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (modifier == EXPAND_STACK_PARM)
          temp = assign_temp (type, 0, 0, 1);
        else if (original_target
                 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
                     || (singleton && GET_CODE (original_target) == REG
                         && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
                         && original_target == var_rtx (singleton)))
                 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
                 && (! can_conditionally_move_p (mode)
                     || GET_CODE (original_target) == REG
                     || TREE_ADDRESSABLE (type))
#endif
                 && (GET_CODE (original_target) != MEM
                     || TREE_ADDRESSABLE (type)))
          temp = original_target;
        else if (TREE_ADDRESSABLE (type))
          abort ();
        else
          temp = assign_temp (type, 0, 0, 1);
        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
        if (temp && singleton && binary_op
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR)
            && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
                : integer_onep (TREE_OPERAND (binary_op, 1)))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            tree cond;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? addv_optab : add_optab)
                            : TREE_CODE (binary_op) == MINUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? subv_optab : sub_optab)
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : xor_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).  */
            if (singleton == TREE_OPERAND (exp, 1))
              cond = invert_truthvalue (TREE_OPERAND (exp, 0));
            else
              cond = TREE_OPERAND (exp, 0);

            result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
                                           ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
              result = expand_shift (LSHIFT_EXPR, mode, result,
                                     build_int_2 (tree_log2
                                                  (TREE_OPERAND
                                                   (binary_op, 1)),
                                                  0),
                                     (safe_from_p (temp, singleton, 1)
                                      ? temp : NULL_RTX), 0);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
          }
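        /* Illustrative note (example, not from the surrounding code):
           `x ? a + 4 : a' on a store-flag-capable target becomes
           a + ((x != 0) << 2): do_store_flag computes x != 0, the shift
           scales it by log2 of the constant, and expand_binop adds it to
           A with no branch at all.  */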
        do_pending_stack_adjust ();
        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp,
                            modifier == EXPAND_STACK_PARM ? 2 : 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
          {
            if (GET_CODE (temp) == REG
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            jumpif (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 2), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
          {
            if (GET_CODE (temp) == REG
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 1), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();

            /* One branch of the cond can be void, if it never returns. For
               example A ? throw : E  */
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 1), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            end_cleanup_deferral ();
            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            start_cleanup_deferral ();
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 2), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        end_cleanup_deferral ();

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;

        return temp;
      }
    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which is laid out in the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree cleanups = NULL_TREE;
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (! ignore)
          target = original_target;

        /* Set this here so that if we get a target that refers to a
           register variable that's already been used, put_reg_into_stack
           knows that it should fix up those uses.  */
        TREE_USED (slot) = 1;

        if (target == 0)
          {
            if (DECL_RTL_SET_P (slot))
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_temp (type, 2, 0, 1);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                SET_DECL_RTL (slot, target);
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot, /*rescan=*/false);

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2)
                    = (*lang_hooks.maybe_build_cleanup) (slot);
                cleanups = TREE_OPERAND (exp, 2);
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL_SET_P (slot))
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                SET_DECL_RTL (slot, target);
                /* If we must have an addressable slot, then make sure that
                   the RTL that we just stored in slot is OK.  */
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot, /*rescan=*/true);
              }
          }

        exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

        expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

        return target;
      }
    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        return temp;
      }

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a
           call.  If lhs is simple, compute it first so we can give it
           as a target if the rhs is just a call.  This avoids an
           extra temp and copy and that prevents a partial-subsumption
           which makes bad code.  Actually we could treat
           component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        temp = 0;

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        return temp;
      }
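      /* Illustrative note (example, not from the surrounding code): for
         `s.a |= s.b' with one-bit fields A and B, the MODIFY_EXPR code
         above tests S.B and conditionally stores 1 into S.A, rather than
         doing a read-modify-write of the containing word.  */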
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0
          && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
          && ! TREE_STATIC (exp))
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      /* If we are taking the address of a constant and are at the
         top level, we have to use output_constant_def since we can't
         call force_const_mem at top level.  */
      else if (cfun == 0
               && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
                   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
                       == 'c')))
        op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
             clever and returns a REG when given a MEM.  */
          op0 = protect_from_queue (op0, 1);

          /* We would like the object in memory.  If it is a constant, we can
             have it be statically allocated into memory.  For a non-constant,
             we need to allocate some memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
                   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
            {
              /* If the operand is a SAVE_EXPR, we can deal with this by
                 forcing the SAVE_EXPR into memory.  */
              if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                {
                  put_var_into_stack (TREE_OPERAND (exp, 0),
                                      /*rescan=*/true);
                  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                }
              else
                {
                  /* If this object is in a register, it can't be BLKmode.  */
                  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
                  rtx memloc = assign_temp (inner_type, 1, 1, 1);

                  if (GET_CODE (op0) == PARALLEL)
                    /* Handle calls that pass values in multiple
                       non-contiguous locations.  The Irix 6 ABI has examples
                       of this.  */
                    emit_group_store (memloc, op0,
                                      int_size_in_bytes (inner_type));
                  else
                    emit_move_insn (memloc, op0);

                  op0 = memloc;
                }
            }

          if (GET_CODE (op0) != MEM)
            abort ();

          mark_temp_addr_taken (op0);
          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
                  && mode == ptr_mode)
                op0 = convert_memory_address (ptr_mode, op0);
#endif
              return op0;
            }

          /* If OP0 is not aligned at least as much as the type requires, we
             need to make a temporary, copy OP0 to it, and take the address of
             the temporary.  We want to use the alignment of the type, not of
             the operand.  Note that this is incorrect for FUNCTION_TYPE, but
             the test for BLKmode means that can't happen.  The test for
             BLKmode is because we never make mis-aligned MEMs with
             non-BLKmode.

             We don't need to do this at all if the machine doesn't have
             strict alignment.  */
          if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  > MEM_ALIGN (op0))
              && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx new;

              if (TYPE_ALIGN_OK (inner_type))
                abort ();

              if (TREE_ADDRESSABLE (inner_type))
                {
                  /* We can't make a bitwise copy of this object, so fail.  */
                  error ("cannot take the address of an unaligned member");
                  return const0_rtx;
                }

              new = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
                 : int_size_in_bytes (inner_type),
                 1, build_qualified_type (inner_type,
                                          (TYPE_QUALS (inner_type)
                                           | TYPE_QUAL_CONST)));

              emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
                               (modifier == EXPAND_STACK_PARM
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              op0 = new;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr
          && GET_CODE (op0) != REG
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
          && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
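      /* Illustrative note (example, not from the surrounding code): under
         EXPAND_SUM, `&a[i]' returns the bare address RTX extracted from the
         MEM so the caller can keep folding it; otherwise the address is
         forced into a register and marked as a pointer (mark_reg_pointer)
         with the pointed-to type's alignment.  */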
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx insns;

        /* Get the rtx code of the operands.  */
        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
        op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

        start_sequence ();

        /* Move the real (op0) and imaginary (op1) parts to their location.  */
        emit_move_insn (gen_realpart (mode, target), op0);
        emit_move_insn (gen_imagpart (mode, target), op1);

        insns = get_insns ();
        end_sequence ();

        /* Complex construction should appear as a single unit.  */
        /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
        enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
        rtx imag_t;
        rtx insns;

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        if (! target)
          target = gen_reg_rtx (mode);

        start_sequence ();

        /* Store the realpart and the negated imagpart to target.  */
        emit_move_insn (gen_realpart (partmode, target),
                        gen_realpart (partmode, op0));

        imag_t = gen_imagpart (partmode, target);
        temp = expand_unop (partmode,
                            ! unsignedp && flag_trapv
                            && (GET_MODE_CLASS (partmode) == MODE_INT)
                            ? negv_optab : neg_optab,
                            gen_imagpart (partmode, op0), imag_t, 0);
        if (temp != imag_t)
          emit_move_insn (imag_t, temp);

        insns = get_insns ();
        end_sequence ();

        /* Conjugate should appear as a single unit
           If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit.  */
        if (GET_CODE (target) != CONCAT)
          emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
        else
          emit_insn (insns);

        return target;
      }
    case TRY_CATCH_EXPR:
      {
        tree handler = TREE_OPERAND (exp, 1);

        expand_eh_region_start ();

        op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

        expand_eh_region_end_cleanup (handler);

        return op0;
      }

    case TRY_FINALLY_EXPR:
      {
        tree try_block = TREE_OPERAND (exp, 0);
        tree finally_block = TREE_OPERAND (exp, 1);

        if (!optimize || unsafe_for_reeval (finally_block) > 1)
          {
            /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
               is not sufficient, so we cannot expand the block twice.
               So we play games with GOTO_SUBROUTINE_EXPR to let us
               expand the thing only once.  */
            /* When not optimizing, we go ahead with this form since
               (1) user breakpoints operate more predictably without
               code duplication, and
               (2) we're not running any of the global optimizers
               that would explode in time/space with the highly
               connected CFG created by the indirect branching.  */

            rtx finally_label = gen_label_rtx ();
            rtx done_label = gen_label_rtx ();
            rtx return_link = gen_reg_rtx (Pmode);
            tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
                                  (tree) finally_label, (tree) return_link);
            TREE_SIDE_EFFECTS (cleanup) = 1;

            /* Start a new binding layer that will keep track of all cleanup
               actions to be performed.  */
            expand_start_bindings (2);
            target_temp_slot_level = temp_slot_level;

            expand_decl_cleanup (NULL_TREE, cleanup);
            op0 = expand_expr (try_block, target, tmode, modifier);

            preserve_temp_slots (op0);
            expand_end_bindings (NULL_TREE, 0, 0);
            emit_jump (done_label);
            emit_label (finally_label);
            expand_expr (finally_block, const0_rtx, VOIDmode, 0);
            emit_indirect_jump (return_link);
            emit_label (done_label);
          }
        else
          {
            expand_start_bindings (2);
            target_temp_slot_level = temp_slot_level;

            expand_decl_cleanup (NULL_TREE, finally_block);
            op0 = expand_expr (try_block, target, tmode, modifier);

            preserve_temp_slots (op0);
            expand_end_bindings (NULL_TREE, 0, 0);
          }

        return op0;
      }
:
9649 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9650 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9651 rtx return_address
= gen_label_rtx ();
9652 emit_move_insn (return_link
,
9653 gen_rtx_LABEL_REF (Pmode
, return_address
));
9655 emit_label (return_address
);
9660 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9663 return get_exception_pointer (cfun
);
9666 /* Function descriptors are not valid except for as
9667 initialization constants, and should not be expanded. */
9671 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
);
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (offset, exp)
     tree offset;
     tree exp;
{
  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR
         || TREE_CODE (offset) == WITH_RECORD_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
     whose type is the same as EXP.  */
  return (TREE_CODE (offset) == ADDR_EXPR
          && (TREE_OPERAND (offset, 0) == exp
              || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
                  && (TREE_TYPE (TREE_OPERAND (offset, 0))
                      == TREE_TYPE (exp)))));
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg1);
          return TREE_OPERAND (arg0, 0);
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
        {
          *ptr_offset = convert (sizetype, arg0);
          return TREE_OPERAND (arg1, 0);
        }
    }

  return 0;
}
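/* Illustrative note (example, not from the surrounding code): applied to the
   tree for `"hello" + 2', string_constant returns the STRING_CST "hello"
   and sets *PTR_OFFSET to 2, which lets callers such as the string builtin
   expanders fold length computations at compile time.  */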
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode)
          && (*insn_data[icode].operand[2].predicate) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_data[icode].operand[0].predicate) (op0, mode)
          && (*insn_data[icode].operand[1].predicate) (op0, mode))
        {
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = replace_equiv_address (op0, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
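/* As an illustration (this is not emitted literally), the set/jump/set
   fallback at the end of this function amounts to

	target = 1;
	if (OP0 <cond> OP1) goto label;
	target = 0;
     label:

   with the two constants interchanged when the result must be
   inverted.  */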
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
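  /* For example, `(x & 4) != 0' becomes `(x >> 2) & 1', and `(x & 4) == 0'
     becomes `((x >> 2) & 1) ^ 1'; when the tested bit is the sign bit,
     the trailing AND can be omitted entirely, since the unsigned shift
     already leaves only that bit.  */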
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
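      /* For example, `x < 0' is just the sign bit, which emit_store_flag
	 can produce with a shift and no scc insn; likewise `x == 0' and
	 `x != 0' can be synthesized when an abs or ffs insn is
	 available.  */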
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;
  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
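/* In other words, a switch needs at least 4 (with casesi) or 5 case
   values before the switch expander will consider emitting a dispatch
   table instead of a sequence of compare-and-branch insns.  */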
unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
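  /* For illustration: if the index expression is wider than SImode, we
     cannot simply truncate it.  The code below subtracts MINVAL in the
     wide mode, branches to DEFAULT_LABEL when RANGE compares LTU against
     the adjusted index (i.e. the index is out of range), and only then
     truncates to SImode.  */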
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
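  /* For example, for a case range of 3 .. 7 the lower bound 3 has
     already been subtracted, so RANGE is 4; an original index of 2
     becomes (unsigned) -1 and an index of 8 becomes 5, and both
     compare GTU against 4, branching to DEFAULT_LABEL.  */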
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
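/* E.g. V2DImode can be reported as valid even without vector hardware,
   provided DImode moves exist: a V2DI object is then handled as a pair
   of DIs.  */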
int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
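/* For example, a VECTOR_CST holding {1, 2, 3, 4} with V4SImode type
   yields (const_vector:V4SI [1 2 3 4]); if the constant supplies fewer
   elements than the mode has, the remainder are filled with zeros, as
   the loops below show.  */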
static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"