1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
/* Target-configuration defaults used throughout this file.
   NOTE(review): this extraction dropped several lines (the embedded
   original line numbers jump); in particular the matching #else/#endif
   lines for these conditionals are missing and must be restored from
   the original file before compiling.  */
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
/* Direction the stack pointer moves for a push insn, when the target
   does not define it explicitly.  */
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
72 #define STACK_PUSH_CODE PRE_INC
76 /* Assume that case vectors are not pc-relative. */
77 #ifndef CASE_VECTOR_PC_RELATIVE
78 #define CASE_VECTOR_PC_RELATIVE 0
81 /* Convert defined/undefined to boolean. */
82 #ifdef TARGET_MEM_FUNCTIONS
83 #undef TARGET_MEM_FUNCTIONS
84 #define TARGET_MEM_FUNCTIONS 1
86 #define TARGET_MEM_FUNCTIONS 0
90 /* If this is nonzero, we do not bother generating VOLATILE
91 around volatile memory references, and we are willing to
92 output indirect addresses. If cse is to follow, we reject
93 indirect addresses so a useful potential cse is generated;
94 if it is used only once, instruction combination will produce
95 the same indirect address eventually. */
98 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
99 tree placeholder_list
= 0;
101 /* This structure is used by move_by_pieces to describe the move to
103 struct move_by_pieces
/* NOTE(review): most members of this struct (to, from, to_addr,
   from_addr, autinc_to, autinc_from, reverse, explicit_inc_to, ...)
   were dropped by this extraction — the fields are referenced later
   in move_by_pieces.  Only three survive below.  */
/* > 0 / < 0 when an explicit post/pre increment of the source address
   is emitted; 0 otherwise (set in move_by_pieces).  */
112 int explicit_inc_from
;
/* Number of bytes remaining to move.  */
113 unsigned HOST_WIDE_INT len
;
/* Current byte offset within the blocks being moved.  */
114 HOST_WIDE_INT offset
;
118 /* This structure is used by store_by_pieces to describe the clear to
121 struct store_by_pieces
/* NOTE(review): leading members of this struct were dropped by this
   extraction; restore from the original file.  */
/* Number of bytes remaining to store.  */
127 unsigned HOST_WIDE_INT len
;
/* Current byte offset within the destination block.  */
128 HOST_WIDE_INT offset
;
/* Callback producing the constant value to store at a given offset in
   a given machine mode.  */
129 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
/* Forward declarations for the file-static helpers defined below.
   NOTE(review): parameter lists here were split across lines and some
   prototype fragments dropped by the extraction (e.g. the tail of the
   move_by_pieces_ninsns prototype); compare against the original.  */
134 static rtx
enqueue_insn (rtx
, rtx
);
135 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
137 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
138 struct move_by_pieces
*);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movstr (rtx
, rtx
, rtx
, unsigned);
141 static rtx
emit_block_move_via_libcall (rtx
, rtx
, rtx
);
142 static tree
emit_block_move_libcall_fn (int);
143 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
144 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
145 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
146 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
147 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
148 struct store_by_pieces
*);
149 static bool clear_storage_via_clrstr (rtx
, rtx
, unsigned);
150 static rtx
clear_storage_via_libcall (rtx
, rtx
);
151 static tree
clear_storage_libcall_fn (int);
152 static rtx
compress_float_constant (rtx
, rtx
);
153 static rtx
get_subtarget (rtx
);
154 static int is_zeros_p (tree
);
155 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
156 HOST_WIDE_INT
, enum machine_mode
,
157 tree
, tree
, int, int);
158 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
159 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
160 tree
, enum machine_mode
, int, tree
, int);
161 static rtx
var_rtx (tree
);
163 static unsigned HOST_WIDE_INT
highest_pow2_factor (tree
);
164 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree
, tree
);
166 static int is_aligning_offset (tree
, tree
);
167 static rtx
expand_increment (tree
, int, int);
168 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
169 enum expand_modifier
);
170 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
, int);
172 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
174 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
175 static rtx
const_vector_from_tree (tree
);
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
/* Filled in by init_expr_once below; indexed by (int) mode.  */
181 static char direct_load
[NUM_MACHINE_MODES
];
182 static char direct_store
[NUM_MACHINE_MODES
];
184 /* Record for each mode whether we can float-extend from memory. */
186 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
/* NOTE(review): the #endif lines closing these three #ifndef blocks
   were dropped by this extraction.  */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
195 /* This macro is used to determine whether clear_by_pieces should be
196 called to clear storage. */
197 #ifndef CLEAR_BY_PIECES_P
198 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
199 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
202 /* This macro is used to determine whether store_by_pieces should be
203 called to "memset" storage with byte values other than zero, or
204 to "memcpy" storage when the source is a constant string. */
205 #ifndef STORE_BY_PIECES_P
206 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
209 /* This array records the insn_code of insns to perform block moves. */
210 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
212 /* This array records the insn_code of insns to perform block clears. */
213 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
215 /* These arrays record the insn_code of two different kinds of insns
216 to perform block compares. */
217 enum insn_code cmpstr_optab
[NUM_MACHINE_MODES
];
218 enum insn_code cmpmem_optab
[NUM_MACHINE_MODES
];
220 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
221 struct file_stack
*expr_wfl_stack
;
223 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
225 #ifndef SLOW_UNALIGNED_ACCESS
226 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
229 /* This is run once per compilation to set up which modes can be used
230 directly in memory and to initialize the block move optab. */
/* NOTE(review): this extraction dropped interior lines of this function
   (return type, braces, local declarations for mem/mem1/reg/insn/pat/
   regno/num_clobbers, loop bodies); the embedded original line numbers
   jump.  Code below is preserved verbatim — restore missing lines from
   the original file before compiling.  */
233 init_expr_once (void)
236 enum machine_mode mode
;
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
245 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
247 /* A scratch register we can modify in-place below to avoid
248 useless RTL allocations. */
249 reg
= gen_rtx_REG (VOIDmode
, -1);
/* Build a dummy insn with a (set (nil) (nil)) pattern; the SET_SRC and
   SET_DEST slots are overwritten per mode and fed to recog below.  */
251 insn
= rtx_alloc (INSN
);
252 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
253 PATTERN (insn
) = pat
;
/* Probe every machine mode: can a hard register in that mode be loaded
   from / stored to memory with a recognized single insn?  */
255 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
256 mode
= (enum machine_mode
) ((int) mode
+ 1))
260 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
261 PUT_MODE (mem
, mode
);
262 PUT_MODE (mem1
, mode
);
263 PUT_MODE (reg
, mode
);
265 /* See if there is some register that can be used in this mode and
266 directly loaded or stored from memory. */
268 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
269 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
270 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
273 if (! HARD_REGNO_MODE_OK (regno
, mode
))
279 SET_DEST (pat
) = reg
;
280 if (recog (pat
, insn
, &num_clobbers
) >= 0)
281 direct_load
[(int) mode
] = 1;
283 SET_SRC (pat
) = mem1
;
284 SET_DEST (pat
) = reg
;
285 if (recog (pat
, insn
, &num_clobbers
) >= 0)
286 direct_load
[(int) mode
] = 1;
289 SET_DEST (pat
) = mem
;
290 if (recog (pat
, insn
, &num_clobbers
) >= 0)
291 direct_store
[(int) mode
] = 1;
294 SET_DEST (pat
) = mem1
;
295 if (recog (pat
, insn
, &num_clobbers
) >= 0)
296 direct_store
[(int) mode
] = 1;
/* Second pass: for every pair of float modes, record whether a direct
   extending load from memory is available (float_extend_from_mem).  */
300 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
302 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
303 mode
= GET_MODE_WIDER_MODE (mode
))
305 enum machine_mode srcmode
;
306 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
307 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
311 ic
= can_extend_p (mode
, srcmode
, 0);
312 if (ic
== CODE_FOR_nothing
)
315 PUT_MODE (mem
, srcmode
);
317 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
318 float_extend_from_mem
[mode
][srcmode
] = true;
323 /* This is run at the start of compiling a function. */
/* NOTE(review): the signature of init_expr (and surrounding braces)
   was dropped by this extraction; only its body statement survives:
   allocate and zero the per-function expr_status.  */
328 cfun
->expr
= ggc_alloc_cleared (sizeof (struct expr_status
));
331 /* Small sanity check that the queue is empty at the end of a function. */
/* NOTE(review): the body of finish_expr_for_function is missing from
   this extraction; only the signature line remains.  */
334 finish_expr_for_function (void)
340 /* Manage the queue of increment instructions to be output
341 for POSTINCREMENT_EXPR expressions, etc. */
343 /* Queue up to increment (or change) VAR later. BODY says how:
344 BODY should be the same thing you would pass to emit_insn
345 to increment right away. It will go to emit_insn later on.
347 The value is a QUEUED expression to be used in place of VAR
348 where you want to guarantee the pre-incrementation value of VAR. */
/* NOTE(review): the return type line and braces were dropped by this
   extraction.  Prepends a new QUEUED rtx to pending_chain and returns
   it; the two NULL_RTX slots are QUEUED_INSN / QUEUED_COPY, filled in
   later by emit_queue / protect_from_queue.  */
351 enqueue_insn (rtx var
, rtx body
)
353 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
354 body
, pending_chain
);
355 return pending_chain
;
358 /* Use protect_from_queue to convert a QUEUED expression
359 into something that you can put immediately into an instruction.
360 If the queued incrementation has not happened yet,
361 protect_from_queue returns the variable itself.
362 If the incrementation has happened, protect_from_queue returns a temp
363 that contains a copy of the old value of the variable.
365 Any time an rtx which might possibly be a QUEUED is to be put
366 into an instruction, it must be passed through protect_from_queue first.
367 QUEUED expressions are not meaningful in instructions.
369 Do not pass a value through protect_from_queue and then hold
370 on to it for a while before putting it in an instruction!
371 If the queue is flushed in between, incorrect code will result. */
/* NOTE(review): interior lines (return type, braces, several branches,
   the declaration of Y) were dropped by this extraction; the embedded
   line numbers jump.  Preserved verbatim below.  */
374 protect_from_queue (rtx x
, int modify
)
376 RTX_CODE code
= GET_CODE (x
);
378 #if 0 /* A QUEUED can hang around after the queue is forced out. */
379 /* Shortcut for most common case. */
380 if (pending_chain
== 0)
386 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
387 use of autoincrement. Make a copy of the contents of the memory
388 location rather than a copy of the address, but not if the value is
389 of mode BLKmode. Don't modify X in place since it might be
391 if (code
== MEM
&& GET_MODE (x
) != BLKmode
392 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
395 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
399 rtx temp
= gen_reg_rtx (GET_MODE (x
));
401 emit_insn_before (gen_move_insn (temp
, new),
406 /* Copy the address into a pseudo, so that the returned value
407 remains correct across calls to emit_queue. */
408 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
411 /* Otherwise, recursively protect the subexpressions of all
412 the kinds of rtx's that can contain a QUEUED. */
415 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
416 if (tem
!= XEXP (x
, 0))
422 else if (code
== PLUS
|| code
== MULT
)
424 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
425 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
426 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
435 /* If the increment has not happened, use the variable itself. Copy it
436 into a new pseudo so that the value remains correct across calls to
438 if (QUEUED_INSN (x
) == 0)
439 return copy_to_reg (QUEUED_VAR (x
));
440 /* If the increment has happened and a pre-increment copy exists,
442 if (QUEUED_COPY (x
) != 0)
443 return QUEUED_COPY (x
);
444 /* The increment has happened but we haven't set up a pre-increment copy.
445 Set one up now, and use it. */
446 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
447 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
449 return QUEUED_COPY (x
);
452 /* Return nonzero if X contains a QUEUED expression:
453 if it contains anything that will be altered by a queued increment.
454 We handle only combinations of MEM, PLUS, MINUS and MULT operators
455 since memory addresses generally contain only those. */
/* NOTE(review): return type, braces and the switch/case labels that
   dispatch on CODE were dropped by this extraction.  */
458 queued_subexp_p (rtx x
)
460 enum rtx_code code
= GET_CODE (x
);
466 return queued_subexp_p (XEXP (x
, 0));
470 return (queued_subexp_p (XEXP (x
, 0))
471 || queued_subexp_p (XEXP (x
, 1)));
477 /* Perform all the pending incrementations. */
/* NOTE(review): the emit_queue signature, local declaration of P, the
   switch case labels and braces were dropped by this extraction.
   Visible logic: drain pending_chain, emitting each queued BODY and
   recording the resulting insn in QUEUED_INSN.  */
483 while ((p
= pending_chain
))
485 rtx body
= QUEUED_BODY (p
);
487 switch (GET_CODE (body
))
495 QUEUED_INSN (p
) = body
;
499 #ifdef ENABLE_CHECKING
506 QUEUED_INSN (p
) = emit_insn (body
);
/* Advance to the next queued entry before the loop re-tests.  */
510 pending_chain
= QUEUED_NEXT (p
);
514 /* Copy data from FROM to TO, where the machine modes are not the same.
515 Both modes may be integer, or both may be floating.
516 UNSIGNEDP should be nonzero if FROM is an unsigned type.
517 This causes zero-extension instead of sign-extension. */
/* NOTE(review): this extraction dropped many interior lines of
   convert_move (return type, braces, early returns, abort calls,
   declarations of code/tab/libcall/value/insns/lowfrom/lowpart/
   fill_value/i/tmp/shift_amount, and several statement tails); the
   embedded line numbers jump throughout.  Preserved verbatim.  */
520 convert_move (rtx to
, rtx from
, int unsignedp
)
522 enum machine_mode to_mode
= GET_MODE (to
);
523 enum machine_mode from_mode
= GET_MODE (from
);
524 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
525 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
529 /* rtx code for making an equivalent value. */
530 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
531 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
533 to
= protect_from_queue (to
, 1);
534 from
= protect_from_queue (from
, 0);
536 if (to_real
!= from_real
)
539 /* If FROM is a SUBREG that indicates that we have already done at least
540 the required extension, strip it. We don't handle such SUBREGs as
543 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
544 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
545 >= GET_MODE_SIZE (to_mode
))
546 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
547 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
549 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
/* Trivial case: same mode (or mode-less constant) — plain move.  */
552 if (to_mode
== from_mode
553 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
555 emit_move_insn (to
, from
);
/* Vector modes of equal bit size: reinterpret via subreg and move.  */
559 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
561 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
564 if (VECTOR_MODE_P (to_mode
))
565 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
567 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
569 emit_move_insn (to
, from
);
/* CONCAT (e.g. complex values): convert the two halves separately.  */
573 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
575 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
576 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
/* Float-to-float: pick sext_optab or trunc_optab by precision.  */
585 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
587 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
592 /* Try converting directly if the insn is supported. */
594 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
595 if (code
!= CODE_FOR_nothing
)
597 emit_unop_insn (code
, to
, from
,
598 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
602 /* Otherwise use a libcall. */
603 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
606 /* This conversion is not implemented yet. */
610 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
612 insns
= get_insns ();
614 emit_libcall_block (insns
, to
, value
,
615 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
617 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
621 /* Handle pointer conversion. */ /* SPEE 900220. */
622 /* Targets are expected to provide conversion insns between PxImode and
623 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
624 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
626 enum machine_mode full_mode
627 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
629 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
633 if (full_mode
!= from_mode
)
634 from
= convert_to_mode (full_mode
, from
, unsignedp
);
635 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
639 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
641 enum machine_mode full_mode
642 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
644 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
648 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
650 if (to_mode
== full_mode
)
653 /* else proceed to integer conversions below */
654 from_mode
= full_mode
;
657 /* Now both modes are integers. */
659 /* Handle expanding beyond a word. */
660 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
661 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
668 enum machine_mode lowpart_mode
;
669 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
671 /* Try converting directly if the insn is supported. */
672 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
675 /* If FROM is a SUBREG, put it into a register. Do this
676 so that we always generate the same set of insns for
677 better cse'ing; if an intermediate assignment occurred,
678 we won't be doing the operation directly on the SUBREG. */
679 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
680 from
= force_reg (from_mode
, from
);
681 emit_unop_insn (code
, to
, from
, equiv_code
);
684 /* Next, try converting via full word. */
685 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
686 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
687 != CODE_FOR_nothing
))
689 if (GET_CODE (to
) == REG
)
690 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
691 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
692 emit_unop_insn (code
, to
,
693 gen_lowpart (word_mode
, to
), equiv_code
);
697 /* No special multiword conversion insn; do it by hand. */
700 /* Since we will turn this into a no conflict block, we must ensure
701 that the source does not overlap the target. */
703 if (reg_overlap_mentioned_p (to
, from
))
704 from
= force_reg (from_mode
, from
);
706 /* Get a copy of FROM widened to a word, if necessary. */
707 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
708 lowpart_mode
= word_mode
;
710 lowpart_mode
= from_mode
;
712 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
714 lowpart
= gen_lowpart (lowpart_mode
, to
);
715 emit_move_insn (lowpart
, lowfrom
);
717 /* Compute the value to put in each remaining word. */
719 fill_value
= const0_rtx
;
724 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
725 && STORE_FLAG_VALUE
== -1)
727 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
729 fill_value
= gen_reg_rtx (word_mode
);
730 emit_insn (gen_slt (fill_value
));
736 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
737 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
739 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
743 /* Fill the remaining words. */
744 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
746 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
747 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
752 if (fill_value
!= subword
)
753 emit_move_insn (subword
, fill_value
);
756 insns
= get_insns ();
759 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
760 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
764 /* Truncating multi-word to a word or less. */
765 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
766 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
768 if (!((GET_CODE (from
) == MEM
769 && ! MEM_VOLATILE_P (from
)
770 && direct_load
[(int) to_mode
]
771 && ! mode_dependent_address_p (XEXP (from
, 0)))
772 || GET_CODE (from
) == REG
773 || GET_CODE (from
) == SUBREG
))
774 from
= force_reg (from_mode
, from
);
775 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
779 /* Now follow all the conversions between integers
780 no more than a word long. */
782 /* For truncation, usually we can just refer to FROM in a narrower mode. */
783 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
784 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
785 GET_MODE_BITSIZE (from_mode
)))
787 if (!((GET_CODE (from
) == MEM
788 && ! MEM_VOLATILE_P (from
)
789 && direct_load
[(int) to_mode
]
790 && ! mode_dependent_address_p (XEXP (from
, 0)))
791 || GET_CODE (from
) == REG
792 || GET_CODE (from
) == SUBREG
))
793 from
= force_reg (from_mode
, from
);
794 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
795 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
796 from
= copy_to_reg (from
);
797 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
801 /* Handle extension. */
802 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
804 /* Convert directly if that works. */
805 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
809 from
= force_not_mem (from
);
811 emit_unop_insn (code
, to
, from
, equiv_code
);
816 enum machine_mode intermediate
;
820 /* Search for a mode to convert via. */
821 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
822 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
823 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
825 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
826 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
827 GET_MODE_BITSIZE (intermediate
))))
828 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
829 != CODE_FOR_nothing
))
831 convert_move (to
, convert_to_mode (intermediate
, from
,
832 unsignedp
), unsignedp
);
836 /* No suitable intermediate mode.
837 Generate what we need with shifts. */
838 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
839 - GET_MODE_BITSIZE (from_mode
), 0);
840 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
841 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
843 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
846 emit_move_insn (to
, tmp
);
851 /* Support special truncate insns for certain modes. */
852 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
854 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
859 /* Handle truncation of volatile memrefs, and so on;
860 the things that couldn't be truncated directly,
861 and for which there was no special instruction.
863 ??? Code above formerly short-circuited this, for most integer
864 mode pairs, with a force_reg in from_mode followed by a recursive
865 call to this routine. Appears always to have been wrong. */
866 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
868 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
869 emit_move_insn (to
, temp
);
873 /* Mode combination is not recognized. */
877 /* Return an rtx for a value that would result
878 from converting X to mode MODE.
879 Both X and MODE may be floating, or both integer.
880 UNSIGNEDP is nonzero if X is an unsigned value.
881 This can be done by referring to a part of X in place
882 or by copying to a new temporary with conversion.
884 This function *must not* call protect_from_queue
885 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): the return type line and braces were dropped by this
   extraction.  Thin wrapper: delegates to convert_modes with
   OLDMODE == VOIDmode (i.e. take the old mode from X itself).  */
888 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
890 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
893 /* Return an rtx for a value that would result
894 from converting X from mode OLDMODE to mode MODE.
895 Both modes may be floating, or both integer.
896 UNSIGNEDP is nonzero if X is an unsigned value.
898 This can be done by referring to a part of X in place
899 or by copying to a new temporary with conversion.
901 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
903 This function *must not* call protect_from_queue
904 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): return type, braces, the declaration of TEMP and a few
   condition lines were dropped by this extraction; the embedded line
   numbers jump.  Preserved verbatim.  */
907 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
911 /* If FROM is a SUBREG that indicates that we have already done at least
912 the required extension, strip it. */
914 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
915 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
916 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
917 x
= gen_lowpart (mode
, x
);
919 if (GET_MODE (x
) != VOIDmode
)
920 oldmode
= GET_MODE (x
);
925 /* There is one case that we must handle specially: If we are converting
926 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
927 we are to interpret the constant as unsigned, gen_lowpart will do
928 the wrong if the constant appears negative. What we want to do is
929 make the high-order word of the constant zero, not all ones. */
931 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
932 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
933 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
935 HOST_WIDE_INT val
= INTVAL (x
);
937 if (oldmode
!= VOIDmode
938 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
940 int width
= GET_MODE_BITSIZE (oldmode
);
942 /* We need to zero extend VAL. */
943 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
946 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
949 /* We can do this with a gen_lowpart if both desired and current modes
950 are integer, and this is either a constant integer, a register, or a
951 non-volatile MEM. Except for the constant case where MODE is no
952 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
954 if ((GET_CODE (x
) == CONST_INT
955 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
956 || (GET_MODE_CLASS (mode
) == MODE_INT
957 && GET_MODE_CLASS (oldmode
) == MODE_INT
958 && (GET_CODE (x
) == CONST_DOUBLE
959 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
960 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
961 && direct_load
[(int) mode
])
962 || (GET_CODE (x
) == REG
963 && (! HARD_REGISTER_P (x
)
964 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
965 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
966 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
968 /* ?? If we don't know OLDMODE, we have to assume here that
969 X does not need sign- or zero-extension. This may not be
970 the case, but it's the best we can do. */
971 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
972 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
974 HOST_WIDE_INT val
= INTVAL (x
);
975 int width
= GET_MODE_BITSIZE (oldmode
);
977 /* We must sign or zero-extend in this case. Start by
978 zero-extending, then sign extend if we need to. */
979 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
981 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
982 val
|= (HOST_WIDE_INT
) (-1) << width
;
984 return gen_int_mode (val
, mode
);
987 return gen_lowpart (mode
, x
);
990 /* Converting from integer constant into mode is always equivalent to an
992 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
994 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
996 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
/* General case: copy into a fresh pseudo via convert_move.  */
999 temp
= gen_reg_rtx (mode
);
1000 convert_move (temp
, x
, unsignedp
);
1004 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1005 store efficiently. Due to internal GCC limitations, this is
1006 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1007 for an immediate constant. */
1009 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1011 /* Determine whether the LEN bytes can be moved by using several move
1012 instructions. Return nonzero if a call to move_by_pieces should
/* NOTE(review): return type line and braces were dropped by this
   extraction.  Thin predicate over the MOVE_BY_PIECES_P macro.  */
1016 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
1017 unsigned int align ATTRIBUTE_UNUSED
)
1019 return MOVE_BY_PIECES_P (len
, align
);
1022 /* Generate several move instructions to copy LEN bytes from block FROM to
1023 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1024 and TO through protect_from_queue before calling.
1026 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1027 used to push FROM to the stack.
1029 ALIGN is maximum stack alignment we can assume.
1031 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1032 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
/* NOTE(review): many interior lines were dropped by this extraction
   (return type, braces, the TO == NULL push path, several field
   initializations, the abort for leftover bytes, and the ENDP return
   logic).  Preserved verbatim below.  */
1036 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1037 unsigned int align
, int endp
)
1039 struct move_by_pieces data
;
1040 rtx to_addr
, from_addr
= XEXP (from
, 0);
1041 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1042 enum machine_mode mode
= VOIDmode
, tmode
;
1043 enum insn_code icode
;
1045 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1048 data
.from_addr
= from_addr
;
1051 to_addr
= XEXP (to
, 0);
/* Detect whether the destination address already auto-increments,
   and whether it moves downward (reverse copy).  */
1054 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1055 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1057 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1064 #ifdef STACK_GROWS_DOWNWARD
1070 data
.to_addr
= to_addr
;
1073 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1074 || GET_CODE (from_addr
) == POST_INC
1075 || GET_CODE (from_addr
) == POST_DEC
);
1077 data
.explicit_inc_from
= 0;
1078 data
.explicit_inc_to
= 0;
1079 if (data
.reverse
) data
.offset
= len
;
1082 /* If copying requires more than two move insns,
1083 copy addresses to registers (to make displacements shorter)
1084 and use post-increment if available. */
1085 if (!(data
.autinc_from
&& data
.autinc_to
)
1086 && move_by_pieces_ninsns (len
, align
) > 2)
1088 /* Find the mode of the largest move... */
1089 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1090 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1091 if (GET_MODE_SIZE (tmode
) < max_size
)
1094 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1096 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1097 data
.autinc_from
= 1;
1098 data
.explicit_inc_from
= -1;
1100 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1102 data
.from_addr
= copy_addr_to_reg (from_addr
);
1103 data
.autinc_from
= 1;
1104 data
.explicit_inc_from
= 1;
1106 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1107 data
.from_addr
= copy_addr_to_reg (from_addr
);
1108 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1110 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1112 data
.explicit_inc_to
= -1;
1114 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1116 data
.to_addr
= copy_addr_to_reg (to_addr
);
1118 data
.explicit_inc_to
= 1;
1120 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1121 data
.to_addr
= copy_addr_to_reg (to_addr
);
/* Cap the usable alignment at the widest cheap access.  */
1124 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1125 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1126 align
= MOVE_MAX
* BITS_PER_UNIT
;
1128 /* First move what we can in the largest integer mode, then go to
1129 successively smaller modes. */
1131 while (max_size
> 1)
1133 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1134 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1135 if (GET_MODE_SIZE (tmode
) < max_size
)
1138 if (mode
== VOIDmode
)
1141 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1142 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1143 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1145 max_size
= GET_MODE_SIZE (mode
);
1148 /* The code above should have handled everything. */
/* ENDP handling: back the destination address up one byte when the
   caller wants the address of the last byte (ENDP == 2).  */
1162 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1163 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1165 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1168 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1175 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1183 /* Return number of insns required to move L bytes by pieces.
1184 ALIGN (in bits) is maximum alignment we can assume. */
1186 static unsigned HOST_WIDE_INT
1187 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
)
1189 unsigned HOST_WIDE_INT n_insns
= 0;
1190 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1192 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1193 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1194 align
= MOVE_MAX
* BITS_PER_UNIT
;
1196 while (max_size
> 1)
1198 enum machine_mode mode
= VOIDmode
, tmode
;
1199 enum insn_code icode
;
1201 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1202 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1203 if (GET_MODE_SIZE (tmode
) < max_size
)
1206 if (mode
== VOIDmode
)
1209 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1210 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1211 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1213 max_size
= GET_MODE_SIZE (mode
);
1221 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1222 with move instructions for mode MODE. GENFUN is the gen_... function
1223 to make a move insn for that mode. DATA has all the other info. */
1226 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1227 struct move_by_pieces
*data
)
1229 unsigned int size
= GET_MODE_SIZE (mode
);
1230 rtx to1
= NULL_RTX
, from1
;
1232 while (data
->len
>= size
)
1235 data
->offset
-= size
;
1239 if (data
->autinc_to
)
1240 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1243 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1246 if (data
->autinc_from
)
1247 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1250 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1252 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1253 emit_insn (gen_add2_insn (data
->to_addr
,
1254 GEN_INT (-(HOST_WIDE_INT
)size
)));
1255 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1256 emit_insn (gen_add2_insn (data
->from_addr
,
1257 GEN_INT (-(HOST_WIDE_INT
)size
)));
1260 emit_insn ((*genfun
) (to1
, from1
));
1263 #ifdef PUSH_ROUNDING
1264 emit_single_push_insn (mode
, from1
, NULL
);
1270 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1271 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1272 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1273 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1275 if (! data
->reverse
)
1276 data
->offset
+= size
;
1282 /* Emit code to move a block Y to a block X. This may be done with
1283 string-move instructions, with multiple scalar move instructions,
1284 or with a library call.
1286 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1287 SIZE is an rtx that says how long they are.
1288 ALIGN is the maximum alignment we can assume they have.
1289 METHOD describes what kind of copy this is, and what mechanisms may be used.
1291 Return the address of the new block, if memcpy is called and returns it,
1295 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1303 case BLOCK_OP_NORMAL
:
1304 may_use_call
= true;
1307 case BLOCK_OP_CALL_PARM
:
1308 may_use_call
= block_move_libcall_safe_for_call_parm ();
1310 /* Make inhibit_defer_pop nonzero around the library call
1311 to force it to pop the arguments right away. */
1315 case BLOCK_OP_NO_LIBCALL
:
1316 may_use_call
= false;
1323 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1325 if (GET_MODE (x
) != BLKmode
)
1327 if (GET_MODE (y
) != BLKmode
)
1330 x
= protect_from_queue (x
, 1);
1331 y
= protect_from_queue (y
, 0);
1332 size
= protect_from_queue (size
, 0);
1334 if (GET_CODE (x
) != MEM
)
1336 if (GET_CODE (y
) != MEM
)
1341 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1342 can be incorrect is coming from __builtin_memcpy. */
1343 if (GET_CODE (size
) == CONST_INT
)
1345 if (INTVAL (size
) == 0)
1348 x
= shallow_copy_rtx (x
);
1349 y
= shallow_copy_rtx (y
);
1350 set_mem_size (x
, size
);
1351 set_mem_size (y
, size
);
1354 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1355 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1356 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1358 else if (may_use_call
)
1359 retval
= emit_block_move_via_libcall (x
, y
, size
);
1361 emit_block_move_via_loop (x
, y
, size
, align
);
1363 if (method
== BLOCK_OP_CALL_PARM
)
1369 /* A subroutine of emit_block_move. Returns true if calling the
1370 block move libcall will not clobber any parameters which may have
1371 already been placed on the stack. */
1374 block_move_libcall_safe_for_call_parm (void)
1376 /* If arguments are pushed on the stack, then they're safe. */
1380 /* If registers go on the stack anyway, any argument is sure to clobber
1381 an outgoing argument. */
1382 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1384 tree fn
= emit_block_move_libcall_fn (false);
1386 if (REG_PARM_STACK_SPACE (fn
) != 0)
1391 /* If any argument goes in memory, then it might clobber an outgoing
1394 CUMULATIVE_ARGS args_so_far
;
1397 fn
= emit_block_move_libcall_fn (false);
1398 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1400 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1401 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1403 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1404 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1405 if (!tmp
|| !REG_P (tmp
))
1407 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1408 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1412 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1418 /* A subroutine of emit_block_move. Expand a movstr pattern;
1419 return true if successful. */
1422 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1424 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1425 enum machine_mode mode
;
1427 /* Since this is a move insn, we don't care about volatility. */
1430 /* Try the most limited insn first, because there's no point
1431 including more than one in the machine description unless
1432 the more limited one has some advantage. */
1434 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1435 mode
= GET_MODE_WIDER_MODE (mode
))
1437 enum insn_code code
= movstr_optab
[(int) mode
];
1438 insn_operand_predicate_fn pred
;
1440 if (code
!= CODE_FOR_nothing
1441 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1442 here because if SIZE is less than the mode mask, as it is
1443 returned by the macro, it will definitely be less than the
1444 actual mode mask. */
1445 && ((GET_CODE (size
) == CONST_INT
1446 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1447 <= (GET_MODE_MASK (mode
) >> 1)))
1448 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1449 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1450 || (*pred
) (x
, BLKmode
))
1451 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1452 || (*pred
) (y
, BLKmode
))
1453 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1454 || (*pred
) (opalign
, VOIDmode
)))
1457 rtx last
= get_last_insn ();
1460 op2
= convert_to_mode (mode
, size
, 1);
1461 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1462 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1463 op2
= copy_to_mode_reg (mode
, op2
);
1465 /* ??? When called via emit_block_move_for_call, it'd be
1466 nice if there were some way to inform the backend, so
1467 that it doesn't fail the expansion because it thinks
1468 emitting the libcall would be more efficient. */
1470 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1478 delete_insns_since (last
);
1486 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1487 Return the return value from memcpy, 0 otherwise. */
1490 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1492 rtx dst_addr
, src_addr
;
1493 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1494 enum machine_mode size_mode
;
1497 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1499 It is unsafe to save the value generated by protect_from_queue and reuse
1500 it later. Consider what happens if emit_queue is called before the
1501 return value from protect_from_queue is used.
1503 Expansion of the CALL_EXPR below will call emit_queue before we are
1504 finished emitting RTL for argument setup. So if we are not careful we
1505 could get the wrong value for an argument.
1507 To avoid this problem we go ahead and emit code to copy the addresses of
1508 DST and SRC and SIZE into new pseudos. We can then place those new
1509 pseudos into an RTL_EXPR and use them later, even after a call to
1512 Note this is not strictly needed for library calls since they do not call
1513 emit_queue before loading their arguments. However, we may need to have
1514 library calls call emit_queue in the future since failing to do so could
1515 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1516 arguments in registers. */
1518 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1519 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1521 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1522 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1524 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1525 src_tree
= make_tree (ptr_type_node
, src_addr
);
1527 if (TARGET_MEM_FUNCTIONS
)
1528 size_mode
= TYPE_MODE (sizetype
);
1530 size_mode
= TYPE_MODE (unsigned_type_node
);
1532 size
= convert_to_mode (size_mode
, size
, 1);
1533 size
= copy_to_mode_reg (size_mode
, size
);
1535 /* It is incorrect to use the libcall calling conventions to call
1536 memcpy in this context. This could be a user call to memcpy and
1537 the user may wish to examine the return value from memcpy. For
1538 targets where libcalls and normal calls have different conventions
1539 for returning pointers, we could end up generating incorrect code.
1541 For convenience, we generate the call to bcopy this way as well. */
1543 if (TARGET_MEM_FUNCTIONS
)
1544 size_tree
= make_tree (sizetype
, size
);
1546 size_tree
= make_tree (unsigned_type_node
, size
);
1548 fn
= emit_block_move_libcall_fn (true);
1549 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1550 if (TARGET_MEM_FUNCTIONS
)
1552 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1553 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1557 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1558 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1561 /* Now we have to build up the CALL_EXPR itself. */
1562 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1563 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1564 call_expr
, arg_list
, NULL_TREE
);
1566 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1568 /* If we are initializing a readonly value, show the above call clobbered
1569 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1570 the delay slot scheduler might overlook conflicts and take nasty
1572 if (RTX_UNCHANGING_P (dst
))
1573 add_function_usage_to
1574 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1575 gen_rtx_CLOBBER (VOIDmode
, dst
),
1578 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
1581 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1582 for the function we use for block copies. The first time FOR_CALL
1583 is true, we call assemble_external. */
1585 static GTY(()) tree block_move_fn
;
1588 init_block_move_fn (const char *asmspec
)
1594 if (TARGET_MEM_FUNCTIONS
)
1596 fn
= get_identifier ("memcpy");
1597 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1598 const_ptr_type_node
, sizetype
,
1603 fn
= get_identifier ("bcopy");
1604 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
1605 ptr_type_node
, unsigned_type_node
,
1609 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1610 DECL_EXTERNAL (fn
) = 1;
1611 TREE_PUBLIC (fn
) = 1;
1612 DECL_ARTIFICIAL (fn
) = 1;
1613 TREE_NOTHROW (fn
) = 1;
1620 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
1621 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
1626 emit_block_move_libcall_fn (int for_call
)
1628 static bool emitted_extern
;
1631 init_block_move_fn (NULL
);
1633 if (for_call
&& !emitted_extern
)
1635 emitted_extern
= true;
1636 make_decl_rtl (block_move_fn
, NULL
);
1637 assemble_external (block_move_fn
);
1640 return block_move_fn
;
1643 /* A subroutine of emit_block_move. Copy the data via an explicit
1644 loop. This is used only when libcalls are forbidden. */
1645 /* ??? It'd be nice to copy in hunks larger than QImode. */
1648 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1649 unsigned int align ATTRIBUTE_UNUSED
)
1651 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1652 enum machine_mode iter_mode
;
1654 iter_mode
= GET_MODE (size
);
1655 if (iter_mode
== VOIDmode
)
1656 iter_mode
= word_mode
;
1658 top_label
= gen_label_rtx ();
1659 cmp_label
= gen_label_rtx ();
1660 iter
= gen_reg_rtx (iter_mode
);
1662 emit_move_insn (iter
, const0_rtx
);
1664 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1665 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1666 do_pending_stack_adjust ();
1668 emit_note (NOTE_INSN_LOOP_BEG
);
1670 emit_jump (cmp_label
);
1671 emit_label (top_label
);
1673 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1674 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1675 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1676 x
= change_address (x
, QImode
, x_addr
);
1677 y
= change_address (y
, QImode
, y_addr
);
1679 emit_move_insn (x
, y
);
1681 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1682 true, OPTAB_LIB_WIDEN
);
1684 emit_move_insn (iter
, tmp
);
1686 emit_note (NOTE_INSN_LOOP_CONT
);
1687 emit_label (cmp_label
);
1689 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1692 emit_note (NOTE_INSN_LOOP_END
);
1695 /* Copy all or part of a value X into registers starting at REGNO.
1696 The number of registers to be filled is NREGS. */
1699 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1702 #ifdef HAVE_load_multiple
1710 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1711 x
= validize_mem (force_const_mem (mode
, x
));
1713 /* See if the machine can do this with a load multiple insn. */
1714 #ifdef HAVE_load_multiple
1715 if (HAVE_load_multiple
)
1717 last
= get_last_insn ();
1718 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1726 delete_insns_since (last
);
1730 for (i
= 0; i
< nregs
; i
++)
1731 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1732 operand_subword_force (x
, i
, mode
));
1735 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1736 The number of registers to be filled is NREGS. */
1739 move_block_from_reg (int regno
, rtx x
, int nregs
)
1746 /* See if the machine can do this with a store multiple insn. */
1747 #ifdef HAVE_store_multiple
1748 if (HAVE_store_multiple
)
1750 rtx last
= get_last_insn ();
1751 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1759 delete_insns_since (last
);
1763 for (i
= 0; i
< nregs
; i
++)
1765 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1770 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1774 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1775 ORIG, where ORIG is a non-consecutive group of registers represented by
1776 a PARALLEL. The clone is identical to the original except in that the
1777 original set of registers is replaced by a new set of pseudo registers.
1778 The new set has the same modes as the original set. */
1781 gen_group_rtx (rtx orig
)
1786 if (GET_CODE (orig
) != PARALLEL
)
1789 length
= XVECLEN (orig
, 0);
1790 tmps
= alloca (sizeof (rtx
) * length
);
1792 /* Skip a NULL entry in first slot. */
1793 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1798 for (; i
< length
; i
++)
1800 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1801 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1803 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1806 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1809 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1810 where DST is non-consecutive registers represented by a PARALLEL.
1811 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1815 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1820 if (GET_CODE (dst
) != PARALLEL
)
1823 /* Check for a NULL entry, used to indicate that the parameter goes
1824 both on the stack and in registers. */
1825 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1830 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1832 /* Process the pieces. */
1833 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1835 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1836 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1837 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1840 /* Handle trailing fragments that run over the size of the struct. */
1841 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1843 /* Arrange to shift the fragment to where it belongs.
1844 extract_bit_field loads to the lsb of the reg. */
1846 #ifdef BLOCK_REG_PADDING
1847 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1848 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1853 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1854 bytelen
= ssize
- bytepos
;
1859 /* If we won't be loading directly from memory, protect the real source
1860 from strange tricks we might play; but make sure that the source can
1861 be loaded directly into the destination. */
1863 if (GET_CODE (orig_src
) != MEM
1864 && (!CONSTANT_P (orig_src
)
1865 || (GET_MODE (orig_src
) != mode
1866 && GET_MODE (orig_src
) != VOIDmode
)))
1868 if (GET_MODE (orig_src
) == VOIDmode
)
1869 src
= gen_reg_rtx (mode
);
1871 src
= gen_reg_rtx (GET_MODE (orig_src
));
1873 emit_move_insn (src
, orig_src
);
1876 /* Optimize the access just a bit. */
1877 if (GET_CODE (src
) == MEM
1878 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1879 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1880 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1881 && bytelen
== GET_MODE_SIZE (mode
))
1883 tmps
[i
] = gen_reg_rtx (mode
);
1884 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1886 else if (GET_CODE (src
) == CONCAT
)
1888 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1889 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1891 if ((bytepos
== 0 && bytelen
== slen0
)
1892 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1894 /* The following assumes that the concatenated objects all
1895 have the same size. In this case, a simple calculation
1896 can be used to determine the object and the bit field
1898 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1899 if (! CONSTANT_P (tmps
[i
])
1900 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
1901 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1902 (bytepos
% slen0
) * BITS_PER_UNIT
,
1903 1, NULL_RTX
, mode
, mode
, ssize
);
1905 else if (bytepos
== 0)
1907 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1908 emit_move_insn (mem
, src
);
1909 tmps
[i
] = adjust_address (mem
, mode
, 0);
1914 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1915 SIMD register, which is currently broken. While we get GCC
1916 to emit proper RTL for these cases, let's dump to memory. */
1917 else if (VECTOR_MODE_P (GET_MODE (dst
))
1918 && GET_CODE (src
) == REG
)
1920 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1923 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1924 emit_move_insn (mem
, src
);
1925 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1927 else if (CONSTANT_P (src
)
1928 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1931 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1932 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1936 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1937 tmps
[i
], 0, OPTAB_WIDEN
);
1942 /* Copy the extracted pieces into the proper (probable) hard regs. */
1943 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1944 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1947 /* Emit code to move a block SRC to block DST, where SRC and DST are
1948 non-consecutive groups of registers, each represented by a PARALLEL. */
1951 emit_group_move (rtx dst
, rtx src
)
1955 if (GET_CODE (src
) != PARALLEL
1956 || GET_CODE (dst
) != PARALLEL
1957 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
1960 /* Skip first entry if NULL. */
1961 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1962 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1963 XEXP (XVECEXP (src
, 0, i
), 0));
1966 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1967 where SRC is non-consecutive registers represented by a PARALLEL.
1968 SSIZE represents the total size of block ORIG_DST, or -1 if not
1972 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1977 if (GET_CODE (src
) != PARALLEL
)
1980 /* Check for a NULL entry, used to indicate that the parameter goes
1981 both on the stack and in registers. */
1982 if (XEXP (XVECEXP (src
, 0, 0), 0))
1987 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1989 /* Copy the (probable) hard regs into pseudos. */
1990 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1992 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1993 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1994 emit_move_insn (tmps
[i
], reg
);
1998 /* If we won't be storing directly into memory, protect the real destination
1999 from strange tricks we might play. */
2001 if (GET_CODE (dst
) == PARALLEL
)
2005 /* We can get a PARALLEL dst if there is a conditional expression in
2006 a return statement. In that case, the dst and src are the same,
2007 so no action is necessary. */
2008 if (rtx_equal_p (dst
, src
))
2011 /* It is unclear if we can ever reach here, but we may as well handle
2012 it. Allocate a temporary, and split this into a store/load to/from
2015 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2016 emit_group_store (temp
, src
, type
, ssize
);
2017 emit_group_load (dst
, temp
, type
, ssize
);
2020 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2022 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2023 /* Make life a bit easier for combine. */
2024 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2027 /* Process the pieces. */
2028 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2030 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2031 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2032 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2035 /* Handle trailing fragments that run over the size of the struct. */
2036 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2038 /* store_bit_field always takes its value from the lsb.
2039 Move the fragment to the lsb if it's not already there. */
2041 #ifdef BLOCK_REG_PADDING
2042 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2043 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2049 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2050 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2051 tmps
[i
], 0, OPTAB_WIDEN
);
2053 bytelen
= ssize
- bytepos
;
2056 if (GET_CODE (dst
) == CONCAT
)
2058 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2059 dest
= XEXP (dst
, 0);
2060 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2062 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2063 dest
= XEXP (dst
, 1);
2065 else if (bytepos
== 0 && XVECLEN (src
, 0))
2067 dest
= assign_stack_temp (GET_MODE (dest
),
2068 GET_MODE_SIZE (GET_MODE (dest
)), 0);
2069 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
2078 /* Optimize the access just a bit. */
2079 if (GET_CODE (dest
) == MEM
2080 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2081 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2082 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2083 && bytelen
== GET_MODE_SIZE (mode
))
2084 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2086 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2087 mode
, tmps
[i
], ssize
);
2092 /* Copy from the pseudo into the (probable) hard reg. */
2093 if (orig_dst
!= dst
)
2094 emit_move_insn (orig_dst
, dst
);
2097 /* Generate code to copy a BLKmode object of TYPE out of a
2098 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2099 is null, a stack temporary is created. TGTBLK is returned.
2101 The purpose of this routine is to handle functions that return
2102 BLKmode structures in registers. Some machines (the PA for example)
2103 want to return all small structures in registers regardless of the
2104 structure's alignment. */
2107 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
2109 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2110 rtx src
= NULL
, dst
= NULL
;
2111 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2112 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2116 tgtblk
= assign_temp (build_qualified_type (type
,
2118 | TYPE_QUAL_CONST
)),
2120 preserve_temp_slots (tgtblk
);
2123 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2124 into a new pseudo which is a full word. */
2126 if (GET_MODE (srcreg
) != BLKmode
2127 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2128 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2130 /* If the structure doesn't take up a whole number of words, see whether
2131 SRCREG is padded on the left or on the right. If it's on the left,
2132 set PADDING_CORRECTION to the number of bits to skip.
2134 In most ABIs, the structure will be returned at the least end of
2135 the register, which translates to right padding on little-endian
2136 targets and left padding on big-endian targets. The opposite
2137 holds if the structure is returned at the most significant
2138 end of the register. */
2139 if (bytes
% UNITS_PER_WORD
!= 0
2140 && (targetm
.calls
.return_in_msb (type
)
2142 : BYTES_BIG_ENDIAN
))
2144 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2146 /* Copy the structure BITSIZE bites at a time.
2148 We could probably emit more efficient code for machines which do not use
2149 strict alignment, but it doesn't seem worth the effort at the current
2151 for (bitpos
= 0, xbitpos
= padding_correction
;
2152 bitpos
< bytes
* BITS_PER_UNIT
;
2153 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2155 /* We need a new source operand each time xbitpos is on a
2156 word boundary and when xbitpos == padding_correction
2157 (the first time through). */
2158 if (xbitpos
% BITS_PER_WORD
== 0
2159 || xbitpos
== padding_correction
)
2160 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2163 /* We need a new destination operand each time bitpos is on
2165 if (bitpos
% BITS_PER_WORD
== 0)
2166 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2168 /* Use xbitpos for the source extraction (right justified) and
2169 xbitpos for the destination store (left justified). */
2170 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2171 extract_bit_field (src
, bitsize
,
2172 xbitpos
% BITS_PER_WORD
, 1,
2173 NULL_RTX
, word_mode
, word_mode
,
2181 /* Add a USE expression for REG to the (possibly empty) list pointed
2182 to by CALL_FUSAGE. REG must denote a hard register. */
2185 use_reg (rtx
*call_fusage
, rtx reg
)
2187 if (GET_CODE (reg
) != REG
2188 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2192 = gen_rtx_EXPR_LIST (VOIDmode
,
2193 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2200 use_regs (rtx
*call_fusage
, int regno
, int nregs
)
2204 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2207 for (i
= 0; i
< nregs
; i
++)
2208 use_reg (call_fusage
, regno_reg_rtx
[regno
+ i
]);
2211 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2212 PARALLEL REGS. This is for calls that pass values in multiple
2213 non-contiguous locations. The Irix 6 ABI has examples of this. */
2216 use_group_regs (rtx
*call_fusage
, rtx regs
)
2220 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2222 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2224 /* A NULL entry means the parameter goes both on the stack and in
2225 registers. This can also be a MEM for targets that pass values
2226 partially on the stack and partially in registers. */
2227 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2228 use_reg (call_fusage
, reg
);
2233 /* Determine whether the LEN bytes generated by CONSTFUN can be
2234 stored to memory using several move instructions. CONSTFUNDATA is
2235 a pointer which will be passed as argument in every CONSTFUN call.
2236 ALIGN is maximum alignment we can assume. Return nonzero if a
2237 call to store_by_pieces should succeed. */
2240 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2241 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2242 void *constfundata
, unsigned int align
)
2244 unsigned HOST_WIDE_INT max_size
, l
;
2245 HOST_WIDE_INT offset
= 0;
2246 enum machine_mode mode
, tmode
;
2247 enum insn_code icode
;
2254 if (! STORE_BY_PIECES_P (len
, align
))
2257 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2258 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2259 align
= MOVE_MAX
* BITS_PER_UNIT
;
2261 /* We would first store what we can in the largest integer mode, then go to
2262 successively smaller modes. */
2265 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2270 max_size
= STORE_MAX_PIECES
+ 1;
2271 while (max_size
> 1)
2273 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2274 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2275 if (GET_MODE_SIZE (tmode
) < max_size
)
2278 if (mode
== VOIDmode
)
2281 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2282 if (icode
!= CODE_FOR_nothing
2283 && align
>= GET_MODE_ALIGNMENT (mode
))
2285 unsigned int size
= GET_MODE_SIZE (mode
);
2292 cst
= (*constfun
) (constfundata
, offset
, mode
);
2293 if (!LEGITIMATE_CONSTANT_P (cst
))
2303 max_size
= GET_MODE_SIZE (mode
);
2306 /* The code above should have handled everything. */
2314 /* Generate several move instructions to store LEN bytes generated by
2315 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2316 pointer which will be passed as argument in every CONSTFUN call.
2317 ALIGN is maximum alignment we can assume.
2318 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2319 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
/* store_by_pieces: emit several move insns that store LEN bytes generated
   by CONSTFUN into block TO (a BLKmode MEM); ENDP selects what address is
   returned (0 = TO, 1 = end, 2 = end minus one — per the doc comment
   preceding this function).
   NOTE(review): extraction-damaged lines — original GCC line numbers are
   fused into the text and interior lines are missing (e.g. the len==0
   early return and data.to/data.len setup).  Restore from VCS before
   editing; comments only are added here.  */
2323 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2324 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2325 void *constfundata
, unsigned int align
, int endp
)
2327 struct store_by_pieces data
;
/* Caller error if the target predicate rejects this length/alignment
   (the abort/return on the rejected path was dropped in extraction).  */
2336 if (! STORE_BY_PIECES_P (len
, align
))
2338 to
= protect_from_queue (to
, 1);
2339 data
.constfun
= constfun
;
2340 data
.constfundata
= constfundata
;
/* Delegate the actual piecewise stores to store_by_pieces_1.  */
2343 store_by_pieces_1 (&data
, align
);
/* ENDP handling below: undo a final post-increment so the returned
   address points at the requested end position.  */
2354 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2355 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2357 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2360 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2367 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
2375 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2376 rtx with BLKmode). The caller must pass TO through protect_from_queue
2377 before calling. ALIGN is maximum alignment we can assume. */
/* clear_by_pieces: generate several move insns to zero LEN bytes of block
   TO (a BLKmode MEM) at alignment ALIGN, by driving store_by_pieces_1 with
   the constant-0 callback clear_by_pieces_1 (per the doc comment above).
   NOTE(review): extraction-damaged — the data.to/data.len assignments are
   missing from this view; restore from VCS before editing.  Comments only
   added here.  */
2380 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
2382 struct store_by_pieces data
;
/* The "constant generator" is the trivial callback returning zero.  */
2387 data
.constfun
= clear_by_pieces_1
;
2388 data
.constfundata
= NULL
;
2391 store_by_pieces_1 (&data
, align
);
2394 /* Callback routine for clear_by_pieces.
2395 Return const0_rtx unconditionally. */
/* clear_by_pieces_1: callback for clear_by_pieces; all three parameters
   are deliberately unused — it returns const0_rtx unconditionally (stated
   by the comment immediately above in the original file).
   NOTE(review): the body (`return const0_rtx;`) and braces were lost in
   extraction; only the parameter list survives.  Restore from VCS.  */
2398 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED
,
2399 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2400 enum machine_mode mode ATTRIBUTE_UNUSED
)
2405 /* Subroutine of clear_by_pieces and store_by_pieces.
2406 Generate several move instructions to store LEN bytes of block TO. (A MEM
2407 rtx with BLKmode). The caller must pass TO through protect_from_queue
2408 before calling. ALIGN is maximum alignment we can assume. */
2411 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2412 unsigned int align ATTRIBUTE_UNUSED
)
2414 rtx to_addr
= XEXP (data
->to
, 0);
2415 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2416 enum machine_mode mode
= VOIDmode
, tmode
;
2417 enum insn_code icode
;
2420 data
->to_addr
= to_addr
;
2422 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2423 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2425 data
->explicit_inc_to
= 0;
2427 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2429 data
->offset
= data
->len
;
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data
->autinc_to
2435 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2437 /* Determine the main mode we'll be using. */
2438 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2439 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2440 if (GET_MODE_SIZE (tmode
) < max_size
)
2443 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2445 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2446 data
->autinc_to
= 1;
2447 data
->explicit_inc_to
= -1;
2450 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2451 && ! data
->autinc_to
)
2453 data
->to_addr
= copy_addr_to_reg (to_addr
);
2454 data
->autinc_to
= 1;
2455 data
->explicit_inc_to
= 1;
2458 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2459 data
->to_addr
= copy_addr_to_reg (to_addr
);
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2463 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2464 align
= MOVE_MAX
* BITS_PER_UNIT
;
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
2469 while (max_size
> 1)
2471 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2472 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2473 if (GET_MODE_SIZE (tmode
) < max_size
)
2476 if (mode
== VOIDmode
)
2479 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2480 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2481 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2483 max_size
= GET_MODE_SIZE (mode
);
2486 /* The code above should have handled everything. */
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
/* store_by_pieces_2: subroutine of store_by_pieces_1 — store as many
   bytes as fit using move insns of mode MODE; GENFUN is the gen_...
   function producing a move insn for that mode, DATA carries the rest
   (per the doc comment preceding this function).
   NOTE(review): extraction-damaged lines — GCC line numbers fused into
   the text, braces and some declarations (to1, cst) missing.  Restore
   from VCS before editing; comments only are added.  */
2496 store_by_pieces_2 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
2497 struct store_by_pieces
*data
)
2499 unsigned int size
= GET_MODE_SIZE (mode
);
/* Emit one MODE-sized store per iteration while enough bytes remain.  */
2502 while (data
->len
>= size
)
/* When storing back-to-front, step the offset down before the store.  */
2505 data
->offset
-= size
;
/* Pick the destination MEM: auto-increment addressing vs. explicit
   offset from the block base.  */
2507 if (data
->autinc_to
)
2508 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2511 to1
= adjust_address (data
->to
, mode
, data
->offset
);
/* Explicit pre-decrement of the running address, when the target has
   no auto-decrement addressing.  */
2513 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2514 emit_insn (gen_add2_insn (data
->to_addr
,
2515 GEN_INT (-(HOST_WIDE_INT
) size
)));
/* Fetch the constant for this offset and emit the actual store.  */
2517 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
)
;
2518 emit_insn ((*genfun
) (to1
, cst
));
/* Explicit post-increment counterpart.  */
2520 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2521 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
/* Forward direction: advance the offset after the store.  */
2523 if (! data
->reverse
)
2524 data
->offset
+= size
;
2530 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2531 its length in bytes. */
/* clear_storage: write zeros through OBJECT; SIZE is its byte length when
   OBJECT is BLKmode (per the doc comment preceding this function).
   Strategy, visible below: direct zero move for exact non-BLKmode sizes,
   then clear_by_pieces for small constants, then a clrstr pattern, and
   finally a memset/bzero libcall.
   NOTE(review): extraction-damaged — GCC line numbers fused into text;
   the `rtx retval = 0;` declaration, braces and the returns are missing
   from this view.  Restore from VCS; comments only added here.  */
2534 clear_storage (rtx object
, rtx size
)
2537 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2538 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2540 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2541 just move a zero. Otherwise, do this a piece at a time. */
2542 if (GET_MODE (object
) != BLKmode
2543 && GET_CODE (size
) == CONST_INT
2544 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2545 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2548 object
= protect_from_queue (object
, 1);
2549 size
= protect_from_queue (size
, 0);
/* Zero-length clear is a no-op (the dropped branch body).  */
2551 if (size
== const0_rtx
)
/* Small constant sizes: expand inline piece by piece.  */
2553 else if (GET_CODE (size
) == CONST_INT
2554 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2555 clear_by_pieces (object
, INTVAL (size
), align
);
/* Next try the target's clrstr insn pattern, ...  */
2556 else if (clear_storage_via_clrstr (object
, size
, align
))
/* ... and as last resort call memset/bzero.  */
2559 retval
= clear_storage_via_libcall (object
, size
);
2565 /* A subroutine of clear_storage. Expand a clrstr pattern;
2566 return true if successful. */
2569 clear_storage_via_clrstr (rtx object
, rtx size
, unsigned int align
)
2571 /* Try the most limited insn first, because there's no point
2572 including more than one in the machine description unless
2573 the more limited one has some advantage. */
2575 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2576 enum machine_mode mode
;
2578 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2579 mode
= GET_MODE_WIDER_MODE (mode
))
2581 enum insn_code code
= clrstr_optab
[(int) mode
];
2582 insn_operand_predicate_fn pred
;
2584 if (code
!= CODE_FOR_nothing
2585 /* We don't need MODE to be narrower than
2586 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2587 the mode mask, as it is returned by the macro, it will
2588 definitely be less than the actual mode mask. */
2589 && ((GET_CODE (size
) == CONST_INT
2590 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2591 <= (GET_MODE_MASK (mode
) >> 1)))
2592 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2593 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2594 || (*pred
) (object
, BLKmode
))
2595 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2596 || (*pred
) (opalign
, VOIDmode
)))
2599 rtx last
= get_last_insn ();
2602 op1
= convert_to_mode (mode
, size
, 1);
2603 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2604 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2605 op1
= copy_to_mode_reg (mode
, op1
);
2607 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2614 delete_insns_since (last
);
2621 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2622 Return the return value of memset, 0 otherwise. */
2625 clear_storage_via_libcall (rtx object
, rtx size
)
2627 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2628 enum machine_mode size_mode
;
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2652 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2654 if (TARGET_MEM_FUNCTIONS
)
2655 size_mode
= TYPE_MODE (sizetype
);
2657 size_mode
= TYPE_MODE (unsigned_type_node
);
2658 size
= convert_to_mode (size_mode
, size
, 1);
2659 size
= copy_to_mode_reg (size_mode
, size
);
2661 /* It is incorrect to use the libcall calling conventions to call
2662 memset in this context. This could be a user call to memset and
2663 the user may wish to examine the return value from memset. For
2664 targets where libcalls and normal calls have different conventions
2665 for returning pointers, we could end up generating incorrect code.
2667 For convenience, we generate the call to bzero this way as well. */
2669 object_tree
= make_tree (ptr_type_node
, object
);
2670 if (TARGET_MEM_FUNCTIONS
)
2671 size_tree
= make_tree (sizetype
, size
);
2673 size_tree
= make_tree (unsigned_type_node
, size
);
2675 fn
= clear_storage_libcall_fn (true);
2676 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2677 if (TARGET_MEM_FUNCTIONS
)
2678 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2679 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2681 /* Now we have to build up the CALL_EXPR itself. */
2682 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2683 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2684 call_expr
, arg_list
, NULL_TREE
);
2686 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2688 /* If we are initializing a readonly value, show the above call
2689 clobbered it. Otherwise, a load from it may erroneously be
2690 hoisted from a loop. */
2691 if (RTX_UNCHANGING_P (object
))
2692 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
2694 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
2697 /* A subroutine of clear_storage_via_libcall. Create the tree node
2698 for the function we use for block clears. The first time FOR_CALL
2699 is true, we call assemble_external. */
2701 static GTY(()) tree block_clear_fn
;
/* init_block_clear_fn: build (once) the FUNCTION_DECL used for block
   clears — memset when TARGET_MEM_FUNCTIONS, otherwise bzero — and, when
   ASMSPEC is given, reset its RTL and assembler name (per the doc comment
   preceding this function and the GTY'd block_clear_fn above).
   NOTE(review): extraction-damaged — the `tree fn, args;` declaration,
   the `else`, the trailing NULL_TREE of the first argument list and the
   `if (asmspec)` guard are missing from this view.  Restore from VCS;
   comments only added here.  */
2704 init_block_clear_fn (const char *asmspec
)
/* Build the decl only on first use.  */
2706 if (!block_clear_fn
)
2710 if (TARGET_MEM_FUNCTIONS
)
2712 fn
= get_identifier ("memset");
2713 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2714 integer_type_node
, sizetype
,
/* bzero variant: void bzero (void *, unsigned).  */
2719 fn
= get_identifier ("bzero");
2720 args
= build_function_type_list (void_type_node
, ptr_type_node
,
2721 unsigned_type_node
, NULL_TREE
);
2724 fn
= build_decl (FUNCTION_DECL
, fn
, args
)
;
/* Mark as an external, compiler-generated, non-throwing library decl.  */
2725 DECL_EXTERNAL (fn
) = 1;
2726 TREE_PUBLIC (fn
) = 1;
2727 DECL_ARTIFICIAL (fn
) = 1;
2728 TREE_NOTHROW (fn
) = 1;
2730 block_clear_fn
= fn
;
/* With an explicit asm name: drop cached RTL and rename the decl
   (guarding `if (asmspec)` was lost in extraction).  */
2735 SET_DECL_RTL (block_clear_fn
, NULL_RTX
);
2736 SET_DECL_ASSEMBLER_NAME (block_clear_fn
, get_identifier (asmspec
));
/* clear_storage_libcall_fn: return the block-clear FUNCTION_DECL,
   lazily creating it, and on the first FOR_CALL use emit its RTL and
   assemble_external (per the doc comment above init_block_clear_fn:
   "The first time FOR_CALL is true, we call assemble_external").
   NOTE(review): only brace/blank lines were lost in extraction here —
   every statement is present.  Comments only added; code untouched.  */
2741 clear_storage_libcall_fn (int for_call
)
/* Sticky across calls: external emitted at most once.  */
2743 static bool emitted_extern
;
2745 if (!block_clear_fn
)
2746 init_block_clear_fn (NULL
);
2748 if (for_call
&& !emitted_extern
)
2750 emitted_extern
= true;
2751 make_decl_rtl (block_clear_fn
, NULL
);
2752 assemble_external (block_clear_fn
);
2755 return block_clear_fn
;
2758 /* Generate code to copy Y into X.
2759 Both Y and X must have the same mode, except that
2760 Y can be a constant with VOIDmode.
2761 This mode cannot be BLKmode; use emit_block_move for that.
2763 Return the last instruction emitted. */
/* emit_move_insn: generate code to copy Y into X; X and Y share a mode
   except that Y may be a VOIDmode constant; BLKmode is not allowed here
   (use emit_block_move) — per the doc comment preceding this function.
   Returns the last insn emitted.
   NOTE(review): extraction-damaged — GCC line numbers fused into the
   text; several branches lost their bodies (the abort()s, y_cst capture,
   `return last_insn` paths, parts of the address-validation conditions).
   Restore from VCS before editing; comments only are added here.  */
2766 emit_move_insn (rtx x
, rtx y
)
2768 enum machine_mode mode
= GET_MODE (x
);
2769 rtx y_cst
= NULL_RTX
;
2772 x
= protect_from_queue (x
, 1);
2773 y
= protect_from_queue (y
, 0);
/* Mode sanity check (the failing branch was dropped in extraction).  */
2775 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2778 /* Never force constant_p_rtx to memory. */
2779 if (GET_CODE (y
) == CONSTANT_P_RTX
)
/* Constant source: maybe emit as a float extension, else spill to the
   constant pool when not directly legitimate.  */
2781 else if (CONSTANT_P (y
))
2784 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2785 && (last_insn
= compress_float_constant (x
, y
)))
2790 if (!LEGITIMATE_CONSTANT_P (y
))
2792 y
= force_const_mem (mode
, y
)
;
2794 /* If the target's cannot_force_const_mem prevented the spill,
2795 assume that the target's move expanders will also take care
2796 of the non-legitimate constant. */
2802 /* If X or Y are memory references, verify that their addresses are valid
/* (part of this condition — the reload_in_progress / reload_completed
   clause — was dropped in extraction).  */
2804 if (GET_CODE (x
) == MEM
2805 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2806 && ! push_operand (x
, GET_MODE (x
)))
2808 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2809 x
= validize_mem (x
)
;
2811 if (GET_CODE (y
) == MEM
2812 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2814 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2815 y
= validize_mem (y
)
;
/* BLKmode must not reach the low-level mover (branch body dropped).  */
2817 if (mode
== BLKmode
)
2820 last_insn
= emit_move_insn_1 (x
, y
)
;
/* Attach a REG_EQUAL note recording the original constant when the
   move's destination is X itself and the source was transformed.  */
2822 if (y_cst
&& GET_CODE (x
) == REG
2823 && (set
= single_set (last_insn
)) != NULL_RTX
2824 && SET_DEST (set
) == x
2825 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2826 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
2831 /* Low level part of emit_move_insn.
2832 Called just like emit_move_insn, but assumes X and Y
2833 are basically valid. */
2836 emit_move_insn_1 (rtx x
, rtx y
)
2838 enum machine_mode mode
= GET_MODE (x
);
2839 enum machine_mode submode
;
2840 enum mode_class
class = GET_MODE_CLASS (mode
);
2842 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2845 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2847 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2849 /* Expand complex moves by moving real part and imag part, if possible. */
2850 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2851 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2852 && (mov_optab
->handlers
[(int) submode
].insn_code
2853 != CODE_FOR_nothing
))
2855 /* Don't split destination if it is a stack push. */
2856 int stack
= push_operand (x
, GET_MODE (x
));
2858 #ifdef PUSH_ROUNDING
2859 /* In case we output to the stack, but the size is smaller than the
2860 machine can push exactly, we need to use move instructions. */
2862 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2863 != GET_MODE_SIZE (submode
)))
2866 HOST_WIDE_INT offset1
, offset2
;
2868 /* Do not use anti_adjust_stack, since we don't want to update
2869 stack_pointer_delta. */
2870 temp
= expand_binop (Pmode
,
2871 #ifdef STACK_GROWS_DOWNWARD
2879 (GET_MODE_SIZE (GET_MODE (x
)))),
2880 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2882 if (temp
!= stack_pointer_rtx
)
2883 emit_move_insn (stack_pointer_rtx
, temp
);
2885 #ifdef STACK_GROWS_DOWNWARD
2887 offset2
= GET_MODE_SIZE (submode
);
2889 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2890 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2891 + GET_MODE_SIZE (submode
));
2894 emit_move_insn (change_address (x
, submode
,
2895 gen_rtx_PLUS (Pmode
,
2897 GEN_INT (offset1
))),
2898 gen_realpart (submode
, y
));
2899 emit_move_insn (change_address (x
, submode
,
2900 gen_rtx_PLUS (Pmode
,
2902 GEN_INT (offset2
))),
2903 gen_imagpart (submode
, y
));
2907 /* If this is a stack, push the highpart first, so it
2908 will be in the argument order.
2910 In that case, change_address is used only to convert
2911 the mode, not to change the address. */
2914 /* Note that the real part always precedes the imag part in memory
2915 regardless of machine's endianness. */
2916 #ifdef STACK_GROWS_DOWNWARD
2917 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2918 gen_imagpart (submode
, y
));
2919 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2920 gen_realpart (submode
, y
));
2922 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2923 gen_realpart (submode
, y
));
2924 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2925 gen_imagpart (submode
, y
));
2930 rtx realpart_x
, realpart_y
;
2931 rtx imagpart_x
, imagpart_y
;
2933 /* If this is a complex value with each part being smaller than a
2934 word, the usual calling sequence will likely pack the pieces into
2935 a single register. Unfortunately, SUBREG of hard registers only
2936 deals in terms of words, so we have a problem converting input
2937 arguments to the CONCAT of two registers that is used elsewhere
2938 for complex values. If this is before reload, we can copy it into
2939 memory and reload. FIXME, we should see about using extract and
2940 insert on integer registers, but complex short and complex char
2941 variables should be rarely used. */
2942 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2943 && (reload_in_progress
| reload_completed
) == 0)
2946 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2948 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2950 if (packed_dest_p
|| packed_src_p
)
2952 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2953 ? MODE_FLOAT
: MODE_INT
);
2955 enum machine_mode reg_mode
2956 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2958 if (reg_mode
!= BLKmode
)
2960 rtx mem
= assign_stack_temp (reg_mode
,
2961 GET_MODE_SIZE (mode
), 0);
2962 rtx cmem
= adjust_address (mem
, mode
, 0);
2965 = N_("function using short complex types cannot be inline");
2969 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2971 emit_move_insn_1 (cmem
, y
);
2972 return emit_move_insn_1 (sreg
, mem
);
2976 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2978 emit_move_insn_1 (mem
, sreg
);
2979 return emit_move_insn_1 (x
, cmem
);
2985 realpart_x
= gen_realpart (submode
, x
);
2986 realpart_y
= gen_realpart (submode
, y
);
2987 imagpart_x
= gen_imagpart (submode
, x
);
2988 imagpart_y
= gen_imagpart (submode
, y
);
2990 /* Show the output dies here. This is necessary for SUBREGs
2991 of pseudos since we cannot track their lifetimes correctly;
2992 hard regs shouldn't appear here except as return values.
2993 We never want to emit such a clobber after reload. */
2995 && ! (reload_in_progress
|| reload_completed
)
2996 && (GET_CODE (realpart_x
) == SUBREG
2997 || GET_CODE (imagpart_x
) == SUBREG
))
2998 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3000 emit_move_insn (realpart_x
, realpart_y
);
3001 emit_move_insn (imagpart_x
, imagpart_y
);
3004 return get_last_insn ();
3007 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3008 find a mode to do it in. If we have a movcc, use it. Otherwise,
3009 find the MODE_INT mode of the same width. */
3010 else if (GET_MODE_CLASS (mode
) == MODE_CC
3011 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3013 enum insn_code insn_code
;
3014 enum machine_mode tmode
= VOIDmode
;
3018 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3021 for (tmode
= QImode
; tmode
!= VOIDmode
;
3022 tmode
= GET_MODE_WIDER_MODE (tmode
))
3023 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3026 if (tmode
== VOIDmode
)
3029 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3030 may call change_address which is not appropriate if we were
3031 called when a reload was in progress. We don't have to worry
3032 about changing the address since the size in bytes is supposed to
3033 be the same. Copy the MEM to change the mode and move any
3034 substitutions from the old MEM to the new one. */
3036 if (reload_in_progress
)
3038 x
= gen_lowpart_common (tmode
, x1
);
3039 if (x
== 0 && GET_CODE (x1
) == MEM
)
3041 x
= adjust_address_nv (x1
, tmode
, 0);
3042 copy_replacements (x1
, x
);
3045 y
= gen_lowpart_common (tmode
, y1
);
3046 if (y
== 0 && GET_CODE (y1
) == MEM
)
3048 y
= adjust_address_nv (y1
, tmode
, 0);
3049 copy_replacements (y1
, y
);
3054 x
= gen_lowpart (tmode
, x
);
3055 y
= gen_lowpart (tmode
, y
);
3058 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3059 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3062 /* Try using a move pattern for the corresponding integer mode. This is
3063 only safe when simplify_subreg can convert MODE constants into integer
3064 constants. At present, it can only do this reliably if the value
3065 fits within a HOST_WIDE_INT. */
3066 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3067 && (submode
= int_mode_for_mode (mode
)) != BLKmode
3068 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
3069 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
3070 (simplify_gen_subreg (submode
, x
, mode
, 0),
3071 simplify_gen_subreg (submode
, y
, mode
, 0)));
3073 /* This will handle any multi-word or full-word mode that lacks a move_insn
3074 pattern. However, you will get better code if you define such patterns,
3075 even if they must turn into multiple assembler instructions. */
3076 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3083 #ifdef PUSH_ROUNDING
3085 /* If X is a push on the stack, do the push now and replace
3086 X with a reference to the stack pointer. */
3087 if (push_operand (x
, GET_MODE (x
)))
3092 /* Do not use anti_adjust_stack, since we don't want to update
3093 stack_pointer_delta. */
3094 temp
= expand_binop (Pmode
,
3095 #ifdef STACK_GROWS_DOWNWARD
3103 (GET_MODE_SIZE (GET_MODE (x
)))),
3104 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3106 if (temp
!= stack_pointer_rtx
)
3107 emit_move_insn (stack_pointer_rtx
, temp
);
3109 code
= GET_CODE (XEXP (x
, 0));
3111 /* Just hope that small offsets off SP are OK. */
3112 if (code
== POST_INC
)
3113 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3114 GEN_INT (-((HOST_WIDE_INT
)
3115 GET_MODE_SIZE (GET_MODE (x
)))));
3116 else if (code
== POST_DEC
)
3117 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3118 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3120 temp
= stack_pointer_rtx
;
3122 x
= change_address (x
, VOIDmode
, temp
);
3126 /* If we are in reload, see if either operand is a MEM whose address
3127 is scheduled for replacement. */
3128 if (reload_in_progress
&& GET_CODE (x
) == MEM
3129 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3130 x
= replace_equiv_address_nv (x
, inner
);
3131 if (reload_in_progress
&& GET_CODE (y
) == MEM
3132 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3133 y
= replace_equiv_address_nv (y
, inner
);
3139 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3142 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3143 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3145 /* If we can't get a part of Y, put Y into memory if it is a
3146 constant. Otherwise, force it into a register. If we still
3147 can't get a part of Y, abort. */
3148 if (ypart
== 0 && CONSTANT_P (y
))
3150 y
= force_const_mem (mode
, y
);
3151 ypart
= operand_subword (y
, i
, 1, mode
);
3153 else if (ypart
== 0)
3154 ypart
= operand_subword_force (y
, i
, mode
);
3156 if (xpart
== 0 || ypart
== 0)
3159 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3161 last_insn
= emit_move_insn (xpart
, ypart
);
3167 /* Show the output dies here. This is necessary for SUBREGs
3168 of pseudos since we cannot track their lifetimes correctly;
3169 hard regs shouldn't appear here except as return values.
3170 We never want to emit such a clobber after reload. */
3172 && ! (reload_in_progress
|| reload_completed
)
3173 && need_clobber
!= 0)
3174 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3184 /* If Y is representable exactly in a narrower mode, and the target can
3185 perform the extension directly from constant or memory, then emit the
3186 move as an extension. */
/* compress_float_constant: if float constant Y is exactly representable
   in a narrower mode and the target can extend directly from constant or
   memory, emit the move X <- Y as an extension (per the doc comment
   preceding this function).
   NOTE(review): extraction-damaged — GCC line numbers fused into text;
   missing from this view: the REAL_VALUE_TYPE r and enum insn_code ic
   declarations, the loop `continue`s, and the final `return 0;` /
   `return last_insn;`.  Restore from VCS; comments only added here.  */
3189 compress_float_constant (rtx x
, rtx y
)
3191 enum machine_mode dstmode
= GET_MODE (x
);
3192 enum machine_mode orig_srcmode
= GET_MODE (y
);
3193 enum machine_mode srcmode
;
/* Decode the CONST_DOUBLE into a REAL_VALUE_TYPE (r's declaration
   line was dropped in extraction).  */
3196 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
/* Try every mode strictly narrower than the original, narrowest
   first.  */
3198 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3199 srcmode
!= orig_srcmode
;
3200 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3203 rtx trunc_y
, last_insn
;
3205 /* Skip if the target can't extend this way. */
3206 ic
= can_extend_p (dstmode
, srcmode
, 0);
3207 if (ic
== CODE_FOR_nothing
)
3210 /* Skip if the narrowed value isn't exact. */
3211 if (! exact_real_truncate (srcmode
, &r
))
3214 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
/* Legitimate constants may feed the extend directly — provided the
   pattern's operand predicate accepts them.  */
3216 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3218 /* Skip if the target needs extra instructions to perform
3220 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
/* Otherwise extend from a constant-pool MEM when supported.  */
3223 else if (float_extend_from_mem
[dstmode
][srcmode
])
3224 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3228 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3229 last_insn
= get_last_insn ();
/* Record the original wide constant on the insn for later passes.  */
3231 if (GET_CODE (x
) == REG
)
3232 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3240 /* Pushing data onto the stack. */
3242 /* Push a block of length SIZE (perhaps variable)
3243 and return an rtx to address the beginning of the block.
3244 Note that it is not possible for the value returned to be a QUEUED.
3245 The value may be virtual_outgoing_args_rtx.
3247 EXTRA is the number of bytes of padding to push in addition to SIZE.
3248 BELOW nonzero means this padding comes at low addresses;
3249 otherwise, the padding comes at high addresses. */
/* push_block: push a block of SIZE bytes (plus EXTRA padding bytes, at
   low addresses when BELOW is nonzero, else high) and return an rtx
   addressing the start of the block; may return
   virtual_outgoing_args_rtx (per the doc comment preceding this
   function).
   NOTE(review): extraction-damaged — GCC line numbers fused into text;
   missing here: `rtx temp;` declaration, the `if (extra)` guard before
   the add, the `#else`/`#endif` structure of the STACK_GROWS_DOWNWARD
   conditional and the anti_adjust_stack(GEN_INT(extra)) arm.  Restore
   from VCS; comments only added here.  */
3252 push_block (rtx size
, int extra
, int below
)
/* Normalize SIZE to Pmode before doing address arithmetic with it.  */
3256 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3257 if (CONSTANT_P (size
))
3258 anti_adjust_stack (plus_constant (size
, extra
));
3259 else if (GET_CODE (size
) == REG
&& extra
== 0)
3260 anti_adjust_stack (size
);
/* General case: materialize SIZE (+EXTRA) in a register, then adjust.  */
3263 temp
= copy_to_mode_reg (Pmode
, size
)
;
3265 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3266 temp
, 0, OPTAB_LIB_WIDEN
);
3267 anti_adjust_stack (temp
);
/* Upward-growing stack: the new block starts at the old argument
   pointer (plus padding when it sits below).  */
3270 #ifndef STACK_GROWS_DOWNWARD
3276 temp
= virtual_outgoing_args_rtx
;
3277 if (extra
!= 0 && below
)
3278 temp
= plus_constant (temp
, extra
)
;
/* Downward-growing stack: compute the (negative) offset back from the
   outgoing-args pointer.  */
3282 if (GET_CODE (size
) == CONST_INT
)
3283 temp
= plus_constant (virtual_outgoing_args_rtx
,
3284 -INTVAL (size
) - (below
? 0 : extra
));
3285 else if (extra
!= 0 && !below
)
3286 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3287 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3289 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3290 negate_rtx (Pmode
, size
));
/* Legitimize the computed address before handing it back.  */
3293 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
3296 #ifdef PUSH_ROUNDING
3298 /* Emit single push insn. */
3301 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
3304 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3306 enum insn_code icode
;
3307 insn_operand_predicate_fn pred
;
3309 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3310 /* If there is push pattern, use it. Otherwise try old way of throwing
3311 MEM representing push operation to move expander. */
3312 icode
= push_optab
->handlers
[(int) mode
].insn_code
;
3313 if (icode
!= CODE_FOR_nothing
)
3315 if (((pred
= insn_data
[(int) icode
].operand
[0].predicate
)
3316 && !((*pred
) (x
, mode
))))
3317 x
= force_reg (mode
, x
);
3318 emit_insn (GEN_FCN (icode
) (x
));
3321 if (GET_MODE_SIZE (mode
) == rounded_size
)
3322 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3323 /* If we are to pad downward, adjust the stack pointer first and
3324 then store X into the stack location using an offset. This is
3325 because emit_move_insn does not know how to pad; it does not have
3327 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
3329 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
3330 HOST_WIDE_INT offset
;
3332 emit_move_insn (stack_pointer_rtx
,
3333 expand_binop (Pmode
,
3334 #ifdef STACK_GROWS_DOWNWARD
3340 GEN_INT (rounded_size
),
3341 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
3343 offset
= (HOST_WIDE_INT
) padding_size
;
3344 #ifdef STACK_GROWS_DOWNWARD
3345 if (STACK_PUSH_CODE
== POST_DEC
)
3346 /* We have already decremented the stack pointer, so get the
3348 offset
+= (HOST_WIDE_INT
) rounded_size
;
3350 if (STACK_PUSH_CODE
== POST_INC
)
3351 /* We have already incremented the stack pointer, so get the
3353 offset
-= (HOST_WIDE_INT
) rounded_size
;
3355 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (offset
));
3359 #ifdef STACK_GROWS_DOWNWARD
3360 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3361 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3362 GEN_INT (-(HOST_WIDE_INT
) rounded_size
));
3364 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3365 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3366 GEN_INT (rounded_size
));
3368 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
3371 dest
= gen_rtx_MEM (mode
, dest_addr
);
3375 set_mem_attributes (dest
, type
, 1);
3377 if (flag_optimize_sibling_calls
)
3378 /* Function incoming arguments may overlap with sibling call
3379 outgoing arguments and we cannot allow reordering of reads
3380 from function arguments with stores to outgoing arguments
3381 of sibling calls. */
3382 set_mem_alias_set (dest
, 0);
3384 emit_move_insn (dest
, x
);
3388 /* Generate code to push X onto the stack, assuming it has mode MODE and
3390 MODE is redundant except when X is a CONST_INT (since they don't
3392 SIZE is an rtx for the size of data to be copied (in bytes),
3393 needed only if X is BLKmode.
3395 ALIGN (in bits) is maximum alignment we can assume.
3397 If PARTIAL and REG are both nonzero, then copy that many of the first
3398 words of X into registers starting with REG, and push the rest of X.
3399 The amount of space pushed is decreased by PARTIAL words,
3400 rounded *down* to a multiple of PARM_BOUNDARY.
3401 REG must be a hard register in this case.
3402 If REG is zero but PARTIAL is not, take any all others actions for an
3403 argument partially in registers, but do not actually load any
3406 EXTRA is the amount in bytes of extra space to leave next to this arg.
3407 This is ignored if an argument block has already been allocated.
3409 On a machine that lacks real push insns, ARGS_ADDR is the address of
3410 the bottom of the argument block for this call. We use indexing off there
3411 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3412 argument block has not been preallocated.
3414 ARGS_SO_FAR is the size of args previously pushed for this call.
3416 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3417 for arguments passed in registers. If nonzero, it will be the number
3418 of bytes required. */
/* Emit code to push argument X (mode MODE, tree type TYPE) onto the
   stack, or to copy its PARTIAL leading words into REG instead (see
   the parameter description in the comment above this function).
   NOTE(review): this copy of the file is missing interior source
   lines (the embedded line numbers are not contiguous); the surviving
   code is preserved byte-for-byte below and only comments were added.  */
3421 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3422 unsigned int align
, int partial
, rtx reg
, int extra
,
3423 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3427 enum direction stack_direction
3428 #ifdef STACK_GROWS_DOWNWARD
3434 /* Decide where to pad the argument: `downward' for below,
3435 `upward' for above, or `none' for don't pad it.
3436 Default is below for small data on big-endian machines; else above. */
3437 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3439 /* Invert direction if stack is post-decrement.
3441 if (STACK_PUSH_CODE
== POST_DEC
)
3442 if (where_pad
!= none
)
3443 where_pad
= (where_pad
== downward
? upward
: downward
);
3445 xinner
= x
= protect_from_queue (x
, 0);
/* BLKmode arguments are blocks of memory: pushed piecewise with
   move_by_pieces when cheap, otherwise copied into stack space made
   by push_block / indexed off ARGS_ADDR.  */
3447 if (mode
== BLKmode
)
3449 /* Copy a block into the stack, entirely or partially. */
3452 int used
= partial
* UNITS_PER_WORD
;
3456 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3458 /* Use the size of the elt to compute offset. */
3459 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3460 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3461 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3464 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3471 /* USED is now the # of bytes we need not copy to the stack
3472 because registers will take care of them. */
3475 xinner
= adjust_address (xinner
, BLKmode
, used
);
3477 /* If the partial register-part of the arg counts in its stack size,
3478 skip the part of stack space corresponding to the registers.
3479 Otherwise, start copying to the beginning of the stack space,
3480 by setting SKIP to 0. */
3481 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3483 #ifdef PUSH_ROUNDING
3484 /* Do it with several push insns if that doesn't take lots of insns
3485 and if there is no difficulty with push insns that skip bytes
3486 on the stack for alignment purposes. */
3489 && GET_CODE (size
) == CONST_INT
3491 && MEM_ALIGN (xinner
) >= align
3492 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3493 /* Here we avoid the case of a structure whose weak alignment
3494 forces many pushes of a small amount of data,
3495 and such small pushes do rounding that causes trouble. */
3496 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3497 || align
>= BIGGEST_ALIGNMENT
3498 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3499 == (align
/ BITS_PER_UNIT
)))
3500 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3502 /* Push padding now if padding above and stack grows down,
3503 or if padding below and stack grows up.
3504 But if space already allocated, this has already been done. */
3505 if (extra
&& args_addr
== 0
3506 && where_pad
!= none
&& where_pad
!= stack_direction
)
3507 anti_adjust_stack (GEN_INT (extra
));
3509 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3512 #endif /* PUSH_ROUNDING */
3516 /* Otherwise make space on the stack and copy the data
3517 to the address of that space. */
3519 /* Deduct words put into registers from the size we must copy. */
3522 if (GET_CODE (size
) == CONST_INT
)
3523 size
= GEN_INT (INTVAL (size
) - used
);
3525 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3526 GEN_INT (used
), NULL_RTX
, 0,
3530 /* Get the address of the stack space.
3531 In this case, we do not deal with EXTRA separately.
3532 A single stack adjust will do. */
3535 temp
= push_block (size
, extra
, where_pad
== downward
);
3538 else if (GET_CODE (args_so_far
) == CONST_INT
)
3539 temp
= memory_address (BLKmode
,
3540 plus_constant (args_addr
,
3541 skip
+ INTVAL (args_so_far
)));
3543 temp
= memory_address (BLKmode
,
3544 plus_constant (gen_rtx_PLUS (Pmode
,
3549 if (!ACCUMULATE_OUTGOING_ARGS
)
3551 /* If the source is referenced relative to the stack pointer,
3552 copy it to another register to stabilize it. We do not need
3553 to do this if we know that we won't be changing sp. */
3555 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3556 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3557 temp
= copy_to_reg (temp
);
3560 target
= gen_rtx_MEM (BLKmode
, temp
);
3564 set_mem_attributes (target
, type
, 1);
3565 /* Function incoming arguments may overlap with sibling call
3566 outgoing arguments and we cannot allow reordering of reads
3567 from function arguments with stores to outgoing arguments
3568 of sibling calls. */
3569 set_mem_alias_set (target
, 0);
3572 /* ALIGN may well be better aligned than TYPE, e.g. due to
3573 PARM_BOUNDARY. Assume the caller isn't lying. */
3574 set_mem_align (target
, align
);
3576 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3579 else if (partial
> 0)
3581 /* Scalar partly in registers. */
3583 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3586 /* # words of start of argument
3587 that we must make space for but need not store. */
3588 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3589 int args_offset
= INTVAL (args_so_far
);
3592 /* Push padding now if padding above and stack grows down,
3593 or if padding below and stack grows up.
3594 But if space already allocated, this has already been done. */
3595 if (extra
&& args_addr
== 0
3596 && where_pad
!= none
&& where_pad
!= stack_direction
)
3597 anti_adjust_stack (GEN_INT (extra
));
3599 /* If we make space by pushing it, we might as well push
3600 the real data. Otherwise, we can leave OFFSET nonzero
3601 and leave the space uninitialized. */
3605 /* Now NOT_STACK gets the number of words that we don't need to
3606 allocate on the stack. */
3607 not_stack
= partial
- offset
;
3609 /* If the partial register-part of the arg counts in its stack size,
3610 skip the part of stack space corresponding to the registers.
3611 Otherwise, start copying to the beginning of the stack space,
3612 by setting SKIP to 0. */
3613 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3615 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3616 x
= validize_mem (force_const_mem (mode
, x
));
3618 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3619 SUBREGs of such registers are not allowed. */
3620 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3621 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3622 x
= copy_to_reg (x
);
3624 /* Loop over all the words allocated on the stack for this arg. */
3625 /* We can do it by words, because any scalar bigger than a word
3626 has a size a multiple of a word. */
3627 #ifndef PUSH_ARGS_REVERSED
3628 for (i
= not_stack
; i
< size
; i
++)
3630 for (i
= size
- 1; i
>= not_stack
; i
--)
3632 if (i
>= not_stack
+ offset
)
3633 emit_push_insn (operand_subword_force (x
, i
, mode
),
3634 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3636 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3638 reg_parm_stack_space
, alignment_pad
);
3645 /* Push padding now if padding above and stack grows down,
3646 or if padding below and stack grows up.
3647 But if space already allocated, this has already been done. */
3648 if (extra
&& args_addr
== 0
3649 && where_pad
!= none
&& where_pad
!= stack_direction
)
3650 anti_adjust_stack (GEN_INT (extra
));
/* Ordinary scalar: emit a real push insn when no argument block was
   preallocated and the target supports pushes; otherwise compute the
   slot address from ARGS_ADDR/ARGS_SO_FAR and store into it.  */
3652 #ifdef PUSH_ROUNDING
3653 if (args_addr
== 0 && PUSH_ARGS
)
3654 emit_single_push_insn (mode
, x
, type
);
3658 if (GET_CODE (args_so_far
) == CONST_INT
)
3660 = memory_address (mode
,
3661 plus_constant (args_addr
,
3662 INTVAL (args_so_far
)));
3664 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3666 dest
= gen_rtx_MEM (mode
, addr
);
3669 set_mem_attributes (dest
, type
, 1);
3670 /* Function incoming arguments may overlap with sibling call
3671 outgoing arguments and we cannot allow reordering of reads
3672 from function arguments with stores to outgoing arguments
3673 of sibling calls. */
3674 set_mem_alias_set (dest
, 0);
3677 emit_move_insn (dest
, x
);
3681 /* If part should go in registers, copy that part
3682 into the appropriate registers. Do this now, at the end,
3683 since mem-to-mem copies above may do function calls. */
3684 if (partial
> 0 && reg
!= 0)
3686 /* Handle calls that pass values in multiple non-contiguous locations.
3687 The Irix 6 ABI has examples of this. */
3688 if (GET_CODE (reg
) == PARALLEL
)
3689 emit_group_load (reg
, x
, type
, -1);
3691 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3694 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3695 anti_adjust_stack (GEN_INT (extra
));
3697 if (alignment_pad
&& args_addr
== 0)
3698 anti_adjust_stack (alignment_pad
);
3701 /* Return X if X can be used as a subtarget in a sequence of arithmetic
/* NOTE(review): the return type, the leading test and the return
   statements of this function are missing from this copy of the file.
   The surviving guard conditions below reject anything that is not a
   pseudo register (non-REG rtx, read-only registers, hard registers)
   and contexts where subexpressions must be preserved.  */
3705 get_subtarget (rtx x
)
3708 /* Only registers can be subtargets. */
3709 || GET_CODE (x
) != REG
3710 /* If the register is readonly, it can't be set more than once. */
3711 || RTX_UNCHANGING_P (x
)
3712 /* Don't use hard regs to avoid extending their life. */
3713 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3714 /* Avoid subtargets inside loops,
3715 since they hide some invariant expressions. */
3716 || preserve_subexpressions_p ())
3720 /* Expand an assignment that stores the value of FROM into TO.
3721 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3722 (This may contain a QUEUED rtx;
3723 if the value is constant, this rtx is a constant.)
3724 Otherwise, the returned value is NULL_RTX. */
/* Expand an assignment of FROM into TO; see the comment above this
   function for the WANT_VALUE contract.  NOTE(review): interior
   source lines are missing from this copy of the file; the surviving
   code is preserved byte-for-byte and only comments were added.  */
3727 expand_assignment (tree to
, tree from
, int want_value
)
3732 /* Don't crash if the lhs of the assignment was erroneous. */
3734 if (TREE_CODE (to
) == ERROR_MARK
)
3736 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3737 return want_value
? result
: NULL_RTX
;
3740 /* Assignment of a structure component needs special treatment
3741 if the structure component's rtx is not simply a MEM.
3742 Assignment of an array element at a constant index, and assignment of
3743 an array element in an unaligned packed structure field, has the same
3746 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3747 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3748 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3750 enum machine_mode mode1
;
3751 HOST_WIDE_INT bitsize
, bitpos
;
3759 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3760 &unsignedp
, &volatilep
);
3762 /* If we are going to use store_bit_field and extract_bit_field,
3763 make sure to_rtx will be safe for multiple use. */
3765 if (mode1
== VOIDmode
&& want_value
)
3766 tem
= stabilize_reference (tem
);
3768 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3772 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3774 if (GET_CODE (to_rtx
) != MEM
)
3777 #ifdef POINTERS_EXTEND_UNSIGNED
3778 if (GET_MODE (offset_rtx
) != Pmode
)
3779 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3781 if (GET_MODE (offset_rtx
) != ptr_mode
)
3782 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3785 /* A constant address in TO_RTX can have VOIDmode, we must not try
3786 to call force_reg for that case. Avoid that case. */
3787 if (GET_CODE (to_rtx
) == MEM
3788 && GET_MODE (to_rtx
) == BLKmode
3789 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3791 && (bitpos
% bitsize
) == 0
3792 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3793 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3795 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3799 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3800 highest_pow2_factor_for_type (TREE_TYPE (to
),
3804 if (GET_CODE (to_rtx
) == MEM
)
3806 /* If the field is at offset zero, we could have been given the
3807 DECL_RTX of the parent struct. Don't munge it. */
3808 to_rtx
= shallow_copy_rtx (to_rtx
);
3810 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3813 /* Deal with volatile and readonly fields. The former is only done
3814 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3815 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3817 if (to_rtx
== orig_to_rtx
)
3818 to_rtx
= copy_rtx (to_rtx
);
3819 MEM_VOLATILE_P (to_rtx
) = 1;
3822 if (TREE_CODE (to
) == COMPONENT_REF
3823 && TREE_READONLY (TREE_OPERAND (to
, 1))
3824 /* We can't assert that a MEM won't be set more than once
3825 if the component is not addressable because another
3826 non-addressable component may be referenced by the same MEM. */
3827 && ! (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
)))
3829 if (to_rtx
== orig_to_rtx
)
3830 to_rtx
= copy_rtx (to_rtx
);
3831 RTX_UNCHANGING_P (to_rtx
) = 1;
3834 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3836 if (to_rtx
== orig_to_rtx
)
3837 to_rtx
= copy_rtx (to_rtx
);
3838 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3841 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3843 /* Spurious cast for HPUX compiler. */
3844 ? ((enum machine_mode
)
3845 TYPE_MODE (TREE_TYPE (to
)))
3847 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3849 preserve_temp_slots (result
);
3853 /* If the value is meaningful, convert RESULT to the proper mode.
3854 Otherwise, return nothing. */
3855 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3856 TYPE_MODE (TREE_TYPE (from
)),
3858 TREE_UNSIGNED (TREE_TYPE (to
)))
3862 /* If the rhs is a function call and its value is not an aggregate,
3863 call the function before we start to compute the lhs.
3864 This is needed for correct code for cases such as
3865 val = setjmp (buf) on machines where reference to val
3866 requires loading up part of an address in a separate insn.
3868 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3869 since it might be a promoted variable where the zero- or sign- extension
3870 needs to be done. Handling this in the normal way is safe because no
3871 computation is done before the call. */
3872 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3874 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3875 && GET_CODE (DECL_RTL (to
)) == REG
))
3880 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3882 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3884 /* Handle calls that return values in multiple non-contiguous locations.
3885 The Irix 6 ABI has examples of this. */
3886 if (GET_CODE (to_rtx
) == PARALLEL
)
3887 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3888 int_size_in_bytes (TREE_TYPE (from
)));
3889 else if (GET_MODE (to_rtx
) == BLKmode
)
3890 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3893 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3894 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3895 emit_move_insn (to_rtx
, value
);
3897 preserve_temp_slots (to_rtx
);
3900 return want_value
? to_rtx
: NULL_RTX
;
3903 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3904 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3907 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3909 /* Don't move directly into a return register. */
3910 if (TREE_CODE (to
) == RESULT_DECL
3911 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3916 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3918 if (GET_CODE (to_rtx
) == PARALLEL
)
3919 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3920 int_size_in_bytes (TREE_TYPE (from
)));
3922 emit_move_insn (to_rtx
, temp
);
3924 preserve_temp_slots (to_rtx
);
3927 return want_value
? to_rtx
: NULL_RTX
;
3930 /* In case we are returning the contents of an object which overlaps
3931 the place the value is being stored, use a safe function when copying
3932 a value through a pointer into a structure value return block. */
3933 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3934 && current_function_returns_struct
3935 && !current_function_returns_pcc_struct
)
3940 size
= expr_size (from
);
3941 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3943 if (TARGET_MEM_FUNCTIONS
)
3944 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3945 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3946 XEXP (from_rtx
, 0), Pmode
,
3947 convert_to_mode (TYPE_MODE (sizetype
),
3948 size
, TREE_UNSIGNED (sizetype
)),
3949 TYPE_MODE (sizetype
));
3951 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3952 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3953 XEXP (to_rtx
, 0), Pmode
,
3954 convert_to_mode (TYPE_MODE (integer_type_node
),
3956 TREE_UNSIGNED (integer_type_node
)),
3957 TYPE_MODE (integer_type_node
));
3959 preserve_temp_slots (to_rtx
);
3962 return want_value
? to_rtx
: NULL_RTX
;
3965 /* Compute FROM and store the value in the rtx we got. */
3968 result
= store_expr (from
, to_rtx
, want_value
);
3969 preserve_temp_slots (result
);
3972 return want_value
? result
: NULL_RTX
;
3975 /* Generate code for computing expression EXP,
3976 and storing the value into TARGET.
3977 TARGET may contain a QUEUED rtx.
3979 If WANT_VALUE & 1 is nonzero, return a copy of the value
3980 not in TARGET, so that we can be sure to use the proper
3981 value in a containing expression even if TARGET has something
3982 else stored in it. If possible, we copy the value through a pseudo
3983 and return that pseudo. Or, if the value is constant, we try to
3984 return the constant. In some cases, we return a pseudo
3985 copied *from* TARGET.
3987 If the mode is BLKmode then we may return TARGET itself.
3988 It turns out that in BLKmode it doesn't cause a problem,
3989 because C has no operators that could combine two different
3990 assignments into the same BLKmode object with different values
3991 with no sequence point. Will other languages need this to
3994 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3995 to catch quickly any cases where the caller uses the value
3996 and fails to set WANT_VALUE.
3998 If WANT_VALUE & 2 is set, this is a store into a call param on the
3999 stack, and block moves may need to be treated specially. */
/* Store the value of EXP into TARGET; see the comment above this
   function for the WANT_VALUE bit semantics (bit 0 = caller wants a
   value back, bit 1 = store into a call parameter on the stack).
   NOTE(review): interior source lines are missing from this copy of
   the file; the surviving code is preserved byte-for-byte and only
   comments were added.  */
4002 store_expr (tree exp
, rtx target
, int want_value
)
4005 rtx alt_rtl
= NULL_RTX
;
4006 int dont_return_target
= 0;
4007 int dont_store_target
= 0;
4009 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4011 /* C++ can generate ?: expressions with a throw expression in one
4012 branch and an rvalue in the other. Here, we resolve attempts to
4013 store the throw expression's nonexistent result. */
4016 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4019 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4021 /* Perform first part of compound expression, then assign from second
4023 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4024 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4026 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4028 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4030 /* For conditional expression, get safe form of the target. Then
4031 test the condition, doing the appropriate assignment on either
4032 side. This avoids the creation of unnecessary temporaries.
4033 For non-BLKmode, it is more efficient not to do this. */
4035 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4038 target
= protect_from_queue (target
, 1);
4040 do_pending_stack_adjust ();
4042 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4043 start_cleanup_deferral ();
4044 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4045 end_cleanup_deferral ();
4047 emit_jump_insn (gen_jump (lab2
));
4050 start_cleanup_deferral ();
4051 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4052 end_cleanup_deferral ();
4057 return want_value
& 1 ? target
: NULL_RTX
;
4059 else if (queued_subexp_p (target
))
4060 /* If target contains a postincrement, let's not risk
4061 using it as the place to generate the rhs. */
4063 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4065 /* Expand EXP into a new pseudo. */
4066 temp
= gen_reg_rtx (GET_MODE (target
));
4067 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4069 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4072 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4074 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4076 /* If target is volatile, ANSI requires accessing the value
4077 *from* the target, if it is accessed. So make that happen.
4078 In no case return the target itself. */
4079 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4080 dont_return_target
= 1;
4082 else if ((want_value
& 1) != 0
4083 && GET_CODE (target
) == MEM
4084 && ! MEM_VOLATILE_P (target
)
4085 && GET_MODE (target
) != BLKmode
)
4086 /* If target is in memory and caller wants value in a register instead,
4087 arrange that. Pass TARGET as target for expand_expr so that,
4088 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4089 We know expand_expr will not use the target in that case.
4090 Don't do this if TARGET is volatile because we are supposed
4091 to write it and then read it. */
4093 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4094 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4095 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4097 /* If TEMP is already in the desired TARGET, only copy it from
4098 memory and don't store it there again. */
4100 || (rtx_equal_p (temp
, target
)
4101 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4102 dont_store_target
= 1;
4103 temp
= copy_to_reg (temp
);
4105 dont_return_target
= 1;
4107 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4108 /* If this is a scalar in a register that is stored in a wider mode
4109 than the declared mode, compute the result into its declared mode
4110 and then convert to the wider mode. Our value is the computed
4113 rtx inner_target
= 0;
4115 /* If we don't want a value, we can do the conversion inside EXP,
4116 which will often result in some optimizations. Do the conversion
4117 in two steps: first change the signedness, if needed, then
4118 the extend. But don't do this if the type of EXP is a subtype
4119 of something else since then the conversion might involve
4120 more than just converting modes. */
4121 if ((want_value
& 1) == 0
4122 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4123 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4125 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4126 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4128 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4129 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4131 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4132 (GET_MODE (SUBREG_REG (target
)),
4133 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4136 inner_target
= SUBREG_REG (target
);
4139 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4140 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4142 /* If TEMP is a MEM and we want a result value, make the access
4143 now so it gets done only once. Strictly speaking, this is
4144 only necessary if the MEM is volatile, or if the address
4145 overlaps TARGET. But not performing the load twice also
4146 reduces the amount of rtl we generate and then have to CSE. */
4147 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4148 temp
= copy_to_reg (temp
);
4150 /* If TEMP is a VOIDmode constant, use convert_modes to make
4151 sure that we properly convert it. */
4152 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4154 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4155 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4156 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4157 GET_MODE (target
), temp
,
4158 SUBREG_PROMOTED_UNSIGNED_P (target
));
4161 convert_move (SUBREG_REG (target
), temp
,
4162 SUBREG_PROMOTED_UNSIGNED_P (target
));
4164 /* If we promoted a constant, change the mode back down to match
4165 target. Otherwise, the caller might get confused by a result whose
4166 mode is larger than expected. */
4168 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4170 if (GET_MODE (temp
) != VOIDmode
)
4172 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4173 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4174 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4175 SUBREG_PROMOTED_UNSIGNED_P (target
));
4178 temp
= convert_modes (GET_MODE (target
),
4179 GET_MODE (SUBREG_REG (target
)),
4180 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4183 return want_value
& 1 ? temp
: NULL_RTX
;
4187 temp
= expand_expr_real (exp
, target
, GET_MODE (target
,
4189 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target
&& GET_CODE (target
) == REG
4199 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4200 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4201 && ! rtx_equal_p (temp
, target
)
4202 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4203 dont_return_target
= 1;
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized
4210 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4211 && TREE_CODE (exp
) != ERROR_MARK
4212 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4213 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4214 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4216 /* If value was not generated in the target, store it there.
4217 Convert the value to TARGET's type first if necessary.
4218 If TEMP and TARGET compare equal according to rtx_equal_p, but
4219 one or both of them are volatile memory refs, we have to distinguish
4221 - expand_expr has used TARGET. In this case, we must not generate
4222 another copy. This can be detected by TARGET being equal according
4224 - expand_expr has not used TARGET - that means that the source just
4225 happens to have the same RTX form. Since temp will have been created
4226 by expand_expr, it will compare unequal according to == .
4227 We must generate a copy in this case, to reach the correct number
4228 of volatile memory references. */
4230 if ((! rtx_equal_p (temp
, target
)
4231 || (temp
!= target
&& (side_effects_p (temp
)
4232 || side_effects_p (target
))))
4233 && TREE_CODE (exp
) != ERROR_MARK
4234 && ! dont_store_target
4235 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4236 but TARGET is not valid memory reference, TEMP will differ
4237 from TARGET although it is really the same location. */
4238 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
4239 /* If there's nothing to copy, don't bother. Don't call expr_size
4240 unless necessary, because some front-ends (C++) expr_size-hook
4241 aborts on objects that are not supposed to be bit-copied or
4243 && expr_size (exp
) != const0_rtx
)
4245 target
= protect_from_queue (target
, 1);
4246 if (GET_MODE (temp
) != GET_MODE (target
)
4247 && GET_MODE (temp
) != VOIDmode
)
4249 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4250 if (dont_return_target
)
4252 /* In this case, we will return TEMP,
4253 so make sure it has the proper mode.
4254 But don't forget to store the value into TARGET. */
4255 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4256 emit_move_insn (target
, temp
);
4259 convert_move (target
, temp
, unsignedp
);
4262 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4264 /* Handle copying a string constant into an array. The string
4265 constant may be shorter than the array. So copy just the string's
4266 actual length, and clear the rest. First get the size of the data
4267 type of the string, which is actually the size of the target. */
4268 rtx size
= expr_size (exp
);
4270 if (GET_CODE (size
) == CONST_INT
4271 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4272 emit_block_move (target
, temp
, size
,
4274 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4277 /* Compute the size of the data to copy from the string. */
4279 = size_binop (MIN_EXPR
,
4280 make_tree (sizetype
, size
),
4281 size_int (TREE_STRING_LENGTH (exp
)));
4283 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4285 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4288 /* Copy that much. */
4289 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4290 TREE_UNSIGNED (sizetype
));
4291 emit_block_move (target
, temp
, copy_size_rtx
,
4293 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4297 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4299 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4300 target
= adjust_address (target
, BLKmode
,
4301 INTVAL (copy_size_rtx
));
4305 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4306 copy_size_rtx
, NULL_RTX
, 0,
4309 #ifdef POINTERS_EXTEND_UNSIGNED
4310 if (GET_MODE (copy_size_rtx
) != Pmode
)
4311 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4312 TREE_UNSIGNED (sizetype
));
4315 target
= offset_address (target
, copy_size_rtx
,
4316 highest_pow2_factor (copy_size
));
4317 label
= gen_label_rtx ();
4318 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4319 GET_MODE (size
), 0, label
);
4322 if (size
!= const0_rtx
)
4323 clear_storage (target
, size
);
4329 /* Handle calls that return values in multiple non-contiguous locations.
4330 The Irix 6 ABI has examples of this. */
4331 else if (GET_CODE (target
) == PARALLEL
)
4332 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4333 int_size_in_bytes (TREE_TYPE (exp
)));
4334 else if (GET_MODE (temp
) == BLKmode
)
4335 emit_block_move (target
, temp
, expr_size (exp
),
4337 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4339 emit_move_insn (target
, temp
);
4342 /* If we don't want a value, return NULL_RTX. */
4343 if ((want_value
& 1) == 0)
4346 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4347 ??? The latter test doesn't seem to make sense. */
4348 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4351 /* Return TARGET itself if it is a hard register. */
4352 else if ((want_value
& 1) != 0
4353 && GET_MODE (target
) != BLKmode
4354 && ! (GET_CODE (target
) == REG
4355 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4356 return copy_to_reg (target
);
4362 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
/* Predicate: 1 if EXP contains only zeros (see the one-line comment
   above).  Recurses through NON_LVALUE/VIEW_CONVERT wrappers and the
   elements of complex, vector and constructor nodes.  NOTE(review):
   some case labels, declarations and return statements are missing
   from this copy of the file; surviving code preserved verbatim.  */
4365 is_zeros_p (tree exp
)
4369 switch (TREE_CODE (exp
))
4373 case NON_LVALUE_EXPR
:
4374 case VIEW_CONVERT_EXPR
:
4375 return is_zeros_p (TREE_OPERAND (exp
, 0));
4378 return integer_zerop (exp
);
4382 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4385 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4388 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4389 elt
= TREE_CHAIN (elt
))
4390 if (!is_zeros_p (TREE_VALUE (elt
)))
4396 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4397 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4398 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4399 if (! is_zeros_p (TREE_VALUE (elt
)))
4409 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* Predicate: 1 if at least 3/4 of the elements of constructor EXP are
   (mostly) zero; non-constructors defer to is_zeros_p.  NOTE(review):
   the statements that increment the ELTS/ZEROS counters are missing
   from this copy of the file; surviving code preserved verbatim.  */
4412 mostly_zeros_p (tree exp
)
4414 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4416 int elts
= 0, zeros
= 0;
4417 tree elt
= CONSTRUCTOR_ELTS (exp
);
4418 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4420 /* If there are no ranges of true bits, it is all zero. */
4421 return elt
== NULL_TREE
;
4423 for (; elt
; elt
= TREE_CHAIN (elt
))
4425 /* We do not handle the case where the index is a RANGE_EXPR,
4426 so the statistic will be somewhat inaccurate.
4427 We do make a more accurate count in store_constructor itself,
4428 so since this function is only used for nested array elements,
4429 this should be close enough. */
4430 if (mostly_zeros_p (TREE_VALUE (elt
)))
4435 return 4 * zeros
>= 3 * elts
;
4438 return is_zeros_p (exp
);
4441 /* Helper function for store_constructor.
4442 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4443 TYPE is the type of the CONSTRUCTOR, not the element type.
4444 CLEARED is as for store_constructor.
4445 ALIAS_SET is the alias set to use for any stores.
4447 This provides a recursive shortcut back to store_constructor when it isn't
4448 necessary to go through store_field. This is so that we can pass through
4449 the cleared field to let store_constructor know that we may not have to
4450 clear a substructure if the outer structure has already been cleared. */
/* Recursive helper (see the comment above): store EXP into the field
   of TARGET described by BITSIZE/BITPOS/MODE, short-cutting to
   store_constructor for byte-aligned nested CONSTRUCTORs so CLEARED
   can propagate.  NOTE(review): some lines (including the final
   arguments and closing brace) are missing from this copy of the
   file; surviving code preserved verbatim.  */
4453 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
4454 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
4455 tree exp
, tree type
, int cleared
, int alias_set
)
4457 if (TREE_CODE (exp
) == CONSTRUCTOR
4458 && bitpos
% BITS_PER_UNIT
== 0
4459 /* If we have a nonzero bitpos for a register target, then we just
4460 let store_field do the bitfield handling. This is unlikely to
4461 generate unnecessary clear instructions anyways. */
4462 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4464 if (GET_CODE (target
) == MEM
)
4466 = adjust_address (target
,
4467 GET_MODE (target
) == BLKmode
4469 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4470 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4473 /* Update the alias set, if required. */
4474 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4475 && MEM_ALIAS_SET (target
) != 0)
4477 target
= copy_rtx (target
);
4478 set_mem_alias_set (target
, alias_set
);
4481 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4484 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4488 /* Store the value of constructor EXP into the rtx TARGET.
4489 TARGET is either a REG or a MEM; we know it cannot conflict, since
4490 safe_from_p has been called.
4491 CLEARED is true if TARGET is known to have been zero'd.
4492 SIZE is the number of bytes of TARGET we are allowed to modify: this
4493 may not be the same as the size of EXP if we are assigning to a field
4494 which has been packed to exclude padding bits. */
4497 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4499 tree type
= TREE_TYPE (exp
);
4500 #ifdef WORD_REGISTER_OPERATIONS
4501 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4504 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4505 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4509 /* If size is zero or the target is already cleared, do nothing. */
4510 if (size
== 0 || cleared
)
4512 /* We either clear the aggregate or indicate the value is dead. */
4513 else if ((TREE_CODE (type
) == UNION_TYPE
4514 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4515 && ! CONSTRUCTOR_ELTS (exp
))
4516 /* If the constructor is empty, clear the union. */
4518 clear_storage (target
, expr_size (exp
));
4522 /* If we are building a static constructor into a register,
4523 set the initial value as zero so we can fold the value into
4524 a constant. But if more than one register is involved,
4525 this probably loses. */
4526 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4527 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4529 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4533 /* If the constructor has fewer fields than the structure
4534 or if we are initializing the structure to mostly zeros,
4535 clear the whole structure first. Don't do this if TARGET is a
4536 register whose mode size isn't equal to SIZE since clear_storage
4537 can't handle this case. */
4538 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4539 || mostly_zeros_p (exp
))
4540 && (GET_CODE (target
) != REG
4541 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4544 rtx xtarget
= target
;
4546 if (readonly_fields_p (type
))
4548 xtarget
= copy_rtx (xtarget
);
4549 RTX_UNCHANGING_P (xtarget
) = 1;
4552 clear_storage (xtarget
, GEN_INT (size
));
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4559 /* Store each element of the constructor into
4560 the corresponding field of TARGET. */
4562 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4564 tree field
= TREE_PURPOSE (elt
);
4565 tree value
= TREE_VALUE (elt
);
4566 enum machine_mode mode
;
4567 HOST_WIDE_INT bitsize
;
4568 HOST_WIDE_INT bitpos
= 0;
4570 rtx to_rtx
= target
;
4572 /* Just ignore missing fields.
4573 We cleared the whole structure, above,
4574 if any fields are missing. */
4578 if (cleared
&& is_zeros_p (value
))
4581 if (host_integerp (DECL_SIZE (field
), 1))
4582 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4586 mode
= DECL_MODE (field
);
4587 if (DECL_BIT_FIELD (field
))
4590 offset
= DECL_FIELD_OFFSET (field
);
4591 if (host_integerp (offset
, 0)
4592 && host_integerp (bit_position (field
), 0))
4594 bitpos
= int_bit_position (field
);
4598 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4604 if (CONTAINS_PLACEHOLDER_P (offset
))
4605 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4606 offset
, make_tree (TREE_TYPE (exp
), target
));
4608 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4609 if (GET_CODE (to_rtx
) != MEM
)
4612 #ifdef POINTERS_EXTEND_UNSIGNED
4613 if (GET_MODE (offset_rtx
) != Pmode
)
4614 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4616 if (GET_MODE (offset_rtx
) != ptr_mode
)
4617 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4620 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4621 highest_pow2_factor (offset
));
4624 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4625 on the MEM might lead to scheduling the clearing after the
4627 if (TREE_READONLY (field
) && !cleared
)
4629 if (GET_CODE (to_rtx
) == MEM
)
4630 to_rtx
= copy_rtx (to_rtx
);
4632 RTX_UNCHANGING_P (to_rtx
) = 1;
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target
) == REG
4641 && bitsize
< BITS_PER_WORD
4642 && bitpos
% BITS_PER_WORD
== 0
4643 && GET_MODE_CLASS (mode
) == MODE_INT
4644 && TREE_CODE (value
) == INTEGER_CST
4646 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4648 tree type
= TREE_TYPE (value
);
4650 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4652 type
= (*lang_hooks
.types
.type_for_size
)
4653 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4654 value
= convert (type
, value
);
4657 if (BYTES_BIG_ENDIAN
)
4659 = fold (build (LSHIFT_EXPR
, type
, value
,
4660 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4661 bitsize
= BITS_PER_WORD
;
4666 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4667 && DECL_NONADDRESSABLE_P (field
))
4669 to_rtx
= copy_rtx (to_rtx
);
4670 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4673 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4674 value
, type
, cleared
,
4675 get_alias_set (TREE_TYPE (field
)));
4678 else if (TREE_CODE (type
) == ARRAY_TYPE
4679 || TREE_CODE (type
) == VECTOR_TYPE
)
4684 tree domain
= TYPE_DOMAIN (type
);
4685 tree elttype
= TREE_TYPE (type
);
4687 HOST_WIDE_INT minelt
= 0;
4688 HOST_WIDE_INT maxelt
= 0;
4692 unsigned n_elts
= 0;
4694 /* Vectors are like arrays, but the domain is stored via an array
4696 if (TREE_CODE (type
) == VECTOR_TYPE
)
4698 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4699 the same field as TYPE_DOMAIN, we are not guaranteed that
4701 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4702 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4703 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
4705 enum machine_mode mode
= GET_MODE (target
);
4707 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
4708 if (icode
!= CODE_FOR_nothing
)
4712 elt_size
= GET_MODE_SIZE (GET_MODE_INNER (mode
));
4713 n_elts
= (GET_MODE_SIZE (mode
) / elt_size
);
4714 vector
= alloca (n_elts
);
4715 for (i
= 0; i
< n_elts
; i
++)
4716 vector
[i
] = CONST0_RTX (GET_MODE_INNER (mode
));
4721 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4722 && TYPE_MAX_VALUE (domain
)
4723 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4724 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4726 /* If we have constant bounds for the range of the type, get them. */
4729 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4730 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4733 /* If the constructor has fewer elements than the array,
4734 clear the whole array first. Similarly if this is
4735 static constructor of a non-BLKmode object. */
4736 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4740 HOST_WIDE_INT count
= 0, zero_count
= 0;
4741 need_to_clear
= ! const_bounds_p
;
4743 /* This loop is a more accurate version of the loop in
4744 mostly_zeros_p (it handles RANGE_EXPR in an index).
4745 It is also needed to check for missing elements. */
4746 for (elt
= CONSTRUCTOR_ELTS (exp
);
4747 elt
!= NULL_TREE
&& ! need_to_clear
;
4748 elt
= TREE_CHAIN (elt
))
4750 tree index
= TREE_PURPOSE (elt
);
4751 HOST_WIDE_INT this_node_count
;
4753 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4755 tree lo_index
= TREE_OPERAND (index
, 0);
4756 tree hi_index
= TREE_OPERAND (index
, 1);
4758 if (! host_integerp (lo_index
, 1)
4759 || ! host_integerp (hi_index
, 1))
4765 this_node_count
= (tree_low_cst (hi_index
, 1)
4766 - tree_low_cst (lo_index
, 1) + 1);
4769 this_node_count
= 1;
4771 count
+= this_node_count
;
4772 if (mostly_zeros_p (TREE_VALUE (elt
)))
4773 zero_count
+= this_node_count
;
4776 /* Clear the entire array first if there are any missing elements,
4777 or if the incidence of zero elements is >= 75%. */
4779 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4783 if (need_to_clear
&& size
> 0 && !vector
)
4788 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4790 clear_storage (target
, GEN_INT (size
));
4794 else if (REG_P (target
))
4795 /* Inform later passes that the old value is dead. */
4796 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4798 /* Store each element of the constructor into
4799 the corresponding element of TARGET, determined
4800 by counting the elements. */
4801 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4803 elt
= TREE_CHAIN (elt
), i
++)
4805 enum machine_mode mode
;
4806 HOST_WIDE_INT bitsize
;
4807 HOST_WIDE_INT bitpos
;
4809 tree value
= TREE_VALUE (elt
);
4810 tree index
= TREE_PURPOSE (elt
);
4811 rtx xtarget
= target
;
4813 if (cleared
&& is_zeros_p (value
))
4816 unsignedp
= TREE_UNSIGNED (elttype
);
4817 mode
= TYPE_MODE (elttype
);
4818 if (mode
== BLKmode
)
4819 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4820 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4823 bitsize
= GET_MODE_BITSIZE (mode
);
4825 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4827 tree lo_index
= TREE_OPERAND (index
, 0);
4828 tree hi_index
= TREE_OPERAND (index
, 1);
4829 rtx index_r
, pos_rtx
, loop_end
;
4830 struct nesting
*loop
;
4831 HOST_WIDE_INT lo
, hi
, count
;
4837 /* If the range is constant and "small", unroll the loop. */
4839 && host_integerp (lo_index
, 0)
4840 && host_integerp (hi_index
, 0)
4841 && (lo
= tree_low_cst (lo_index
, 0),
4842 hi
= tree_low_cst (hi_index
, 0),
4843 count
= hi
- lo
+ 1,
4844 (GET_CODE (target
) != MEM
4846 || (host_integerp (TYPE_SIZE (elttype
), 1)
4847 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4850 lo
-= minelt
; hi
-= minelt
;
4851 for (; lo
<= hi
; lo
++)
4853 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4855 if (GET_CODE (target
) == MEM
4856 && !MEM_KEEP_ALIAS_SET_P (target
)
4857 && TREE_CODE (type
) == ARRAY_TYPE
4858 && TYPE_NONALIASED_COMPONENT (type
))
4860 target
= copy_rtx (target
);
4861 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4864 store_constructor_field
4865 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4866 get_alias_set (elttype
));
4871 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4872 loop_end
= gen_label_rtx ();
4874 unsignedp
= TREE_UNSIGNED (domain
);
4876 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4879 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4881 SET_DECL_RTL (index
, index_r
);
4882 if (TREE_CODE (value
) == SAVE_EXPR
4883 && SAVE_EXPR_RTL (value
) == 0)
4885 /* Make sure value gets expanded once before the
4887 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4890 store_expr (lo_index
, index_r
, 0);
4891 loop
= expand_start_loop (0);
4893 /* Assign value to element index. */
4895 = convert (ssizetype
,
4896 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4897 index
, TYPE_MIN_VALUE (domain
))));
4898 position
= size_binop (MULT_EXPR
, position
,
4900 TYPE_SIZE_UNIT (elttype
)));
4902 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4903 xtarget
= offset_address (target
, pos_rtx
,
4904 highest_pow2_factor (position
));
4905 xtarget
= adjust_address (xtarget
, mode
, 0);
4906 if (TREE_CODE (value
) == CONSTRUCTOR
)
4907 store_constructor (value
, xtarget
, cleared
,
4908 bitsize
/ BITS_PER_UNIT
);
4910 store_expr (value
, xtarget
, 0);
4912 expand_exit_loop_if_false (loop
,
4913 build (LT_EXPR
, integer_type_node
,
4916 expand_increment (build (PREINCREMENT_EXPR
,
4918 index
, integer_one_node
), 0, 0);
4920 emit_label (loop_end
);
4923 else if ((index
!= 0 && ! host_integerp (index
, 0))
4924 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4932 index
= ssize_int (1);
4935 index
= convert (ssizetype
,
4936 fold (build (MINUS_EXPR
, index
,
4937 TYPE_MIN_VALUE (domain
))));
4939 position
= size_binop (MULT_EXPR
, index
,
4941 TYPE_SIZE_UNIT (elttype
)));
4942 xtarget
= offset_address (target
,
4943 expand_expr (position
, 0, VOIDmode
, 0),
4944 highest_pow2_factor (position
));
4945 xtarget
= adjust_address (xtarget
, mode
, 0);
4946 store_expr (value
, xtarget
, 0);
4953 pos
= tree_low_cst (index
, 0) - minelt
;
4956 vector
[pos
] = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
4961 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4962 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4964 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4966 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4967 && TREE_CODE (type
) == ARRAY_TYPE
4968 && TYPE_NONALIASED_COMPONENT (type
))
4970 target
= copy_rtx (target
);
4971 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4973 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4974 type
, cleared
, get_alias_set (elttype
));
4979 emit_insn (GEN_FCN (icode
) (target
,
4980 gen_rtx_PARALLEL (GET_MODE (target
),
4981 gen_rtvec_v (n_elts
, vector
))));
4985 /* Set constructor assignments. */
4986 else if (TREE_CODE (type
) == SET_TYPE
)
4988 tree elt
= CONSTRUCTOR_ELTS (exp
);
4989 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4990 tree domain
= TYPE_DOMAIN (type
);
4991 tree domain_min
, domain_max
, bitlength
;
4993 /* The default implementation strategy is to extract the constant
4994 parts of the constructor, use that to initialize the target,
4995 and then "or" in whatever non-constant ranges we need in addition.
4997 If a large set is all zero or all ones, it is
4998 probably better to set it using memset (if available) or bzero.
4999 Also, if a large set has just a single range, it may also be
5000 better to first clear all the first clear the set (using
5001 bzero/memset), and set the bits we want. */
5003 /* Check for all zeros. */
5004 if (elt
== NULL_TREE
&& size
> 0)
5007 clear_storage (target
, GEN_INT (size
));
5011 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5012 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5013 bitlength
= size_binop (PLUS_EXPR
,
5014 size_diffop (domain_max
, domain_min
),
5017 nbits
= tree_low_cst (bitlength
, 1);
5019 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5020 are "complicated" (more than one range), initialize (the
5021 constant parts) by copying from a constant. */
5022 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5023 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5025 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5026 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5027 char *bit_buffer
= alloca (nbits
);
5028 HOST_WIDE_INT word
= 0;
5029 unsigned int bit_pos
= 0;
5030 unsigned int ibit
= 0;
5031 unsigned int offset
= 0; /* In bytes from beginning of set. */
5033 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5036 if (bit_buffer
[ibit
])
5038 if (BYTES_BIG_ENDIAN
)
5039 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5041 word
|= 1 << bit_pos
;
5045 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5047 if (word
!= 0 || ! cleared
)
5049 rtx datum
= GEN_INT (word
);
5052 /* The assumption here is that it is safe to use
5053 XEXP if the set is multi-word, but not if
5054 it's single-word. */
5055 if (GET_CODE (target
) == MEM
)
5056 to_rtx
= adjust_address (target
, mode
, offset
);
5057 else if (offset
== 0)
5061 emit_move_insn (to_rtx
, datum
);
5068 offset
+= set_word_size
/ BITS_PER_UNIT
;
5073 /* Don't bother clearing storage if the set is all ones. */
5074 if (TREE_CHAIN (elt
) != NULL_TREE
5075 || (TREE_PURPOSE (elt
) == NULL_TREE
5077 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5078 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5079 || (tree_low_cst (TREE_VALUE (elt
), 0)
5080 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5081 != (HOST_WIDE_INT
) nbits
))))
5082 clear_storage (target
, expr_size (exp
));
5084 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5086 /* Start of range of element or NULL. */
5087 tree startbit
= TREE_PURPOSE (elt
);
5088 /* End of range of element, or element value. */
5089 tree endbit
= TREE_VALUE (elt
);
5090 HOST_WIDE_INT startb
, endb
;
5091 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5093 bitlength_rtx
= expand_expr (bitlength
,
5094 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5096 /* Handle non-range tuple element like [ expr ]. */
5097 if (startbit
== NULL_TREE
)
5099 startbit
= save_expr (endbit
);
5103 startbit
= convert (sizetype
, startbit
);
5104 endbit
= convert (sizetype
, endbit
);
5105 if (! integer_zerop (domain_min
))
5107 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5108 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5110 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5111 EXPAND_CONST_ADDRESS
);
5112 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5113 EXPAND_CONST_ADDRESS
);
5119 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5120 (GET_MODE (target
), 0),
5123 emit_move_insn (targetx
, target
);
5126 else if (GET_CODE (target
) == MEM
)
5131 /* Optimization: If startbit and endbit are constants divisible
5132 by BITS_PER_UNIT, call memset instead. */
5133 if (TARGET_MEM_FUNCTIONS
5134 && TREE_CODE (startbit
) == INTEGER_CST
5135 && TREE_CODE (endbit
) == INTEGER_CST
5136 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5137 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5139 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5141 plus_constant (XEXP (targetx
, 0),
5142 startb
/ BITS_PER_UNIT
),
5144 constm1_rtx
, TYPE_MODE (integer_type_node
),
5145 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5146 TYPE_MODE (sizetype
));
5149 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5150 VOIDmode
, 4, XEXP (targetx
, 0),
5151 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5152 startbit_rtx
, TYPE_MODE (sizetype
),
5153 endbit_rtx
, TYPE_MODE (sizetype
));
5156 emit_move_insn (target
, targetx
);
5164 /* Store the value of EXP (an expression tree)
5165 into a subfield of TARGET which has mode MODE and occupies
5166 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5167 If MODE is VOIDmode, it means that we are storing into a bit-field.
5169 If VALUE_MODE is VOIDmode, return nothing in particular.
5170 UNSIGNEDP is not used in this case.
5172 Otherwise, return an rtx for the value stored. This rtx
5173 has mode VALUE_MODE if that is convenient to do.
5174 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5176 TYPE is the type of the underlying object,
5178 ALIAS_SET is the alias set for the destination. This value will
5179 (in general) be different from that for TARGET, since TARGET is a
5180 reference to the containing structure. */
5183 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5184 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5185 int unsignedp
, tree type
, int alias_set
)
5187 HOST_WIDE_INT width_mask
= 0;
5189 if (TREE_CODE (exp
) == ERROR_MARK
)
5192 /* If we have nothing to store, do nothing unless the expression has
5195 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5196 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5197 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5199 /* If we are storing into an unaligned field of an aligned union that is
5200 in a register, we may have the mode of TARGET being an integer mode but
5201 MODE == BLKmode. In that case, get an aligned object whose size and
5202 alignment are the same as TARGET and store TARGET into it (we can avoid
5203 the store if the field being stored is the entire width of TARGET). Then
5204 call ourselves recursively to store the field into a BLKmode version of
5205 that object. Finally, load from the object into TARGET. This is not
5206 very efficient in general, but should only be slightly more expensive
5207 than the otherwise-required unaligned accesses. Perhaps this can be
5208 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5209 twice, once with emit_move_insn and once via store_field. */
5212 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5214 rtx object
= assign_temp (type
, 0, 1, 1);
5215 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5217 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5218 emit_move_insn (object
, target
);
5220 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5223 emit_move_insn (target
, object
);
5225 /* We want to return the BLKmode version of the data. */
5229 if (GET_CODE (target
) == CONCAT
)
5231 /* We're storing into a struct containing a single __complex. */
5235 return store_expr (exp
, target
, 0);
5238 /* If the structure is in a register or if the component
5239 is a bit field, we cannot use addressing to access it.
5240 Use bit-field techniques or SUBREG to store in it. */
5242 if (mode
== VOIDmode
5243 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5244 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5245 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5246 || GET_CODE (target
) == REG
5247 || GET_CODE (target
) == SUBREG
5248 /* If the field isn't aligned enough to store as an ordinary memref,
5249 store it as a bit field. */
5251 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5252 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5253 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5254 || (bitpos
% BITS_PER_UNIT
!= 0)))
5255 /* If the RHS and field are a constant size and the size of the
5256 RHS isn't the same size as the bitfield, we must use bitfield
5259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5262 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
5266 low-order bits. However, if EXP's type is a record and this is
5267 big-endian machine, we want the upper BITSIZE bits. */
5268 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5269 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5270 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5271 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5278 if (mode
!= VOIDmode
&& mode
!= BLKmode
5279 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5280 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5287 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5288 || bitpos
% BITS_PER_UNIT
!= 0)
5291 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5292 emit_block_move (target
, temp
,
5293 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5297 return value_mode
== VOIDmode
? const0_rtx
: target
;
5300 /* Store the value in the bitfield. */
5301 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5302 int_size_in_bytes (type
));
5304 if (value_mode
!= VOIDmode
)
5306 /* The caller wants an rtx for the value.
5307 If possible, avoid refetching from the bitfield itself. */
5309 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5312 enum machine_mode tmode
;
5314 tmode
= GET_MODE (temp
);
5315 if (tmode
== VOIDmode
)
5319 return expand_and (tmode
, temp
,
5320 gen_int_mode (width_mask
, tmode
),
5323 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5324 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5325 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5328 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5329 NULL_RTX
, value_mode
, VOIDmode
,
5330 int_size_in_bytes (type
));
5336 rtx addr
= XEXP (target
, 0);
5337 rtx to_rtx
= target
;
5339 /* If a value is wanted, it must be the lhs;
5340 so make the address stable for multiple use. */
5342 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5343 && ! CONSTANT_ADDRESS_P (addr
)
5344 /* A frame-pointer reference is already stable. */
5345 && ! (GET_CODE (addr
) == PLUS
5346 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5347 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5348 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5349 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5351 /* Now build a reference to just the desired component. */
5353 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5355 if (to_rtx
== target
)
5356 to_rtx
= copy_rtx (to_rtx
);
5358 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5359 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5360 set_mem_alias_set (to_rtx
, alias_set
);
5362 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5366 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5367 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5368 codes and find the ultimate containing object, which we return.
5370 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5371 bit position, and *PUNSIGNEDP to the signedness of the field.
5372 If the position of the field is variable, we store a tree
5373 giving the variable offset (in units) in *POFFSET.
5374 This offset is in addition to the bit position.
5375 If the position is not variable, we store 0 in *POFFSET.
5377 If any of the extraction expressions is volatile,
5378 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5380 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5381 is a mode that can be used to access the field. In that case, *PBITSIZE
5384 If the field describes a variable-sized object, *PMODE is set to
5385 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5386 this case, but the address of the object can be found. */
5389 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5390 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5391 enum machine_mode
*pmode
, int *punsignedp
,
5395 enum machine_mode mode
= VOIDmode
;
5396 tree offset
= size_zero_node
;
5397 tree bit_offset
= bitsize_zero_node
;
5398 tree placeholder_ptr
= 0;
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
5403 if (TREE_CODE (exp
) == COMPONENT_REF
)
5405 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5407 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5409 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5411 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5413 size_tree
= TREE_OPERAND (exp
, 1);
5414 *punsignedp
= TREE_UNSIGNED (exp
);
5418 mode
= TYPE_MODE (TREE_TYPE (exp
));
5419 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5421 if (mode
== BLKmode
)
5422 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5424 *pbitsize
= GET_MODE_BITSIZE (mode
);
5429 if (! host_integerp (size_tree
, 1))
5430 mode
= BLKmode
, *pbitsize
= -1;
5432 *pbitsize
= tree_low_cst (size_tree
, 1);
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
5439 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5440 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5441 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5443 tree field
= TREE_OPERAND (exp
, 1);
5444 tree this_offset
= DECL_FIELD_OFFSET (field
);
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
5449 if (this_offset
== 0)
5451 else if (CONTAINS_PLACEHOLDER_P (this_offset
))
5452 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5454 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5455 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5456 DECL_FIELD_BIT_OFFSET (field
));
5458 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5461 else if (TREE_CODE (exp
) == ARRAY_REF
5462 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5464 tree index
= TREE_OPERAND (exp
, 1);
5465 tree array
= TREE_OPERAND (exp
, 0);
5466 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5467 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5468 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5470 /* We assume all arrays have sizes that are a multiple of a byte.
5471 First subtract the lower bound, if any, in the type of the
5472 index, then convert to sizetype and multiply by the size of the
5474 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5475 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5478 /* If the index has a self-referential type, pass it to a
5479 WITH_RECORD_EXPR; if the component size is, pass our
5480 component to one. */
5481 if (CONTAINS_PLACEHOLDER_P (index
))
5482 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5483 if (CONTAINS_PLACEHOLDER_P (unit_size
))
5484 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5486 offset
= size_binop (PLUS_EXPR
, offset
,
5487 size_binop (MULT_EXPR
,
5488 convert (sizetype
, index
),
5492 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5494 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5496 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5497 We might have been called from tree optimization where we
5498 haven't set up an object yet. */
5507 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5508 conversions that don't change the mode, and all view conversions
5509 except those that need to "step up" the alignment. */
5510 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5511 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5512 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5513 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5515 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5516 < BIGGEST_ALIGNMENT
)
5517 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5518 || TYPE_ALIGN_OK (TREE_TYPE
5519 (TREE_OPERAND (exp
, 0))))))
5520 && ! ((TREE_CODE (exp
) == NOP_EXPR
5521 || TREE_CODE (exp
) == CONVERT_EXPR
)
5522 && (TYPE_MODE (TREE_TYPE (exp
))
5523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5526 /* If any reference in the chain is volatile, the effect is volatile. */
5527 if (TREE_THIS_VOLATILE (exp
))
5530 exp
= TREE_OPERAND (exp
, 0);
5533 /* If OFFSET is constant, see if we can return the whole thing as a
5534 constant bit position. Otherwise, split it up. */
5535 if (host_integerp (offset
, 0)
5536 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5538 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5539 && host_integerp (tem
, 0))
5540 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5542 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5548 /* Return 1 if T is an expression that get_inner_reference handles. */
5551 handled_component_p (tree t
)
5553 switch (TREE_CODE (t
))
5558 case ARRAY_RANGE_REF
:
5559 case NON_LVALUE_EXPR
:
5560 case VIEW_CONVERT_EXPR
:
5563 /* ??? Sure they are handled, but get_inner_reference may return
5564 a different PBITSIZE, depending upon whether the expression is
5565 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5568 return (TYPE_MODE (TREE_TYPE (t
))
5569 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5576 /* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
5581 The returned value may be a REG, SUBREG, MEM or constant. */
5584 force_operand (rtx value
, rtx target
)
5587 /* Use subtarget as the target for operand 0 of a binary operation. */
5588 rtx subtarget
= get_subtarget (target
);
5589 enum rtx_code code
= GET_CODE (value
);
5591 /* Check for a PIC address load. */
5592 if ((code
== PLUS
|| code
== MINUS
)
5593 && XEXP (value
, 0) == pic_offset_table_rtx
5594 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5595 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5596 || GET_CODE (XEXP (value
, 1)) == CONST
))
5599 subtarget
= gen_reg_rtx (GET_MODE (value
));
5600 emit_move_insn (subtarget
, value
);
5604 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5607 target
= gen_reg_rtx (GET_MODE (value
));
5608 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5609 code
== ZERO_EXTEND
);
5613 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5615 op2
= XEXP (value
, 1);
5616 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5618 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5621 op2
= negate_rtx (GET_MODE (value
), op2
);
5624 /* Check for an addition with OP2 a constant integer and our first
5625 operand a PLUS of a virtual register and something else. In that
5626 case, we want to emit the sum of the virtual register and the
5627 constant first and then add the other value. This allows virtual
5628 register instantiation to simply modify the constant rather than
5629 creating another one around this addition. */
5630 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5631 && GET_CODE (XEXP (value
, 0)) == PLUS
5632 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5633 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5634 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5636 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5637 XEXP (XEXP (value
, 0), 0), op2
,
5638 subtarget
, 0, OPTAB_LIB_WIDEN
);
5639 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5640 force_operand (XEXP (XEXP (value
,
5642 target
, 0, OPTAB_LIB_WIDEN
);
5645 op1
= force_operand (XEXP (value
, 0), subtarget
);
5646 op2
= force_operand (op2
, NULL_RTX
);
5650 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5652 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5653 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5654 target
, 1, OPTAB_LIB_WIDEN
);
5656 return expand_divmod (0,
5657 FLOAT_MODE_P (GET_MODE (value
))
5658 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5659 GET_MODE (value
), op1
, op2
, target
, 0);
5662 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5666 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5670 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5674 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5675 target
, 0, OPTAB_LIB_WIDEN
);
5678 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5679 target
, 1, OPTAB_LIB_WIDEN
);
5682 if (GET_RTX_CLASS (code
) == '1')
5684 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5685 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5688 #ifdef INSN_SCHEDULING
5689 /* On machines that have insn scheduling, we want all memory reference to be
5690 explicit, so we need to deal with such paradoxical SUBREGs. */
5691 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5692 && (GET_MODE_SIZE (GET_MODE (value
))
5693 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5695 = simplify_gen_subreg (GET_MODE (value
),
5696 force_reg (GET_MODE (SUBREG_REG (value
)),
5697 force_operand (SUBREG_REG (value
),
5699 GET_MODE (SUBREG_REG (value
)),
5700 SUBREG_BYTE (value
));
5706 /* Subroutine of expand_expr: return nonzero iff there is no way that
5707 EXP can reference X, which is being modified. TOP_P is nonzero if this
5708 call is going to be used to determine whether we need a temporary
5709 for EXP, as opposed to a recursive call to this function.
5711 It is always safe for this routine to return zero since it merely
5712 searches for optimization opportunities. */
5715 safe_from_p (rtx x
, tree exp
, int top_p
)
5719 static tree save_expr_list
;
5722 /* If EXP has varying size, we MUST use a target since we currently
5723 have no way of allocating temporaries of variable size
5724 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5725 So we assume here that something at a higher level has prevented a
5726 clash. This is somewhat bogus, but the best we can do. Only
5727 do this when X is BLKmode and when we are at the top level. */
5728 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5729 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5730 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5731 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5732 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5734 && GET_MODE (x
) == BLKmode
)
5735 /* If X is in the outgoing argument area, it is always safe. */
5736 || (GET_CODE (x
) == MEM
5737 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5738 || (GET_CODE (XEXP (x
, 0)) == PLUS
5739 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5742 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5743 find the underlying pseudo. */
5744 if (GET_CODE (x
) == SUBREG
)
5747 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5751 /* A SAVE_EXPR might appear many times in the expression passed to the
5752 top-level safe_from_p call, and if it has a complex subexpression,
5753 examining it multiple times could result in a combinatorial explosion.
5754 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5755 with optimization took about 28 minutes to compile -- even though it was
5756 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5757 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5758 we have processed. Note that the only test of top_p was above. */
5767 rtn
= safe_from_p (x
, exp
, 0);
5769 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5770 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5775 /* Now look at our tree code and possibly recurse. */
5776 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5779 exp_rtl
= DECL_RTL_IF_SET (exp
);
5786 if (TREE_CODE (exp
) == TREE_LIST
)
5790 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5792 exp
= TREE_CHAIN (exp
);
5795 if (TREE_CODE (exp
) != TREE_LIST
)
5796 return safe_from_p (x
, exp
, 0);
5799 else if (TREE_CODE (exp
) == ERROR_MARK
)
5800 return 1; /* An already-visited SAVE_EXPR? */
5806 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5811 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5815 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5816 the expression. If it is set, we conflict iff we are that rtx or
5817 both are in memory. Otherwise, we check all operands of the
5818 expression recursively. */
5820 switch (TREE_CODE (exp
))
5823 /* If the operand is static or we are static, we can't conflict.
5824 Likewise if we don't conflict with the operand at all. */
5825 if (staticp (TREE_OPERAND (exp
, 0))
5826 || TREE_STATIC (exp
)
5827 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5830 /* Otherwise, the only way this can conflict is if we are taking
5831 the address of a DECL a that address if part of X, which is
5833 exp
= TREE_OPERAND (exp
, 0);
5836 if (!DECL_RTL_SET_P (exp
)
5837 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5840 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5845 if (GET_CODE (x
) == MEM
5846 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5847 get_alias_set (exp
)))
5852 /* Assume that the call will clobber all hard registers and
5854 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5855 || GET_CODE (x
) == MEM
)
5860 /* If a sequence exists, we would have to scan every instruction
5861 in the sequence to see if it was safe. This is probably not
5863 if (RTL_EXPR_SEQUENCE (exp
))
5866 exp_rtl
= RTL_EXPR_RTL (exp
);
5869 case WITH_CLEANUP_EXPR
:
5870 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5873 case CLEANUP_POINT_EXPR
:
5874 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5877 exp_rtl
= SAVE_EXPR_RTL (exp
);
5881 /* If we've already scanned this, don't do it again. Otherwise,
5882 show we've scanned it and record for clearing the flag if we're
5884 if (TREE_PRIVATE (exp
))
5887 TREE_PRIVATE (exp
) = 1;
5888 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5890 TREE_PRIVATE (exp
) = 0;
5894 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5898 /* The only operand we look at is operand 1. The rest aren't
5899 part of the expression. */
5900 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5906 /* If we have an rtx, we do not need to scan our operands. */
5910 nops
= first_rtl_op (TREE_CODE (exp
));
5911 for (i
= 0; i
< nops
; i
++)
5912 if (TREE_OPERAND (exp
, i
) != 0
5913 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5916 /* If this is a language-specific tree code, it may require
5917 special handling. */
5918 if ((unsigned int) TREE_CODE (exp
)
5919 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5920 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5924 /* If we have an rtl, find any enclosed object. Then see if we conflict
5928 if (GET_CODE (exp_rtl
) == SUBREG
)
5930 exp_rtl
= SUBREG_REG (exp_rtl
);
5931 if (GET_CODE (exp_rtl
) == REG
5932 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5936 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5937 are memory and they conflict. */
5938 return ! (rtx_equal_p (x
, exp_rtl
)
5939 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5940 && true_dependence (exp_rtl
, VOIDmode
, x
,
5941 rtx_addr_varies_p
)));
5944 /* If we reach here, it is safe. */
5948 /* Subroutine of expand_expr: return rtx if EXP is a
5949 variable or parameter; else return 0. */
5955 switch (TREE_CODE (exp
))
5959 return DECL_RTL (exp
);
5965 /* Return the highest power of two that EXP is known to be a multiple of.
5966 This is used in updating alignment of MEMs in array references. */
5968 static unsigned HOST_WIDE_INT
5969 highest_pow2_factor (tree exp
)
5971 unsigned HOST_WIDE_INT c0
, c1
;
5973 switch (TREE_CODE (exp
))
5976 /* We can find the lowest bit that's a one. If the low
5977 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5978 We need to handle this case since we can find it in a COND_EXPR,
5979 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5980 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5982 if (TREE_CONSTANT_OVERFLOW (exp
))
5983 return BIGGEST_ALIGNMENT
;
5986 /* Note: tree_low_cst is intentionally not used here,
5987 we don't care about the upper bits. */
5988 c0
= TREE_INT_CST_LOW (exp
);
5990 return c0
? c0
: BIGGEST_ALIGNMENT
;
5994 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
5995 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5996 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
5997 return MIN (c0
, c1
);
6000 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6001 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6004 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6006 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6007 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6009 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6010 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6011 return MAX (1, c0
/ c1
);
6015 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6016 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6017 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6020 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6023 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6024 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6025 return MIN (c0
, c1
);
6034 /* Similar, except that it is known that the expression must be a multiple
6035 of the alignment of TYPE. */
6037 static unsigned HOST_WIDE_INT
6038 highest_pow2_factor_for_type (tree type
, tree exp
)
6040 unsigned HOST_WIDE_INT type_align
, factor
;
6042 factor
= highest_pow2_factor (exp
);
6043 type_align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
6044 return MAX (factor
, type_align
);
6047 /* Return an object on the placeholder list that matches EXP, a
6048 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6049 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6050 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6051 is a location which initially points to a starting location in the
6052 placeholder list (zero means start of the list) and where a pointer into
6053 the placeholder list at which the object is found is placed. */
6056 find_placeholder (tree exp
, tree
*plist
)
6058 tree type
= TREE_TYPE (exp
);
6059 tree placeholder_expr
;
6061 for (placeholder_expr
6062 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6063 placeholder_expr
!= 0;
6064 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6066 tree need_type
= TYPE_MAIN_VARIANT (type
);
6069 /* Find the outermost reference that is of the type we want. If none,
6070 see if any object has a type that is a pointer to the type we
6072 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6073 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6074 || TREE_CODE (elt
) == COND_EXPR
)
6075 ? TREE_OPERAND (elt
, 1)
6076 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6077 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6078 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6079 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6080 ? TREE_OPERAND (elt
, 0) : 0))
6081 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6084 *plist
= placeholder_expr
;
6088 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6090 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6091 || TREE_CODE (elt
) == COND_EXPR
)
6092 ? TREE_OPERAND (elt
, 1)
6093 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6094 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6095 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6096 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6097 ? TREE_OPERAND (elt
, 0) : 0))
6098 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6099 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6103 *plist
= placeholder_expr
;
6104 return build1 (INDIRECT_REF
, need_type
, elt
);
6111 /* Subroutine of expand_expr. Expand the two operands of a binary
6112 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6113 The value may be stored in TARGET if TARGET is nonzero. The
6114 MODIFIER argument is as documented by expand_expr. */
6117 expand_operands (tree exp0
, tree exp1
, rtx target
, rtx
*op0
, rtx
*op1
,
6118 enum expand_modifier modifier
)
6120 if (! safe_from_p (target
, exp1
, 1))
6122 if (operand_equal_p (exp0
, exp1
, 0))
6124 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6125 *op1
= copy_rtx (*op0
);
6129 /* If we need to preserve evaluation order, copy exp0 into its own
6130 temporary variable so that it can't be clobbered by exp1. */
6131 if (flag_evaluation_order
&& TREE_SIDE_EFFECTS (exp1
))
6132 exp0
= save_expr (exp0
);
6133 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6134 *op1
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, modifier
);
6139 /* expand_expr: generate code for computing expression EXP.
6140 An rtx for the computed value is returned. The value is never null.
6141 In the case of a void EXP, const0_rtx is returned.
6143 The value may be stored in TARGET if TARGET is nonzero.
6144 TARGET is just a suggestion; callers must assume that
6145 the rtx returned may not be the same as TARGET.
6147 If TARGET is CONST0_RTX, it means that the value will be ignored.
6149 If TMODE is not VOIDmode, it suggests generating the
6150 result in mode TMODE. But this is done only when convenient.
6151 Otherwise, TMODE is ignored and the value generated in its natural mode.
6152 TMODE is just a suggestion; callers must assume that
6153 the rtx returned may not have mode TMODE.
6155 Note that TARGET may have neither TMODE nor MODE. In that case, it
6156 probably will not be used.
6158 If MODIFIER is EXPAND_SUM then when EXP is an addition
6159 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6160 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6161 products as above, or REG or MEM, or constant.
6162 Ordinarily in such cases we would output mul or add instructions
6163 and then return a pseudo reg containing the sum.
6165 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6166 it also marks a label as absolutely required (it can't be dead).
6167 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6168 This is used for outputting expressions used in initializers.
6170 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6171 with a constant address even if that address is not normally legitimate.
6172 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6174 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6175 a call parameter. Such targets require special care as we haven't yet
6176 marked TARGET so that it's safe from being trashed by libcalls. We
6177 don't want to use TARGET for anything but the final result;
6178 Intermediate values must go elsewhere. Additionally, calls to
6179 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6181 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6182 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6183 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6184 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6188 expand_expr_real (tree exp
, rtx target
, enum machine_mode tmode
,
6189 enum expand_modifier modifier
, rtx
*alt_rtl
)
6192 tree type
= TREE_TYPE (exp
);
6193 int unsignedp
= TREE_UNSIGNED (type
);
6194 enum machine_mode mode
;
6195 enum tree_code code
= TREE_CODE (exp
);
6197 rtx subtarget
, original_target
;
6201 /* Handle ERROR_MARK before anybody tries to access its type. */
6202 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6204 op0
= CONST0_RTX (tmode
);
6210 mode
= TYPE_MODE (type
);
6211 /* Use subtarget as the target for operand 0 of a binary operation. */
6212 subtarget
= get_subtarget (target
);
6213 original_target
= target
;
6214 ignore
= (target
== const0_rtx
6215 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6216 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6217 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
6218 && TREE_CODE (type
) == VOID_TYPE
));
6220 /* If we are going to ignore this result, we need only do something
6221 if there is a side-effect somewhere in the expression. If there
6222 is, short-circuit the most common cases here. Note that we must
6223 not call expand_expr with anything but const0_rtx in case this
6224 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6228 if (! TREE_SIDE_EFFECTS (exp
))
6231 /* Ensure we reference a volatile object even if value is ignored, but
6232 don't do this if all we are doing is taking its address. */
6233 if (TREE_THIS_VOLATILE (exp
)
6234 && TREE_CODE (exp
) != FUNCTION_DECL
6235 && mode
!= VOIDmode
&& mode
!= BLKmode
6236 && modifier
!= EXPAND_CONST_ADDRESS
)
6238 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6239 if (GET_CODE (temp
) == MEM
)
6240 temp
= copy_to_reg (temp
);
6244 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6245 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6246 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6249 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6250 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6252 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6253 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6256 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6257 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6258 /* If the second operand has no side effects, just evaluate
6260 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6262 else if (code
== BIT_FIELD_REF
)
6264 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6265 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6266 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6273 /* If will do cse, generate all results into pseudo registers
6274 since 1) that allows cse to find more things
6275 and 2) otherwise cse could produce an insn the machine
6276 cannot support. An exception is a CONSTRUCTOR into a multi-word
6277 MEM: that's much more likely to be most efficient into the MEM.
6278 Another is a CALL_EXPR which must return in memory. */
6280 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6281 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6282 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6283 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6290 tree function
= decl_function_context (exp
);
6291 /* Labels in containing functions, or labels used from initializers,
6293 if (modifier
== EXPAND_INITIALIZER
6294 || (function
!= current_function_decl
6295 && function
!= inline_function_decl
6297 temp
= force_label_rtx (exp
);
6299 temp
= label_rtx (exp
);
6301 temp
= gen_rtx_MEM (FUNCTION_MODE
, gen_rtx_LABEL_REF (Pmode
, temp
));
6302 if (function
!= current_function_decl
6303 && function
!= inline_function_decl
&& function
!= 0)
6304 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6309 if (!DECL_RTL_SET_P (exp
))
6311 error ("%Jprior parameter's size depends on '%D'", exp
, exp
);
6312 return CONST0_RTX (mode
);
6315 /* ... fall through ... */
6318 /* If a static var's type was incomplete when the decl was written,
6319 but the type is complete now, lay out the decl now. */
6320 if (DECL_SIZE (exp
) == 0
6321 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6322 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6323 layout_decl (exp
, 0);
6325 /* ... fall through ... */
6329 if (DECL_RTL (exp
) == 0)
6332 /* Ensure variable marked as used even if it doesn't go through
6333 a parser. If it hasn't be used yet, write out an external
6335 if (! TREE_USED (exp
))
6337 assemble_external (exp
);
6338 TREE_USED (exp
) = 1;
6341 /* Show we haven't gotten RTL for this yet. */
6344 /* Handle variables inherited from containing functions. */
6345 context
= decl_function_context (exp
);
6347 /* We treat inline_function_decl as an alias for the current function
6348 because that is the inline function whose vars, types, etc.
6349 are being merged into the current function.
6350 See expand_inline_function. */
6352 if (context
!= 0 && context
!= current_function_decl
6353 && context
!= inline_function_decl
6354 /* If var is static, we don't need a static chain to access it. */
6355 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6356 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6360 /* Mark as non-local and addressable. */
6361 DECL_NONLOCAL (exp
) = 1;
6362 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6364 (*lang_hooks
.mark_addressable
) (exp
);
6365 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6367 addr
= XEXP (DECL_RTL (exp
), 0);
6368 if (GET_CODE (addr
) == MEM
)
6370 = replace_equiv_address (addr
,
6371 fix_lexical_addr (XEXP (addr
, 0), exp
));
6373 addr
= fix_lexical_addr (addr
, exp
);
6375 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6378 /* This is the case of an array whose size is to be determined
6379 from its initializer, while the initializer is still being parsed.
6382 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6383 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6384 temp
= validize_mem (DECL_RTL (exp
));
6386 /* If DECL_RTL is memory, we are in the normal case and either
6387 the address is not valid or it is not a register and -fforce-addr
6388 is specified, get the address into a register. */
6390 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6391 && modifier
!= EXPAND_CONST_ADDRESS
6392 && modifier
!= EXPAND_SUM
6393 && modifier
!= EXPAND_INITIALIZER
6394 && (! memory_address_p (DECL_MODE (exp
),
6395 XEXP (DECL_RTL (exp
), 0))
6397 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6400 *alt_rtl
= DECL_RTL (exp
);
6401 temp
= replace_equiv_address (DECL_RTL (exp
),
6402 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6405 /* If we got something, return it. But first, set the alignment
6406 if the address is a register. */
6409 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6410 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6415 /* If the mode of DECL_RTL does not match that of the decl, it
6416 must be a promoted value. We return a SUBREG of the wanted mode,
6417 but mark it so that we know that it was already extended. */
6419 if (GET_CODE (DECL_RTL (exp
)) == REG
6420 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6422 /* Get the signedness used for this variable. Ensure we get the
6423 same mode we got when the variable was declared. */
6424 if (GET_MODE (DECL_RTL (exp
))
6425 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6426 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6429 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6430 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6431 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6435 return DECL_RTL (exp
);
6438 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6439 TREE_INT_CST_HIGH (exp
), mode
);
6441 /* ??? If overflow is set, fold will have done an incomplete job,
6442 which can result in (plus xx (const_int 0)), which can get
6443 simplified by validate_replace_rtx during virtual register
6444 instantiation, which can result in unrecognizable insns.
6445 Avoid this by forcing all overflows into registers. */
6446 if (TREE_CONSTANT_OVERFLOW (exp
)
6447 && modifier
!= EXPAND_INITIALIZER
)
6448 temp
= force_reg (mode
, temp
);
6453 return const_vector_from_tree (exp
);
6456 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6459 /* If optimized, generate immediate CONST_DOUBLE
6460 which will be turned into memory by reload if necessary.
6462 We used to force a register so that loop.c could see it. But
6463 this does not allow gen_* patterns to perform optimizations with
6464 the constants. It also produces two insns in cases like "x = 1.0;".
6465 On most machines, floating-point constants are not permitted in
6466 many insns, so we'd end up copying it to a register in any case.
6468 Now, we do the copying in expand_binop, if appropriate. */
6469 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6470 TYPE_MODE (TREE_TYPE (exp
)));
6473 /* Handle evaluating a complex constant in a CONCAT target. */
6474 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6476 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6479 rtarg
= XEXP (original_target
, 0);
6480 itarg
= XEXP (original_target
, 1);
6482 /* Move the real and imaginary parts separately. */
6483 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6484 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6487 emit_move_insn (rtarg
, op0
);
6489 emit_move_insn (itarg
, op1
);
6491 return original_target
;
6494 /* ... fall through ... */
6497 temp
= output_constant_def (exp
, 1);
6499 /* temp contains a constant address.
6500 On RISC machines where a constant address isn't valid,
6501 make some insns to get that address into a register. */
6502 if (modifier
!= EXPAND_CONST_ADDRESS
6503 && modifier
!= EXPAND_INITIALIZER
6504 && modifier
!= EXPAND_SUM
6505 && (! memory_address_p (mode
, XEXP (temp
, 0))
6506 || flag_force_addr
))
6507 return replace_equiv_address (temp
,
6508 copy_rtx (XEXP (temp
, 0)));
6511 case EXPR_WITH_FILE_LOCATION
:
6514 struct file_stack fs
;
6516 fs
.location
= input_location
;
6517 fs
.next
= expr_wfl_stack
;
6518 input_filename
= EXPR_WFL_FILENAME (exp
);
6519 input_line
= EXPR_WFL_LINENO (exp
);
6520 expr_wfl_stack
= &fs
;
6521 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6522 emit_line_note (input_location
);
6523 /* Possibly avoid switching back and forth here. */
6524 to_return
= expand_expr (EXPR_WFL_NODE (exp
),
6525 (ignore
? const0_rtx
: target
),
6527 if (expr_wfl_stack
!= &fs
)
6529 input_location
= fs
.location
;
6530 expr_wfl_stack
= fs
.next
;
6535 context
= decl_function_context (exp
);
6537 /* If this SAVE_EXPR was at global context, assume we are an
6538 initialization function and move it into our context. */
6540 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6542 /* We treat inline_function_decl as an alias for the current function
6543 because that is the inline function whose vars, types, etc.
6544 are being merged into the current function.
6545 See expand_inline_function. */
6546 if (context
== current_function_decl
|| context
== inline_function_decl
)
6549 /* If this is non-local, handle it. */
6552 /* The following call just exists to abort if the context is
6553 not of a containing function. */
6554 find_function_data (context
);
6556 temp
= SAVE_EXPR_RTL (exp
);
6557 if (temp
&& GET_CODE (temp
) == REG
)
6559 put_var_into_stack (exp
, /*rescan=*/true);
6560 temp
= SAVE_EXPR_RTL (exp
);
6562 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6565 replace_equiv_address (temp
,
6566 fix_lexical_addr (XEXP (temp
, 0), exp
));
6568 if (SAVE_EXPR_RTL (exp
) == 0)
6570 if (mode
== VOIDmode
)
6573 temp
= assign_temp (build_qualified_type (type
,
6575 | TYPE_QUAL_CONST
)),
6578 SAVE_EXPR_RTL (exp
) = temp
;
6579 if (!optimize
&& GET_CODE (temp
) == REG
)
6580 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6583 /* If the mode of TEMP does not match that of the expression, it
6584 must be a promoted value. We pass store_expr a SUBREG of the
6585 wanted mode but mark it so that we know that it was already
6588 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6590 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6591 promote_mode (type
, mode
, &unsignedp
, 0);
6592 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6593 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6596 if (temp
== const0_rtx
)
6597 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6599 store_expr (TREE_OPERAND (exp
, 0), temp
,
6600 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6602 TREE_USED (exp
) = 1;
6605 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6606 must be a promoted value. We return a SUBREG of the wanted mode,
6607 but mark it so that we know that it was already extended. */
6609 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6610 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6612 /* Compute the signedness and make the proper SUBREG. */
6613 promote_mode (type
, mode
, &unsignedp
, 0);
6614 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6615 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6616 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6620 return SAVE_EXPR_RTL (exp
);
6625 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6626 TREE_OPERAND (exp
, 0)
6627 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6631 case PLACEHOLDER_EXPR
:
6633 tree old_list
= placeholder_list
;
6634 tree placeholder_expr
= 0;
6636 exp
= find_placeholder (exp
, &placeholder_expr
);
6640 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6641 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6642 placeholder_list
= old_list
;
6646 case WITH_RECORD_EXPR
:
6647 /* Put the object on the placeholder list, expand our first operand,
6648 and pop the list. */
6649 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6651 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6653 placeholder_list
= TREE_CHAIN (placeholder_list
);
6657 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6658 expand_goto (TREE_OPERAND (exp
, 0));
6660 expand_computed_goto (TREE_OPERAND (exp
, 0));
6664 expand_exit_loop_if_false (NULL
,
6665 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6668 case LABELED_BLOCK_EXPR
:
6669 if (LABELED_BLOCK_BODY (exp
))
6670 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6671 /* Should perhaps use expand_label, but this is simpler and safer. */
6672 do_pending_stack_adjust ();
6673 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6676 case EXIT_BLOCK_EXPR
:
6677 if (EXIT_BLOCK_RETURN (exp
))
6678 sorry ("returned value in block_exit_expr");
6679 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6684 expand_start_loop (1);
6685 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6693 tree vars
= TREE_OPERAND (exp
, 0);
6695 /* Need to open a binding contour here because
6696 if there are any cleanups they must be contained here. */
6697 expand_start_bindings (2);
6699 /* Mark the corresponding BLOCK for output in its proper place. */
6700 if (TREE_OPERAND (exp
, 2) != 0
6701 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6702 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6704 /* If VARS have not yet been expanded, expand them now. */
6707 if (!DECL_RTL_SET_P (vars
))
6709 expand_decl_init (vars
);
6710 vars
= TREE_CHAIN (vars
);
6713 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6715 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6721 if (RTL_EXPR_SEQUENCE (exp
))
6723 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6725 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6726 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6728 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6729 free_temps_for_rtl_expr (exp
);
6731 *alt_rtl
= RTL_EXPR_ALT_RTL (exp
);
6732 return RTL_EXPR_RTL (exp
);
6735 /* If we don't need the result, just ensure we evaluate any
6741 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6742 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6747 /* All elts simple constants => refer to a constant in memory. But
6748 if this is a non-BLKmode mode, let it store a field at a time
6749 since that should make a CONST_INT or CONST_DOUBLE when we
6750 fold. Likewise, if we have a target we can use, it is best to
6751 store directly into the target unless the type is large enough
6752 that memcpy will be used. If we are making an initializer and
6753 all operands are constant, put it in memory as well.
6755 FIXME: Avoid trying to fill vector constructors piece-meal.
6756 Output them with output_constant_def below unless we're sure
6757 they're zeros. This should go away when vector initializers
6758 are treated like VECTOR_CST instead of arrays.
6760 else if ((TREE_STATIC (exp
)
6761 && ((mode
== BLKmode
6762 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6763 || TREE_ADDRESSABLE (exp
)
6764 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6765 && (! MOVE_BY_PIECES_P
6766 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6768 && ((TREE_CODE (type
) == VECTOR_TYPE
6769 && !is_zeros_p (exp
))
6770 || ! mostly_zeros_p (exp
)))))
6771 || ((modifier
== EXPAND_INITIALIZER
6772 || modifier
== EXPAND_CONST_ADDRESS
)
6773 && TREE_CONSTANT (exp
)))
6775 rtx constructor
= output_constant_def (exp
, 1);
6777 if (modifier
!= EXPAND_CONST_ADDRESS
6778 && modifier
!= EXPAND_INITIALIZER
6779 && modifier
!= EXPAND_SUM
)
6780 constructor
= validize_mem (constructor
);
6786 /* Handle calls that pass values in multiple non-contiguous
6787 locations. The Irix 6 ABI has examples of this. */
6788 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6789 || GET_CODE (target
) == PARALLEL
6790 || modifier
== EXPAND_STACK_PARM
)
6792 = assign_temp (build_qualified_type (type
,
6794 | (TREE_READONLY (exp
)
6795 * TYPE_QUAL_CONST
))),
6796 0, TREE_ADDRESSABLE (exp
), 1);
6798 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6804 tree exp1
= TREE_OPERAND (exp
, 0);
6806 tree string
= string_constant (exp1
, &index
);
6808 /* Try to optimize reads from const strings. */
6810 && TREE_CODE (string
) == STRING_CST
6811 && TREE_CODE (index
) == INTEGER_CST
6812 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6813 && GET_MODE_CLASS (mode
) == MODE_INT
6814 && GET_MODE_SIZE (mode
) == 1
6815 && modifier
!= EXPAND_WRITE
)
6816 return gen_int_mode (TREE_STRING_POINTER (string
)
6817 [TREE_INT_CST_LOW (index
)], mode
);
6819 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6820 op0
= memory_address (mode
, op0
);
6821 temp
= gen_rtx_MEM (mode
, op0
);
6822 set_mem_attributes (temp
, exp
, 0);
6824 /* If we are writing to this object and its type is a record with
6825 readonly fields, we must mark it as readonly so it will
6826 conflict with readonly references to those fields. */
6827 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6828 RTX_UNCHANGING_P (temp
) = 1;
6834 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6838 tree array
= TREE_OPERAND (exp
, 0);
6839 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6840 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6841 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6844 /* Optimize the special-case of a zero lower bound.
6846 We convert the low_bound to sizetype to avoid some problems
6847 with constant folding. (E.g. suppose the lower bound is 1,
6848 and its mode is QI. Without the conversion, (ARRAY
6849 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6850 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6852 if (! integer_zerop (low_bound
))
6853 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6855 /* Fold an expression like: "foo"[2].
6856 This is not done in fold so it won't happen inside &.
6857 Don't fold if this is for wide characters since it's too
6858 difficult to do correctly and this is a very rare case. */
6860 if (modifier
!= EXPAND_CONST_ADDRESS
6861 && modifier
!= EXPAND_INITIALIZER
6862 && modifier
!= EXPAND_MEMORY
6863 && TREE_CODE (array
) == STRING_CST
6864 && TREE_CODE (index
) == INTEGER_CST
6865 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6866 && GET_MODE_CLASS (mode
) == MODE_INT
6867 && GET_MODE_SIZE (mode
) == 1)
6868 return gen_int_mode (TREE_STRING_POINTER (array
)
6869 [TREE_INT_CST_LOW (index
)], mode
);
6871 /* If this is a constant index into a constant array,
6872 just get the value from the array. Handle both the cases when
6873 we have an explicit constructor and when our operand is a variable
6874 that was declared const. */
6876 if (modifier
!= EXPAND_CONST_ADDRESS
6877 && modifier
!= EXPAND_INITIALIZER
6878 && modifier
!= EXPAND_MEMORY
6879 && TREE_CODE (array
) == CONSTRUCTOR
6880 && ! TREE_SIDE_EFFECTS (array
)
6881 && TREE_CODE (index
) == INTEGER_CST
6882 && 0 > compare_tree_int (index
,
6883 list_length (CONSTRUCTOR_ELTS
6884 (TREE_OPERAND (exp
, 0)))))
6888 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6889 i
= TREE_INT_CST_LOW (index
);
6890 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6894 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6898 else if (optimize
>= 1
6899 && modifier
!= EXPAND_CONST_ADDRESS
6900 && modifier
!= EXPAND_INITIALIZER
6901 && modifier
!= EXPAND_MEMORY
6902 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6903 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6904 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6905 && targetm
.binds_local_p (array
))
6907 if (TREE_CODE (index
) == INTEGER_CST
)
6909 tree init
= DECL_INITIAL (array
);
6911 if (TREE_CODE (init
) == CONSTRUCTOR
)
6915 for (elem
= CONSTRUCTOR_ELTS (init
);
6917 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6918 elem
= TREE_CHAIN (elem
))
6921 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6922 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6925 else if (TREE_CODE (init
) == STRING_CST
6926 && 0 > compare_tree_int (index
,
6927 TREE_STRING_LENGTH (init
)))
6929 tree type
= TREE_TYPE (TREE_TYPE (init
));
6930 enum machine_mode mode
= TYPE_MODE (type
);
6932 if (GET_MODE_CLASS (mode
) == MODE_INT
6933 && GET_MODE_SIZE (mode
) == 1)
6934 return gen_int_mode (TREE_STRING_POINTER (init
)
6935 [TREE_INT_CST_LOW (index
)], mode
);
6940 goto normal_inner_ref
;
6943 /* If the operand is a CONSTRUCTOR, we can just extract the
6944 appropriate field if it is present. */
6945 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
6949 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6950 elt
= TREE_CHAIN (elt
))
6951 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6952 /* We can normally use the value of the field in the
6953 CONSTRUCTOR. However, if this is a bitfield in
6954 an integral mode that we can fit in a HOST_WIDE_INT,
6955 we must mask only the number of bits in the bitfield,
6956 since this is done implicitly by the constructor. If
6957 the bitfield does not meet either of those conditions,
6958 we can't do this optimization. */
6959 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6960 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6962 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6963 <= HOST_BITS_PER_WIDE_INT
))))
6965 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6966 && modifier
== EXPAND_STACK_PARM
)
6968 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6969 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6971 HOST_WIDE_INT bitsize
6972 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6973 enum machine_mode imode
6974 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6976 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6978 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6979 op0
= expand_and (imode
, op0
, op1
, target
);
6984 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6987 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6989 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6997 goto normal_inner_ref
;
7000 case ARRAY_RANGE_REF
:
7003 enum machine_mode mode1
;
7004 HOST_WIDE_INT bitsize
, bitpos
;
7007 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7008 &mode1
, &unsignedp
, &volatilep
);
7011 /* If we got back the original object, something is wrong. Perhaps
7012 we are evaluating an expression too early. In any event, don't
7013 infinitely recurse. */
7017 /* If TEM's type is a union of variable size, pass TARGET to the inner
7018 computation, since it will need a temporary and TARGET is known
7019 to have to do. This occurs in unchecked conversion in Ada. */
7023 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7024 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7026 && modifier
!= EXPAND_STACK_PARM
7027 ? target
: NULL_RTX
),
7029 (modifier
== EXPAND_INITIALIZER
7030 || modifier
== EXPAND_CONST_ADDRESS
7031 || modifier
== EXPAND_STACK_PARM
)
7032 ? modifier
: EXPAND_NORMAL
);
7034 /* If this is a constant, put it into a register if it is a
7035 legitimate constant and OFFSET is 0 and memory if it isn't. */
7036 if (CONSTANT_P (op0
))
7038 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7039 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7041 op0
= force_reg (mode
, op0
);
7043 op0
= validize_mem (force_const_mem (mode
, op0
));
7046 /* Otherwise, if this object not in memory and we either have an
7047 offset or a BLKmode result, put it there. This case can't occur in
7048 C, but can in Ada if we have unchecked conversion of an expression
7049 from a scalar type to an array or record type or for an
7050 ARRAY_RANGE_REF whose type is BLKmode. */
7051 else if (GET_CODE (op0
) != MEM
7053 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7055 /* If the operand is a SAVE_EXPR, we can deal with this by
7056 forcing the SAVE_EXPR into memory. */
7057 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7059 put_var_into_stack (TREE_OPERAND (exp
, 0),
7061 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7066 = build_qualified_type (TREE_TYPE (tem
),
7067 (TYPE_QUALS (TREE_TYPE (tem
))
7068 | TYPE_QUAL_CONST
));
7069 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7071 emit_move_insn (memloc
, op0
);
7078 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7081 if (GET_CODE (op0
) != MEM
)
7084 #ifdef POINTERS_EXTEND_UNSIGNED
7085 if (GET_MODE (offset_rtx
) != Pmode
)
7086 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7088 if (GET_MODE (offset_rtx
) != ptr_mode
)
7089 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7092 if (GET_MODE (op0
) == BLKmode
7093 /* A constant address in OP0 can have VOIDmode, we must
7094 not try to call force_reg in that case. */
7095 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7097 && (bitpos
% bitsize
) == 0
7098 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7099 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7101 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7105 op0
= offset_address (op0
, offset_rtx
,
7106 highest_pow2_factor (offset
));
7109 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7110 record its alignment as BIGGEST_ALIGNMENT. */
7111 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7112 && is_aligning_offset (offset
, tem
))
7113 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7115 /* Don't forget about volatility even if this is a bitfield. */
7116 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7118 if (op0
== orig_op0
)
7119 op0
= copy_rtx (op0
);
7121 MEM_VOLATILE_P (op0
) = 1;
7124 /* The following code doesn't handle CONCAT.
7125 Assume only bitpos == 0 can be used for CONCAT, due to
7126 one element arrays having the same mode as its element. */
7127 if (GET_CODE (op0
) == CONCAT
)
7129 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7134 /* In cases where an aligned union has an unaligned object
7135 as a field, we might be extracting a BLKmode value from
7136 an integer-mode (e.g., SImode) object. Handle this case
7137 by doing the extract into an object as wide as the field
7138 (which we know to be the width of a basic mode), then
7139 storing into memory, and changing the mode to BLKmode. */
7140 if (mode1
== VOIDmode
7141 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7142 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7143 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7144 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7145 && modifier
!= EXPAND_CONST_ADDRESS
7146 && modifier
!= EXPAND_INITIALIZER
)
7147 /* If the field isn't aligned enough to fetch as a memref,
7148 fetch it as a bit field. */
7149 || (mode1
!= BLKmode
7150 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7151 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7152 || (GET_CODE (op0
) == MEM
7153 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7154 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7155 && ((modifier
== EXPAND_CONST_ADDRESS
7156 || modifier
== EXPAND_INITIALIZER
)
7158 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7159 || (bitpos
% BITS_PER_UNIT
!= 0)))
7160 /* If the type and the field are a constant size and the
7161 size of the type isn't the same size as the bitfield,
7162 we must use bitfield operations. */
7164 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7166 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7169 enum machine_mode ext_mode
= mode
;
7171 if (ext_mode
== BLKmode
7172 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7173 && GET_CODE (target
) == MEM
7174 && bitpos
% BITS_PER_UNIT
== 0))
7175 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7177 if (ext_mode
== BLKmode
)
7180 target
= assign_temp (type
, 0, 1, 1);
7185 /* In this case, BITPOS must start at a byte boundary and
7186 TARGET, if specified, must be a MEM. */
7187 if (GET_CODE (op0
) != MEM
7188 || (target
!= 0 && GET_CODE (target
) != MEM
)
7189 || bitpos
% BITS_PER_UNIT
!= 0)
7192 emit_block_move (target
,
7193 adjust_address (op0
, VOIDmode
,
7194 bitpos
/ BITS_PER_UNIT
),
7195 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7197 (modifier
== EXPAND_STACK_PARM
7198 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7203 op0
= validize_mem (op0
);
7205 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7206 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7208 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7209 (modifier
== EXPAND_STACK_PARM
7210 ? NULL_RTX
: target
),
7212 int_size_in_bytes (TREE_TYPE (tem
)));
7214 /* If the result is a record type and BITSIZE is narrower than
7215 the mode of OP0, an integral mode, and this is a big endian
7216 machine, we must put the field into the high-order bits. */
7217 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7218 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7219 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7220 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7221 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7225 if (mode
== BLKmode
)
7227 rtx
new = assign_temp (build_qualified_type
7228 ((*lang_hooks
.types
.type_for_mode
)
7230 TYPE_QUAL_CONST
), 0, 1, 1);
7232 emit_move_insn (new, op0
);
7233 op0
= copy_rtx (new);
7234 PUT_MODE (op0
, BLKmode
);
7235 set_mem_attributes (op0
, exp
, 1);
7241 /* If the result is BLKmode, use that to access the object
7243 if (mode
== BLKmode
)
7246 /* Get a reference to just this component. */
7247 if (modifier
== EXPAND_CONST_ADDRESS
7248 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7249 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7251 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7253 if (op0
== orig_op0
)
7254 op0
= copy_rtx (op0
);
7256 set_mem_attributes (op0
, exp
, 0);
7257 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7258 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7260 MEM_VOLATILE_P (op0
) |= volatilep
;
7261 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7262 || modifier
== EXPAND_CONST_ADDRESS
7263 || modifier
== EXPAND_INITIALIZER
)
7265 else if (target
== 0)
7266 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7268 convert_move (target
, op0
, unsignedp
);
7274 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7276 /* Evaluate the interior expression. */
7277 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7280 /* Get or create an instruction off which to hang a note. */
7281 if (REG_P (subtarget
))
7284 insn
= get_last_insn ();
7287 if (! INSN_P (insn
))
7288 insn
= prev_nonnote_insn (insn
);
7292 target
= gen_reg_rtx (GET_MODE (subtarget
));
7293 insn
= emit_move_insn (target
, subtarget
);
7296 /* Collect the data for the note. */
7297 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7298 vtbl_ref
= plus_constant (vtbl_ref
,
7299 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7300 /* Discard the initial CONST that was added. */
7301 vtbl_ref
= XEXP (vtbl_ref
, 0);
7304 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7309 /* Intended for a reference to a buffer of a file-object in Pascal.
7310 But it's not certain that a special tree code will really be
7311 necessary for these. INDIRECT_REF might work for them. */
7317 /* Pascal set IN expression.
7320 rlo = set_low - (set_low%bits_per_word);
7321 the_word = set [ (index - rlo)/bits_per_word ];
7322 bit_index = index % bits_per_word;
7323 bitmask = 1 << bit_index;
7324 return !!(the_word & bitmask); */
7326 tree set
= TREE_OPERAND (exp
, 0);
7327 tree index
= TREE_OPERAND (exp
, 1);
7328 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7329 tree set_type
= TREE_TYPE (set
);
7330 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7331 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7332 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7333 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7334 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7335 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7336 rtx setaddr
= XEXP (setval
, 0);
7337 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7339 rtx diff
, quo
, rem
, addr
, bit
, result
;
7341 /* If domain is empty, answer is no. Likewise if index is constant
7342 and out of bounds. */
7343 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7344 && TREE_CODE (set_low_bound
) == INTEGER_CST
7345 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7346 || (TREE_CODE (index
) == INTEGER_CST
7347 && TREE_CODE (set_low_bound
) == INTEGER_CST
7348 && tree_int_cst_lt (index
, set_low_bound
))
7349 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7350 && TREE_CODE (index
) == INTEGER_CST
7351 && tree_int_cst_lt (set_high_bound
, index
))))
7355 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7357 /* If we get here, we have to generate the code for both cases
7358 (in range and out of range). */
7360 op0
= gen_label_rtx ();
7361 op1
= gen_label_rtx ();
7363 if (! (GET_CODE (index_val
) == CONST_INT
7364 && GET_CODE (lo_r
) == CONST_INT
))
7365 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7366 GET_MODE (index_val
), iunsignedp
, op1
);
7368 if (! (GET_CODE (index_val
) == CONST_INT
7369 && GET_CODE (hi_r
) == CONST_INT
))
7370 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7371 GET_MODE (index_val
), iunsignedp
, op1
);
7373 /* Calculate the element number of bit zero in the first word
7375 if (GET_CODE (lo_r
) == CONST_INT
)
7376 rlow
= GEN_INT (INTVAL (lo_r
)
7377 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7379 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7380 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7381 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7383 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7384 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7386 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7387 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7388 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7389 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7391 addr
= memory_address (byte_mode
,
7392 expand_binop (index_mode
, add_optab
, diff
,
7393 setaddr
, NULL_RTX
, iunsignedp
,
7396 /* Extract the bit we want to examine. */
7397 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7398 gen_rtx_MEM (byte_mode
, addr
),
7399 make_tree (TREE_TYPE (index
), rem
),
7401 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7402 GET_MODE (target
) == byte_mode
? target
: 0,
7403 1, OPTAB_LIB_WIDEN
);
7405 if (result
!= target
)
7406 convert_move (target
, result
, 1);
7408 /* Output the code to handle the out-of-range case. */
7411 emit_move_insn (target
, const0_rtx
);
7416 case WITH_CLEANUP_EXPR
:
7417 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7419 WITH_CLEANUP_EXPR_RTL (exp
)
7420 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7421 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7422 CLEANUP_EH_ONLY (exp
));
7424 /* That's it for this cleanup. */
7425 TREE_OPERAND (exp
, 1) = 0;
7427 return WITH_CLEANUP_EXPR_RTL (exp
);
7429 case CLEANUP_POINT_EXPR
:
7431 /* Start a new binding layer that will keep track of all cleanup
7432 actions to be performed. */
7433 expand_start_bindings (2);
7435 target_temp_slot_level
= temp_slot_level
;
7437 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7438 /* If we're going to use this value, load it up now. */
7440 op0
= force_not_mem (op0
);
7441 preserve_temp_slots (op0
);
7442 expand_end_bindings (NULL_TREE
, 0, 0);
7447 /* Check for a built-in function. */
7448 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7449 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7451 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7453 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7454 == BUILT_IN_FRONTEND
)
7455 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7459 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7462 return expand_call (exp
, target
, ignore
);
7464 case NON_LVALUE_EXPR
:
7467 case REFERENCE_EXPR
:
7468 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7471 if (TREE_CODE (type
) == UNION_TYPE
)
7473 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7475 /* If both input and output are BLKmode, this conversion isn't doing
7476 anything except possibly changing memory attribute. */
7477 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7479 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7482 result
= copy_rtx (result
);
7483 set_mem_attributes (result
, exp
, 0);
7489 if (TYPE_MODE (type
) != BLKmode
)
7490 target
= gen_reg_rtx (TYPE_MODE (type
));
7492 target
= assign_temp (type
, 0, 1, 1);
7495 if (GET_CODE (target
) == MEM
)
7496 /* Store data into beginning of memory target. */
7497 store_expr (TREE_OPERAND (exp
, 0),
7498 adjust_address (target
, TYPE_MODE (valtype
), 0),
7499 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7501 else if (GET_CODE (target
) == REG
)
7502 /* Store this field into a union of the proper type. */
7503 store_field (target
,
7504 MIN ((int_size_in_bytes (TREE_TYPE
7505 (TREE_OPERAND (exp
, 0)))
7507 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7508 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7509 VOIDmode
, 0, type
, 0);
7513 /* Return the entire union. */
7517 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7519 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7522 /* If the signedness of the conversion differs and OP0 is
7523 a promoted SUBREG, clear that indication since we now
7524 have to do the proper extension. */
7525 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7526 && GET_CODE (op0
) == SUBREG
)
7527 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7532 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7533 if (GET_MODE (op0
) == mode
)
7536 /* If OP0 is a constant, just convert it into the proper mode. */
7537 if (CONSTANT_P (op0
))
7539 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7540 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7542 if (modifier
== EXPAND_INITIALIZER
)
7543 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7544 subreg_lowpart_offset (mode
,
7547 return convert_modes (mode
, inner_mode
, op0
,
7548 TREE_UNSIGNED (inner_type
));
7551 if (modifier
== EXPAND_INITIALIZER
)
7552 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7556 convert_to_mode (mode
, op0
,
7557 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7559 convert_move (target
, op0
,
7560 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7563 case VIEW_CONVERT_EXPR
:
7564 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7566 /* If the input and output modes are both the same, we are done.
7567 Otherwise, if neither mode is BLKmode and both are integral and within
7568 a word, we can use gen_lowpart. If neither is true, make sure the
7569 operand is in memory and convert the MEM to the new mode. */
7570 if (TYPE_MODE (type
) == GET_MODE (op0
))
7572 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7573 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7574 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7575 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7576 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7577 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7578 else if (GET_CODE (op0
) != MEM
)
7580 /* If the operand is not a MEM, force it into memory. Since we
7581 are going to be be changing the mode of the MEM, don't call
7582 force_const_mem for constants because we don't allow pool
7583 constants to change mode. */
7584 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7586 if (TREE_ADDRESSABLE (exp
))
7589 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7591 = assign_stack_temp_for_type
7592 (TYPE_MODE (inner_type
),
7593 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7595 emit_move_insn (target
, op0
);
7599 /* At this point, OP0 is in the correct mode. If the output type is such
7600 that the operand is known to be aligned, indicate that it is.
7601 Otherwise, we need only be concerned about alignment for non-BLKmode
7603 if (GET_CODE (op0
) == MEM
)
7605 op0
= copy_rtx (op0
);
7607 if (TYPE_ALIGN_OK (type
))
7608 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7609 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7610 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7612 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7613 HOST_WIDE_INT temp_size
7614 = MAX (int_size_in_bytes (inner_type
),
7615 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7616 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7617 temp_size
, 0, type
);
7618 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7620 if (TREE_ADDRESSABLE (exp
))
7623 if (GET_MODE (op0
) == BLKmode
)
7624 emit_block_move (new_with_op0_mode
, op0
,
7625 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7626 (modifier
== EXPAND_STACK_PARM
7627 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7629 emit_move_insn (new_with_op0_mode
, op0
);
7634 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7640 this_optab
= ! unsignedp
&& flag_trapv
7641 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7642 ? addv_optab
: add_optab
;
7644 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7645 something else, make sure we add the register to the constant and
7646 then to the other thing. This case can occur during strength
7647 reduction and doing it this way will produce better code if the
7648 frame pointer or argument pointer is eliminated.
7650 fold-const.c will ensure that the constant is always in the inner
7651 PLUS_EXPR, so the only case we need to do anything about is if
7652 sp, ap, or fp is our second argument, in which case we must swap
7653 the innermost first argument and our second argument. */
7655 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7656 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7657 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7658 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7659 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7660 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7662 tree t
= TREE_OPERAND (exp
, 1);
7664 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7665 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7668 /* If the result is to be ptr_mode and we are adding an integer to
7669 something, we might be forming a constant. So try to use
7670 plus_constant. If it produces a sum and we can't accept it,
7671 use force_operand. This allows P = &ARR[const] to generate
7672 efficient code on machines where a SYMBOL_REF is not a valid
7675 If this is an EXPAND_SUM call, always return the sum. */
7676 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7677 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7679 if (modifier
== EXPAND_STACK_PARM
)
7681 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7682 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7683 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7687 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7689 /* Use immed_double_const to ensure that the constant is
7690 truncated according to the mode of OP1, then sign extended
7691 to a HOST_WIDE_INT. Using the constant directly can result
7692 in non-canonical RTL in a 64x32 cross compile. */
7694 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7696 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7697 op1
= plus_constant (op1
, INTVAL (constant_part
));
7698 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7699 op1
= force_operand (op1
, target
);
7703 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7704 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7705 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7709 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7710 (modifier
== EXPAND_INITIALIZER
7711 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7712 if (! CONSTANT_P (op0
))
7714 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7715 VOIDmode
, modifier
);
7716 /* Return a PLUS if modifier says it's OK. */
7717 if (modifier
== EXPAND_SUM
7718 || modifier
== EXPAND_INITIALIZER
)
7719 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7722 /* Use immed_double_const to ensure that the constant is
7723 truncated according to the mode of OP1, then sign extended
7724 to a HOST_WIDE_INT. Using the constant directly can result
7725 in non-canonical RTL in a 64x32 cross compile. */
7727 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7729 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7730 op0
= plus_constant (op0
, INTVAL (constant_part
));
7731 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7732 op0
= force_operand (op0
, target
);
7737 /* No sense saving up arithmetic to be done
7738 if it's all in the wrong mode to form part of an address.
7739 And force_operand won't know whether to sign-extend or
7741 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7742 || mode
!= ptr_mode
)
7744 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7745 subtarget
, &op0
, &op1
, 0);
7746 if (op0
== const0_rtx
)
7748 if (op1
== const0_rtx
)
7753 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7754 subtarget
, &op0
, &op1
, modifier
);
7755 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7758 /* For initializers, we are allowed to return a MINUS of two
7759 symbolic constants. Here we handle all cases when both operands
7761 /* Handle difference of two symbolic constants,
7762 for the sake of an initializer. */
7763 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7764 && really_constant_p (TREE_OPERAND (exp
, 0))
7765 && really_constant_p (TREE_OPERAND (exp
, 1)))
7767 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7768 NULL_RTX
, &op0
, &op1
, modifier
);
7770 /* If the last operand is a CONST_INT, use plus_constant of
7771 the negated constant. Else make the MINUS. */
7772 if (GET_CODE (op1
) == CONST_INT
)
7773 return plus_constant (op0
, - INTVAL (op1
));
7775 return gen_rtx_MINUS (mode
, op0
, op1
);
7778 this_optab
= ! unsignedp
&& flag_trapv
7779 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7780 ? subv_optab
: sub_optab
;
7782 /* No sense saving up arithmetic to be done
7783 if it's all in the wrong mode to form part of an address.
7784 And force_operand won't know whether to sign-extend or
7786 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7787 || mode
!= ptr_mode
)
7790 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7791 subtarget
, &op0
, &op1
, modifier
);
7793 /* Convert A - const to A + (-const). */
7794 if (GET_CODE (op1
) == CONST_INT
)
7796 op1
= negate_rtx (mode
, op1
);
7797 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7803 /* If first operand is constant, swap them.
7804 Thus the following special case checks need only
7805 check the second operand. */
7806 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7808 tree t1
= TREE_OPERAND (exp
, 0);
7809 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7810 TREE_OPERAND (exp
, 1) = t1
;
7813 /* Attempt to return something suitable for generating an
7814 indexed address, for machines that support that. */
7816 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7817 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7819 tree exp1
= TREE_OPERAND (exp
, 1);
7821 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7824 if (GET_CODE (op0
) != REG
)
7825 op0
= force_operand (op0
, NULL_RTX
);
7826 if (GET_CODE (op0
) != REG
)
7827 op0
= copy_to_mode_reg (mode
, op0
);
7829 return gen_rtx_MULT (mode
, op0
,
7830 gen_int_mode (tree_low_cst (exp1
, 0),
7831 TYPE_MODE (TREE_TYPE (exp1
))));
7834 if (modifier
== EXPAND_STACK_PARM
)
7837 /* Check for multiplying things that have been extended
7838 from a narrower type. If this machine supports multiplying
7839 in that narrower type with a result in the desired type,
7840 do it that way, and avoid the explicit type-conversion. */
7841 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7842 && TREE_CODE (type
) == INTEGER_TYPE
7843 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7844 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7845 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7846 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7847 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7848 /* Don't use a widening multiply if a shift will do. */
7849 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7850 > HOST_BITS_PER_WIDE_INT
)
7851 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7853 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7854 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7856 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7857 /* If both operands are extended, they must either both
7858 be zero-extended or both be sign-extended. */
7859 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7861 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7863 enum machine_mode innermode
7864 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7865 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7866 ? smul_widen_optab
: umul_widen_optab
);
7867 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7868 ? umul_widen_optab
: smul_widen_optab
);
7869 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7871 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7873 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7874 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7875 TREE_OPERAND (exp
, 1),
7876 NULL_RTX
, &op0
, &op1
, 0);
7878 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7879 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7880 NULL_RTX
, &op0
, &op1
, 0);
7883 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7884 && innermode
== word_mode
)
7887 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7888 NULL_RTX
, VOIDmode
, 0);
7889 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7890 op1
= convert_modes (innermode
, mode
,
7891 expand_expr (TREE_OPERAND (exp
, 1),
7892 NULL_RTX
, VOIDmode
, 0),
7895 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7896 NULL_RTX
, VOIDmode
, 0);
7897 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7898 unsignedp
, OPTAB_LIB_WIDEN
);
7899 htem
= expand_mult_highpart_adjust (innermode
,
7900 gen_highpart (innermode
, temp
),
7902 gen_highpart (innermode
, temp
),
7904 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7909 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7910 subtarget
, &op0
, &op1
, 0);
7911 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7913 case TRUNC_DIV_EXPR
:
7914 case FLOOR_DIV_EXPR
:
7916 case ROUND_DIV_EXPR
:
7917 case EXACT_DIV_EXPR
:
7918 if (modifier
== EXPAND_STACK_PARM
)
7920 /* Possible optimization: compute the dividend with EXPAND_SUM
7921 then if the divisor is constant can optimize the case
7922 where some terms of the dividend have coeffs divisible by it. */
7923 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7924 subtarget
, &op0
, &op1
, 0);
7925 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7928 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7929 expensive divide. If not, combine will rebuild the original
7931 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7932 && TREE_CODE (type
) == REAL_TYPE
7933 && !real_onep (TREE_OPERAND (exp
, 0)))
7934 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7935 build (RDIV_EXPR
, type
,
7936 build_real (type
, dconst1
),
7937 TREE_OPERAND (exp
, 1))),
7938 target
, tmode
, modifier
);
7939 this_optab
= sdiv_optab
;
7942 case TRUNC_MOD_EXPR
:
7943 case FLOOR_MOD_EXPR
:
7945 case ROUND_MOD_EXPR
:
7946 if (modifier
== EXPAND_STACK_PARM
)
7948 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7949 subtarget
, &op0
, &op1
, 0);
7950 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7952 case FIX_ROUND_EXPR
:
7953 case FIX_FLOOR_EXPR
:
7955 abort (); /* Not used for C. */
7957 case FIX_TRUNC_EXPR
:
7958 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7959 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7960 target
= gen_reg_rtx (mode
);
7961 expand_fix (target
, op0
, unsignedp
);
7965 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7966 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7967 target
= gen_reg_rtx (mode
);
7968 /* expand_float can't figure out what to do if FROM has VOIDmode.
7969 So give it the correct mode. With -O, cse will optimize this. */
7970 if (GET_MODE (op0
) == VOIDmode
)
7971 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7973 expand_float (target
, op0
,
7974 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7978 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7979 if (modifier
== EXPAND_STACK_PARM
)
7981 temp
= expand_unop (mode
,
7982 ! unsignedp
&& flag_trapv
7983 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7984 ? negv_optab
: neg_optab
, op0
, target
, 0);
7990 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7991 if (modifier
== EXPAND_STACK_PARM
)
7994 /* ABS_EXPR is not valid for complex arguments. */
7995 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7996 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7999 /* Unsigned abs is simply the operand. Testing here means we don't
8000 risk generating incorrect code below. */
8001 if (TREE_UNSIGNED (type
))
8004 return expand_abs (mode
, op0
, target
, unsignedp
,
8005 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8009 target
= original_target
;
8011 || modifier
== EXPAND_STACK_PARM
8012 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8013 || GET_MODE (target
) != mode
8014 || (GET_CODE (target
) == REG
8015 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8016 target
= gen_reg_rtx (mode
);
8017 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8018 target
, &op0
, &op1
, 0);
8020 /* First try to do it with a special MIN or MAX instruction.
8021 If that does not win, use a conditional jump to select the proper
8023 this_optab
= (TREE_UNSIGNED (type
)
8024 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8025 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8027 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8032 /* At this point, a MEM target is no longer useful; we will get better
8035 if (GET_CODE (target
) == MEM
)
8036 target
= gen_reg_rtx (mode
);
8038 /* If op1 was placed in target, swap op0 and op1. */
8039 if (target
!= op0
&& target
== op1
)
8047 emit_move_insn (target
, op0
);
8049 op0
= gen_label_rtx ();
8051 /* If this mode is an integer too wide to compare properly,
8052 compare word by word. Rely on cse to optimize constant cases. */
8053 if (GET_MODE_CLASS (mode
) == MODE_INT
8054 && ! can_compare_p (GE
, mode
, ccp_jump
))
8056 if (code
== MAX_EXPR
)
8057 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8058 target
, op1
, NULL_RTX
, op0
);
8060 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8061 op1
, target
, NULL_RTX
, op0
);
8065 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8066 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8067 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8070 emit_move_insn (target
, op1
);
8075 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8076 if (modifier
== EXPAND_STACK_PARM
)
8078 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8083 /* ??? Can optimize bitwise operations with one arg constant.
8084 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8085 and (a bitwise1 b) bitwise2 b (etc)
8086 but that is probably not worth while. */
8088 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8089 boolean values when we want in all cases to compute both of them. In
8090 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8091 as actual zero-or-1 values and then bitwise anding. In cases where
8092 there cannot be any side effects, better code would be made by
8093 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8094 how to recognize those cases. */
8096 case TRUTH_AND_EXPR
:
8098 this_optab
= and_optab
;
8103 this_optab
= ior_optab
;
8106 case TRUTH_XOR_EXPR
:
8108 this_optab
= xor_optab
;
8115 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8117 if (modifier
== EXPAND_STACK_PARM
)
8119 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8120 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8123 /* Could determine the answer when only additive constants differ. Also,
8124 the addition of one can be handled by changing the condition. */
8131 case UNORDERED_EXPR
:
8138 temp
= do_store_flag (exp
,
8139 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8140 tmode
!= VOIDmode
? tmode
: mode
, 0);
8144 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8145 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8147 && GET_CODE (original_target
) == REG
8148 && (GET_MODE (original_target
)
8149 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8151 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8154 /* If temp is constant, we can just compute the result. */
8155 if (GET_CODE (temp
) == CONST_INT
)
8157 if (INTVAL (temp
) != 0)
8158 emit_move_insn (target
, const1_rtx
);
8160 emit_move_insn (target
, const0_rtx
);
8165 if (temp
!= original_target
)
8167 enum machine_mode mode1
= GET_MODE (temp
);
8168 if (mode1
== VOIDmode
)
8169 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8171 temp
= copy_to_mode_reg (mode1
, temp
);
8174 op1
= gen_label_rtx ();
8175 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8176 GET_MODE (temp
), unsignedp
, op1
);
8177 emit_move_insn (temp
, const1_rtx
);
8182 /* If no set-flag instruction, must generate a conditional
8183 store into a temporary variable. Drop through
8184 and handle this like && and ||. */
8186 case TRUTH_ANDIF_EXPR
:
8187 case TRUTH_ORIF_EXPR
:
8190 || modifier
== EXPAND_STACK_PARM
8191 || ! safe_from_p (target
, exp
, 1)
8192 /* Make sure we don't have a hard reg (such as function's return
8193 value) live across basic blocks, if not optimizing. */
8194 || (!optimize
&& GET_CODE (target
) == REG
8195 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8196 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8199 emit_clr_insn (target
);
8201 op1
= gen_label_rtx ();
8202 jumpifnot (exp
, op1
);
8205 emit_0_to_1_insn (target
);
8208 return ignore
? const0_rtx
: target
;
8210 case TRUTH_NOT_EXPR
:
8211 if (modifier
== EXPAND_STACK_PARM
)
8213 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8214 /* The parser is careful to generate TRUTH_NOT_EXPR
8215 only with operands that are always zero or one. */
8216 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8217 target
, 1, OPTAB_LIB_WIDEN
);
8223 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8225 return expand_expr_real (TREE_OPERAND (exp
, 1),
8226 (ignore
? const0_rtx
: target
),
8227 VOIDmode
, modifier
, alt_rtl
);
8230 /* If we would have a "singleton" (see below) were it not for a
8231 conversion in each arm, bring that conversion back out. */
8232 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8233 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8234 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8235 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8237 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8238 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8240 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8241 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8242 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8243 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8244 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8245 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8246 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8247 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8248 return expand_expr (build1 (NOP_EXPR
, type
,
8249 build (COND_EXPR
, TREE_TYPE (iftrue
),
8250 TREE_OPERAND (exp
, 0),
8252 target
, tmode
, modifier
);
8256 /* Note that COND_EXPRs whose type is a structure or union
8257 are required to be constructed to contain assignments of
8258 a temporary variable, so that we can evaluate them here
8259 for side effect only. If type is void, we must do likewise. */
8261 /* If an arm of the branch requires a cleanup,
8262 only that cleanup is performed. */
8265 tree binary_op
= 0, unary_op
= 0;
8267 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8268 convert it to our mode, if necessary. */
8269 if (integer_onep (TREE_OPERAND (exp
, 1))
8270 && integer_zerop (TREE_OPERAND (exp
, 2))
8271 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8275 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8280 if (modifier
== EXPAND_STACK_PARM
)
8282 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8283 if (GET_MODE (op0
) == mode
)
8287 target
= gen_reg_rtx (mode
);
8288 convert_move (target
, op0
, unsignedp
);
8292 /* Check for X ? A + B : A. If we have this, we can copy A to the
8293 output and conditionally add B. Similarly for unary operations.
8294 Don't do this if X has side-effects because those side effects
8295 might affect A or B and the "?" operation is a sequence point in
8296 ANSI. (operand_equal_p tests for side effects.) */
8298 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8299 && operand_equal_p (TREE_OPERAND (exp
, 2),
8300 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8301 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8302 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8303 && operand_equal_p (TREE_OPERAND (exp
, 1),
8304 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8305 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8306 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8307 && operand_equal_p (TREE_OPERAND (exp
, 2),
8308 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8309 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8310 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8311 && operand_equal_p (TREE_OPERAND (exp
, 1),
8312 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8313 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8315 /* If we are not to produce a result, we have no target. Otherwise,
8316 if a target was specified use it; it will not be used as an
8317 intermediate target unless it is safe. If no target, use a
8322 else if (modifier
== EXPAND_STACK_PARM
)
8323 temp
= assign_temp (type
, 0, 0, 1);
8324 else if (original_target
8325 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8326 || (singleton
&& GET_CODE (original_target
) == REG
8327 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8328 && original_target
== var_rtx (singleton
)))
8329 && GET_MODE (original_target
) == mode
8330 #ifdef HAVE_conditional_move
8331 && (! can_conditionally_move_p (mode
)
8332 || GET_CODE (original_target
) == REG
8333 || TREE_ADDRESSABLE (type
))
8335 && (GET_CODE (original_target
) != MEM
8336 || TREE_ADDRESSABLE (type
)))
8337 temp
= original_target
;
8338 else if (TREE_ADDRESSABLE (type
))
8341 temp
= assign_temp (type
, 0, 0, 1);
8343 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8344 do the test of X as a store-flag operation, do this as
8345 A + ((X != 0) << log C). Similarly for other simple binary
8346 operators. Only do for C == 1 if BRANCH_COST is low. */
8347 if (temp
&& singleton
&& binary_op
8348 && (TREE_CODE (binary_op
) == PLUS_EXPR
8349 || TREE_CODE (binary_op
) == MINUS_EXPR
8350 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8351 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8352 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8353 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8354 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8358 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8359 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8360 ? addv_optab
: add_optab
)
8361 : TREE_CODE (binary_op
) == MINUS_EXPR
8362 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8363 ? subv_optab
: sub_optab
)
8364 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8367 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8368 if (singleton
== TREE_OPERAND (exp
, 1))
8369 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8371 cond
= TREE_OPERAND (exp
, 0);
8373 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8375 mode
, BRANCH_COST
<= 1);
8377 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8378 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8379 build_int_2 (tree_log2
8383 (safe_from_p (temp
, singleton
, 1)
8384 ? temp
: NULL_RTX
), 0);
8388 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8389 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8390 unsignedp
, OPTAB_LIB_WIDEN
);
8394 do_pending_stack_adjust ();
8396 op0
= gen_label_rtx ();
8398 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8402 /* If the target conflicts with the other operand of the
8403 binary op, we can't use it. Also, we can't use the target
8404 if it is a hard register, because evaluating the condition
8405 might clobber it. */
8407 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8408 || (GET_CODE (temp
) == REG
8409 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8410 temp
= gen_reg_rtx (mode
);
8411 store_expr (singleton
, temp
,
8412 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8415 expand_expr (singleton
,
8416 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8417 if (singleton
== TREE_OPERAND (exp
, 1))
8418 jumpif (TREE_OPERAND (exp
, 0), op0
);
8420 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8422 start_cleanup_deferral ();
8423 if (binary_op
&& temp
== 0)
8424 /* Just touch the other operand. */
8425 expand_expr (TREE_OPERAND (binary_op
, 1),
8426 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8428 store_expr (build (TREE_CODE (binary_op
), type
,
8429 make_tree (type
, temp
),
8430 TREE_OPERAND (binary_op
, 1)),
8431 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8433 store_expr (build1 (TREE_CODE (unary_op
), type
,
8434 make_tree (type
, temp
)),
8435 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8438 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8439 comparison operator. If we have one of these cases, set the
8440 output to A, branch on A (cse will merge these two references),
8441 then set the output to FOO. */
8443 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8444 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8446 TREE_OPERAND (exp
, 1), 0)
8447 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8448 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8449 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8451 if (GET_CODE (temp
) == REG
8452 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8453 temp
= gen_reg_rtx (mode
);
8454 store_expr (TREE_OPERAND (exp
, 1), temp
,
8455 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8456 jumpif (TREE_OPERAND (exp
, 0), op0
);
8458 start_cleanup_deferral ();
8459 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8460 store_expr (TREE_OPERAND (exp
, 2), temp
,
8461 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8463 expand_expr (TREE_OPERAND (exp
, 2),
8464 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8468 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8469 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8470 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8471 TREE_OPERAND (exp
, 2), 0)
8472 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8473 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8474 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8476 if (GET_CODE (temp
) == REG
8477 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8478 temp
= gen_reg_rtx (mode
);
8479 store_expr (TREE_OPERAND (exp
, 2), temp
,
8480 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8481 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8483 start_cleanup_deferral ();
8484 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8485 store_expr (TREE_OPERAND (exp
, 1), temp
,
8486 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8488 expand_expr (TREE_OPERAND (exp
, 1),
8489 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8494 op1
= gen_label_rtx ();
8495 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8497 start_cleanup_deferral ();
8499 /* One branch of the cond can be void, if it never returns. For
8500 example A ? throw : E */
8502 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8503 store_expr (TREE_OPERAND (exp
, 1), temp
,
8504 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8506 expand_expr (TREE_OPERAND (exp
, 1),
8507 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8508 end_cleanup_deferral ();
8510 emit_jump_insn (gen_jump (op1
));
8513 start_cleanup_deferral ();
8515 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8516 store_expr (TREE_OPERAND (exp
, 2), temp
,
8517 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8519 expand_expr (TREE_OPERAND (exp
, 2),
8520 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8523 end_cleanup_deferral ();
8534 /* Something needs to be initialized, but we didn't know
8535 where that thing was when building the tree. For example,
8536 it could be the return value of a function, or a parameter
8537 to a function which lays down in the stack, or a temporary
8538 variable which must be passed by reference.
8540 We guarantee that the expression will either be constructed
8541 or copied into our original target. */
8543 tree slot
= TREE_OPERAND (exp
, 0);
8544 tree cleanups
= NULL_TREE
;
8547 if (TREE_CODE (slot
) != VAR_DECL
)
8551 target
= original_target
;
8553 /* Set this here so that if we get a target that refers to a
8554 register variable that's already been used, put_reg_into_stack
8555 knows that it should fix up those uses. */
8556 TREE_USED (slot
) = 1;
8560 if (DECL_RTL_SET_P (slot
))
8562 target
= DECL_RTL (slot
);
8563 /* If we have already expanded the slot, so don't do
8565 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8570 target
= assign_temp (type
, 2, 0, 1);
8571 /* All temp slots at this level must not conflict. */
8572 preserve_temp_slots (target
);
8573 SET_DECL_RTL (slot
, target
);
8574 if (TREE_ADDRESSABLE (slot
))
8575 put_var_into_stack (slot
, /*rescan=*/false);
8577 /* Since SLOT is not known to the called function
8578 to belong to its stack frame, we must build an explicit
8579 cleanup. This case occurs when we must build up a reference
8580 to pass the reference as an argument. In this case,
8581 it is very likely that such a reference need not be
8584 if (TREE_OPERAND (exp
, 2) == 0)
8585 TREE_OPERAND (exp
, 2)
8586 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8587 cleanups
= TREE_OPERAND (exp
, 2);
8592 /* This case does occur, when expanding a parameter which
8593 needs to be constructed on the stack. The target
8594 is the actual stack address that we want to initialize.
8595 The function we call will perform the cleanup in this case. */
8597 /* If we have already assigned it space, use that space,
8598 not target that we were passed in, as our target
8599 parameter is only a hint. */
8600 if (DECL_RTL_SET_P (slot
))
8602 target
= DECL_RTL (slot
);
8603 /* If we have already expanded the slot, so don't do
8605 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8610 SET_DECL_RTL (slot
, target
);
8611 /* If we must have an addressable slot, then make sure that
8612 the RTL that we just stored in slot is OK. */
8613 if (TREE_ADDRESSABLE (slot
))
8614 put_var_into_stack (slot
, /*rescan=*/true);
8618 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8619 /* Mark it as expanded. */
8620 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8622 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8624 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8631 tree lhs
= TREE_OPERAND (exp
, 0);
8632 tree rhs
= TREE_OPERAND (exp
, 1);
8634 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8640 /* If lhs is complex, expand calls in rhs before computing it.
8641 That's so we don't compute a pointer and save it over a
8642 call. If lhs is simple, compute it first so we can give it
8643 as a target if the rhs is just a call. This avoids an
8644 extra temp and copy and that prevents a partial-subsumption
8645 which makes bad code. Actually we could treat
8646 component_ref's of vars like vars. */
8648 tree lhs
= TREE_OPERAND (exp
, 0);
8649 tree rhs
= TREE_OPERAND (exp
, 1);
8653 /* Check for |= or &= of a bitfield of size one into another bitfield
8654 of size 1. In this case, (unless we need the result of the
8655 assignment) we can do this more efficiently with a
8656 test followed by an assignment, if necessary.
8658 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8659 things change so we do, this code should be enhanced to
8662 && TREE_CODE (lhs
) == COMPONENT_REF
8663 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8664 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8665 && TREE_OPERAND (rhs
, 0) == lhs
8666 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8667 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8668 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8670 rtx label
= gen_label_rtx ();
8672 do_jump (TREE_OPERAND (rhs
, 1),
8673 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8674 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8675 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8676 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8678 : integer_zero_node
)),
8680 do_pending_stack_adjust ();
8685 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8691 if (!TREE_OPERAND (exp
, 0))
8692 expand_null_return ();
8694 expand_return (TREE_OPERAND (exp
, 0));
8697 case PREINCREMENT_EXPR
:
8698 case PREDECREMENT_EXPR
:
8699 return expand_increment (exp
, 0, ignore
);
8701 case POSTINCREMENT_EXPR
:
8702 case POSTDECREMENT_EXPR
:
8703 /* Faster to treat as pre-increment if result is not used. */
8704 return expand_increment (exp
, ! ignore
, ignore
);
8707 if (modifier
== EXPAND_STACK_PARM
)
8709 /* Are we taking the address of a nested function? */
8710 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8711 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8712 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8713 && ! TREE_STATIC (exp
))
8715 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8716 op0
= force_operand (op0
, target
);
8718 /* If we are taking the address of something erroneous, just
8720 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8722 /* If we are taking the address of a constant and are at the
8723 top level, we have to use output_constant_def since we can't
8724 call force_const_mem at top level. */
8726 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8727 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8729 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8732 /* We make sure to pass const0_rtx down if we came in with
8733 ignore set, to avoid doing the cleanups twice for something. */
8734 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8735 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8736 (modifier
== EXPAND_INITIALIZER
8737 ? modifier
: EXPAND_CONST_ADDRESS
));
8739 /* If we are going to ignore the result, OP0 will have been set
8740 to const0_rtx, so just return it. Don't get confused and
8741 think we are taking the address of the constant. */
8745 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8746 clever and returns a REG when given a MEM. */
8747 op0
= protect_from_queue (op0
, 1);
8749 /* We would like the object in memory. If it is a constant, we can
8750 have it be statically allocated into memory. For a non-constant,
8751 we need to allocate some memory and store the value into it. */
8753 if (CONSTANT_P (op0
))
8754 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8756 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8757 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8758 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
8760 /* If the operand is a SAVE_EXPR, we can deal with this by
8761 forcing the SAVE_EXPR into memory. */
8762 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8764 put_var_into_stack (TREE_OPERAND (exp
, 0),
8766 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8770 /* If this object is in a register, it can't be BLKmode. */
8771 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8772 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8774 if (GET_CODE (op0
) == PARALLEL
)
8775 /* Handle calls that pass values in multiple
8776 non-contiguous locations. The Irix 6 ABI has examples
8778 emit_group_store (memloc
, op0
, inner_type
,
8779 int_size_in_bytes (inner_type
));
8781 emit_move_insn (memloc
, op0
);
8787 if (GET_CODE (op0
) != MEM
)
8790 mark_temp_addr_taken (op0
);
8791 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8793 op0
= XEXP (op0
, 0);
8794 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8795 op0
= convert_memory_address (ptr_mode
, op0
);
8799 /* If OP0 is not aligned as least as much as the type requires, we
8800 need to make a temporary, copy OP0 to it, and take the address of
8801 the temporary. We want to use the alignment of the type, not of
8802 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8803 the test for BLKmode means that can't happen. The test for
8804 BLKmode is because we never make mis-aligned MEMs with
8807 We don't need to do this at all if the machine doesn't have
8808 strict alignment. */
8809 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8810 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8812 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8814 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8817 if (TYPE_ALIGN_OK (inner_type
))
8820 if (TREE_ADDRESSABLE (inner_type
))
8822 /* We can't make a bitwise copy of this object, so fail. */
8823 error ("cannot take the address of an unaligned member");
8827 new = assign_stack_temp_for_type
8828 (TYPE_MODE (inner_type
),
8829 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8830 : int_size_in_bytes (inner_type
),
8831 1, build_qualified_type (inner_type
,
8832 (TYPE_QUALS (inner_type
)
8833 | TYPE_QUAL_CONST
)));
8835 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
8836 (modifier
== EXPAND_STACK_PARM
8837 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
8842 op0
= force_operand (XEXP (op0
, 0), target
);
8846 && GET_CODE (op0
) != REG
8847 && modifier
!= EXPAND_CONST_ADDRESS
8848 && modifier
!= EXPAND_INITIALIZER
8849 && modifier
!= EXPAND_SUM
)
8850 op0
= force_reg (Pmode
, op0
);
8852 if (GET_CODE (op0
) == REG
8853 && ! REG_USERVAR_P (op0
))
8854 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8856 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8857 op0
= convert_memory_address (ptr_mode
, op0
);
8861 case ENTRY_VALUE_EXPR
:
8864 /* COMPLEX type for Extended Pascal & Fortran */
8867 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8870 /* Get the rtx code of the operands. */
8871 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8872 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8875 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8879 /* Move the real (op0) and imaginary (op1) parts to their location. */
8880 emit_move_insn (gen_realpart (mode
, target
), op0
);
8881 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8883 insns
= get_insns ();
8886 /* Complex construction should appear as a single unit. */
8887 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8888 each with a separate pseudo as destination.
8889 It's not correct for flow to treat them as a unit. */
8890 if (GET_CODE (target
) != CONCAT
)
8891 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8899 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8900 return gen_realpart (mode
, op0
);
8903 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8904 return gen_imagpart (mode
, op0
);
8908 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8912 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8915 target
= gen_reg_rtx (mode
);
8919 /* Store the realpart and the negated imagpart to target. */
8920 emit_move_insn (gen_realpart (partmode
, target
),
8921 gen_realpart (partmode
, op0
));
8923 imag_t
= gen_imagpart (partmode
, target
);
8924 temp
= expand_unop (partmode
,
8925 ! unsignedp
&& flag_trapv
8926 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8927 ? negv_optab
: neg_optab
,
8928 gen_imagpart (partmode
, op0
), imag_t
, 0);
8930 emit_move_insn (imag_t
, temp
);
8932 insns
= get_insns ();
8935 /* Conjugate should appear as a single unit
8936 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8937 each with a separate pseudo as destination.
8938 It's not correct for flow to treat them as a unit. */
8939 if (GET_CODE (target
) != CONCAT
)
8940 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8947 case TRY_CATCH_EXPR
:
8949 tree handler
= TREE_OPERAND (exp
, 1);
8951 expand_eh_region_start ();
8953 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8955 expand_eh_region_end_cleanup (handler
);
8960 case TRY_FINALLY_EXPR
:
8962 tree try_block
= TREE_OPERAND (exp
, 0);
8963 tree finally_block
= TREE_OPERAND (exp
, 1);
8965 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
8967 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8968 is not sufficient, so we cannot expand the block twice.
8969 So we play games with GOTO_SUBROUTINE_EXPR to let us
8970 expand the thing only once. */
8971 /* When not optimizing, we go ahead with this form since
8972 (1) user breakpoints operate more predictably without
8973 code duplication, and
8974 (2) we're not running any of the global optimizers
8975 that would explode in time/space with the highly
8976 connected CFG created by the indirect branching. */
8978 rtx finally_label
= gen_label_rtx ();
8979 rtx done_label
= gen_label_rtx ();
8980 rtx return_link
= gen_reg_rtx (Pmode
);
8981 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8982 (tree
) finally_label
, (tree
) return_link
);
8983 TREE_SIDE_EFFECTS (cleanup
) = 1;
8985 /* Start a new binding layer that will keep track of all cleanup
8986 actions to be performed. */
8987 expand_start_bindings (2);
8988 target_temp_slot_level
= temp_slot_level
;
8990 expand_decl_cleanup (NULL_TREE
, cleanup
);
8991 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8993 preserve_temp_slots (op0
);
8994 expand_end_bindings (NULL_TREE
, 0, 0);
8995 emit_jump (done_label
);
8996 emit_label (finally_label
);
8997 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8998 emit_indirect_jump (return_link
);
8999 emit_label (done_label
);
9003 expand_start_bindings (2);
9004 target_temp_slot_level
= temp_slot_level
;
9006 expand_decl_cleanup (NULL_TREE
, finally_block
);
9007 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9009 preserve_temp_slots (op0
);
9010 expand_end_bindings (NULL_TREE
, 0, 0);
9016 case GOTO_SUBROUTINE_EXPR
:
9018 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9019 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9020 rtx return_address
= gen_label_rtx ();
9021 emit_move_insn (return_link
,
9022 gen_rtx_LABEL_REF (Pmode
, return_address
));
9024 emit_label (return_address
);
9029 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9032 return get_exception_pointer (cfun
);
9035 /* Function descriptors are not valid except for as
9036 initialization constants, and should not be expanded. */
9040 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
,
9044 /* Here to do an ordinary binary operator, generating an instruction
9045 from the optab already placed in `this_optab'. */
9047 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
9048 subtarget
, &op0
, &op1
, 0);
9050 if (modifier
== EXPAND_STACK_PARM
)
9052 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9053 unsignedp
, OPTAB_LIB_WIDEN
);
9059 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9060 when applied to the address of EXP produces an address known to be
9061 aligned more than BIGGEST_ALIGNMENT. */
9064 is_aligning_offset (tree offset
, tree exp
)
9066 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9067 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9068 || TREE_CODE (offset
) == NOP_EXPR
9069 || TREE_CODE (offset
) == CONVERT_EXPR
9070 || TREE_CODE (offset
) == WITH_RECORD_EXPR
)
9071 offset
= TREE_OPERAND (offset
, 0);
9073 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9074 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9075 if (TREE_CODE (offset
) != BIT_AND_EXPR
9076 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9077 || compare_tree_int (TREE_OPERAND (offset
, 1), BIGGEST_ALIGNMENT
) <= 0
9078 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9081 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9082 It must be NEGATE_EXPR. Then strip any more conversions. */
9083 offset
= TREE_OPERAND (offset
, 0);
9084 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9085 || TREE_CODE (offset
) == NOP_EXPR
9086 || TREE_CODE (offset
) == CONVERT_EXPR
)
9087 offset
= TREE_OPERAND (offset
, 0);
9089 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9092 offset
= TREE_OPERAND (offset
, 0);
9093 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9094 || TREE_CODE (offset
) == NOP_EXPR
9095 || TREE_CODE (offset
) == CONVERT_EXPR
)
9096 offset
= TREE_OPERAND (offset
, 0);
9098 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9099 whose type is the same as EXP. */
9100 return (TREE_CODE (offset
) == ADDR_EXPR
9101 && (TREE_OPERAND (offset
, 0) == exp
9102 || (TREE_CODE (TREE_OPERAND (offset
, 0)) == PLACEHOLDER_EXPR
9103 && (TREE_TYPE (TREE_OPERAND (offset
, 0))
9104 == TREE_TYPE (exp
)))));
9107 /* Return the tree node if an ARG corresponds to a string constant or zero
9108 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9109 in bytes within the string that ARG is accessing. The type of the
9110 offset will be `sizetype'. */
9113 string_constant (tree arg
, tree
*ptr_offset
)
9117 if (TREE_CODE (arg
) == ADDR_EXPR
9118 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9120 *ptr_offset
= size_zero_node
;
9121 return TREE_OPERAND (arg
, 0);
9123 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9125 tree arg0
= TREE_OPERAND (arg
, 0);
9126 tree arg1
= TREE_OPERAND (arg
, 1);
9131 if (TREE_CODE (arg0
) == ADDR_EXPR
9132 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9134 *ptr_offset
= convert (sizetype
, arg1
);
9135 return TREE_OPERAND (arg0
, 0);
9137 else if (TREE_CODE (arg1
) == ADDR_EXPR
9138 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9140 *ptr_offset
= convert (sizetype
, arg0
);
9141 return TREE_OPERAND (arg1
, 0);
9148 /* Expand code for a post- or pre- increment or decrement
9149 and return the RTX for the result.
9150 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9153 expand_increment (tree exp
, int post
, int ignore
)
9157 tree incremented
= TREE_OPERAND (exp
, 0);
9158 optab this_optab
= add_optab
;
9160 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9161 int op0_is_copy
= 0;
9162 int single_insn
= 0;
9163 /* 1 means we can't store into OP0 directly,
9164 because it is a subreg narrower than a word,
9165 and we don't dare clobber the rest of the word. */
9168 /* Stabilize any component ref that might need to be
9169 evaluated more than once below. */
9171 || TREE_CODE (incremented
) == BIT_FIELD_REF
9172 || (TREE_CODE (incremented
) == COMPONENT_REF
9173 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9174 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9175 incremented
= stabilize_reference (incremented
);
9176 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9177 ones into save exprs so that they don't accidentally get evaluated
9178 more than once by the code below. */
9179 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9180 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9181 incremented
= save_expr (incremented
);
9183 /* Compute the operands as RTX.
9184 Note whether OP0 is the actual lvalue or a copy of it:
9185 I believe it is a copy iff it is a register or subreg
9186 and insns were generated in computing it. */
9188 temp
= get_last_insn ();
9189 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9191 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9192 in place but instead must do sign- or zero-extension during assignment,
9193 so we copy it into a new register and let the code below use it as
9196 Note that we can safely modify this SUBREG since it is know not to be
9197 shared (it was made by the expand_expr call above). */
9199 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9202 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9206 else if (GET_CODE (op0
) == SUBREG
9207 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9209 /* We cannot increment this SUBREG in place. If we are
9210 post-incrementing, get a copy of the old value. Otherwise,
9211 just mark that we cannot increment in place. */
9213 op0
= copy_to_reg (op0
);
9218 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9219 && temp
!= get_last_insn ());
9220 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9222 /* Decide whether incrementing or decrementing. */
9223 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9224 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9225 this_optab
= sub_optab
;
9227 /* Convert decrement by a constant into a negative increment. */
9228 if (this_optab
== sub_optab
9229 && GET_CODE (op1
) == CONST_INT
)
9231 op1
= GEN_INT (-INTVAL (op1
));
9232 this_optab
= add_optab
;
9235 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9236 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9238 /* For a preincrement, see if we can do this with a single instruction. */
9241 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9242 if (icode
!= (int) CODE_FOR_nothing
9243 /* Make sure that OP0 is valid for operands 0 and 1
9244 of the insn we want to queue. */
9245 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9246 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9247 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9251 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9252 then we cannot just increment OP0. We must therefore contrive to
9253 increment the original value. Then, for postincrement, we can return
9254 OP0 since it is a copy of the old value. For preincrement, expand here
9255 unless we can do it with a single insn.
9257 Likewise if storing directly into OP0 would clobber high bits
9258 we need to preserve (bad_subreg). */
9259 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9261 /* This is the easiest way to increment the value wherever it is.
9262 Problems with multiple evaluation of INCREMENTED are prevented
9263 because either (1) it is a component_ref or preincrement,
9264 in which case it was stabilized above, or (2) it is an array_ref
9265 with constant index in an array in a register, which is
9266 safe to reevaluate. */
9267 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9268 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9269 ? MINUS_EXPR
: PLUS_EXPR
),
9272 TREE_OPERAND (exp
, 1));
9274 while (TREE_CODE (incremented
) == NOP_EXPR
9275 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9277 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9278 incremented
= TREE_OPERAND (incremented
, 0);
9281 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9282 return post
? op0
: temp
;
9287 /* We have a true reference to the value in OP0.
9288 If there is an insn to add or subtract in this mode, queue it.
9289 Queuing the increment insn avoids the register shuffling
9290 that often results if we must increment now and first save
9291 the old value for subsequent use. */
9293 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9294 op0
= stabilize (op0
);
9297 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9298 if (icode
!= (int) CODE_FOR_nothing
9299 /* Make sure that OP0 is valid for operands 0 and 1
9300 of the insn we want to queue. */
9301 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9302 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9304 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9305 op1
= force_reg (mode
, op1
);
9307 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9309 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9311 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9312 ? force_reg (Pmode
, XEXP (op0
, 0))
9313 : copy_to_reg (XEXP (op0
, 0)));
9316 op0
= replace_equiv_address (op0
, addr
);
9317 temp
= force_reg (GET_MODE (op0
), op0
);
9318 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9319 op1
= force_reg (mode
, op1
);
9321 /* The increment queue is LIFO, thus we have to `queue'
9322 the instructions in reverse order. */
9323 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9324 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9329 /* Preincrement, or we can't increment with one simple insn. */
9331 /* Save a copy of the value before inc or dec, to return it later. */
9332 temp
= value
= copy_to_reg (op0
);
9334 /* Arrange to return the incremented value. */
9335 /* Copy the rtx because expand_binop will protect from the queue,
9336 and the results of that would be invalid for us to return
9337 if our caller does emit_queue before using our result. */
9338 temp
= copy_rtx (value
= op0
);
9340 /* Increment however we can. */
9341 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9342 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9344 /* Make sure the value is stored into OP0. */
9346 emit_move_insn (op0
, op1
);
9351 /* Generate code to calculate EXP using a store-flag instruction
9352 and return an rtx for the result. EXP is either a comparison
9353 or a TRUTH_NOT_EXPR whose operand is a comparison.
9355 If TARGET is nonzero, store the result there if convenient.
9357 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9360 Return zero if there is no suitable set-flag instruction
9361 available on this machine.
9363 Once expand_expr has been called on the arguments of the comparison,
9364 we are committed to doing the store flag, since it is not safe to
9365 re-evaluate the expression. We emit the store-flag insn by calling
9366 emit_store_flag, but only expand the arguments if we have a reason
9367 to believe that emit_store_flag will be successful. If we think that
9368 it will, but it isn't, we have to simulate the store-flag with a
9369 set/jump/set sequence. */
9372 do_store_flag (tree exp
, rtx target
, enum machine_mode mode
, int only_cheap
)
9375 tree arg0
, arg1
, type
;
9377 enum machine_mode operand_mode
;
9381 enum insn_code icode
;
9382 rtx subtarget
= target
;
9385 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9386 result at the end. We can't simply invert the test since it would
9387 have already been inverted if it were valid. This case occurs for
9388 some floating-point comparisons. */
9390 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9391 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9393 arg0
= TREE_OPERAND (exp
, 0);
9394 arg1
= TREE_OPERAND (exp
, 1);
9396 /* Don't crash if the comparison was erroneous. */
9397 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9400 type
= TREE_TYPE (arg0
);
9401 operand_mode
= TYPE_MODE (type
);
9402 unsignedp
= TREE_UNSIGNED (type
);
9404 /* We won't bother with BLKmode store-flag operations because it would mean
9405 passing a lot of information to emit_store_flag. */
9406 if (operand_mode
== BLKmode
)
9409 /* We won't bother with store-flag operations involving function pointers
9410 when function pointers must be canonicalized before comparisons. */
9411 #ifdef HAVE_canonicalize_funcptr_for_compare
9412 if (HAVE_canonicalize_funcptr_for_compare
9413 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9414 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9416 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9417 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9418 == FUNCTION_TYPE
))))
9425 /* Get the rtx comparison code to use. We know that EXP is a comparison
9426 operation of some type. Some comparisons against 1 and -1 can be
9427 converted to comparisons with zero. Do so here so that the tests
9428 below will be aware that we have a comparison with zero. These
9429 tests will not catch constants in the first operand, but constants
9430 are rarely passed as the first operand. */
9432 switch (TREE_CODE (exp
))
9441 if (integer_onep (arg1
))
9442 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9444 code
= unsignedp
? LTU
: LT
;
9447 if (! unsignedp
&& integer_all_onesp (arg1
))
9448 arg1
= integer_zero_node
, code
= LT
;
9450 code
= unsignedp
? LEU
: LE
;
9453 if (! unsignedp
&& integer_all_onesp (arg1
))
9454 arg1
= integer_zero_node
, code
= GE
;
9456 code
= unsignedp
? GTU
: GT
;
9459 if (integer_onep (arg1
))
9460 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9462 code
= unsignedp
? GEU
: GE
;
9465 case UNORDERED_EXPR
:
9491 /* Put a constant second. */
9492 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9494 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9495 code
= swap_condition (code
);
9498 /* If this is an equality or inequality test of a single bit, we can
9499 do this by shifting the bit being tested to the low-order bit and
9500 masking the result with the constant 1. If the condition was EQ,
9501 we xor it with 1. This does not require an scc insn and is faster
9502 than an scc insn even if we have it.
9504 The code to make this transformation was moved into fold_single_bit_test,
9505 so we just call into the folder and expand its result. */
9507 if ((code
== NE
|| code
== EQ
)
9508 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9509 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9511 tree type
= (*lang_hooks
.types
.type_for_mode
) (mode
, unsignedp
);
9512 return expand_expr (fold_single_bit_test (code
== NE
? NE_EXPR
: EQ_EXPR
,
9514 target
, VOIDmode
, EXPAND_NORMAL
);
9517 /* Now see if we are likely to be able to do this. Return if not. */
9518 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
9521 icode
= setcc_gen_code
[(int) code
];
9522 if (icode
== CODE_FOR_nothing
9523 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
9525 /* We can only do this if it is one of the special cases that
9526 can be handled without an scc insn. */
9527 if ((code
== LT
&& integer_zerop (arg1
))
9528 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
9530 else if (BRANCH_COST
>= 0
9531 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
9532 && TREE_CODE (type
) != REAL_TYPE
9533 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
9534 != CODE_FOR_nothing
)
9535 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
9536 != CODE_FOR_nothing
)))
9542 if (! get_subtarget (target
)
9543 || GET_MODE (subtarget
) != operand_mode
)
9546 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, 0);
9549 target
= gen_reg_rtx (mode
);
9551 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9552 because, if the emit_store_flag does anything it will succeed and
9553 OP0 and OP1 will not be used subsequently. */
9555 result
= emit_store_flag (target
, code
,
9556 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
9557 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
9558 operand_mode
, unsignedp
, 1);
9563 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
9564 result
, 0, OPTAB_LIB_WIDEN
);
9568 /* If this failed, we have to do this with set/compare/jump/set code. */
9569 if (GET_CODE (target
) != REG
9570 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
9571 target
= gen_reg_rtx (GET_MODE (target
));
9573 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
9574 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
9575 operand_mode
, NULL_RTX
);
9576 if (GET_CODE (result
) == CONST_INT
)
9577 return (((result
== const0_rtx
&& ! invert
)
9578 || (result
!= const0_rtx
&& invert
))
9579 ? const0_rtx
: const1_rtx
);
9581 /* The code of RESULT may not match CODE if compare_from_rtx
9582 decided to swap its operands and reverse the original code.
9584 We know that compare_from_rtx returns either a CONST_INT or
9585 a new comparison code, so it is safe to just extract the
9586 code from RESULT. */
9587 code
= GET_CODE (result
);
9589 label
= gen_label_rtx ();
9590 if (bcc_gen_fctn
[(int) code
] == 0)
9593 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
9594 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
9601 /* Stubs in case we haven't got a casesi insn. */
9603 # define HAVE_casesi 0
9604 # define gen_casesi(a, b, c, d, e) (0)
9605 # define CODE_FOR_casesi CODE_FOR_nothing
9608 /* If the machine does not have a case insn that compares the bounds,
9609 this means extra overhead for dispatch tables, which raises the
9610 threshold for using them. */
9611 #ifndef CASE_VALUES_THRESHOLD
9612 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9613 #endif /* CASE_VALUES_THRESHOLD */
9616 case_values_threshold (void)
9618 return CASE_VALUES_THRESHOLD
;
9621 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9622 0 otherwise (i.e. if there is no casesi instruction). */
9624 try_casesi (tree index_type
, tree index_expr
, tree minval
, tree range
,
9625 rtx table_label ATTRIBUTE_UNUSED
, rtx default_label
)
9627 enum machine_mode index_mode
= SImode
;
9628 int index_bits
= GET_MODE_BITSIZE (index_mode
);
9629 rtx op1
, op2
, index
;
9630 enum machine_mode op_mode
;
9635 /* Convert the index to SImode. */
9636 if (GET_MODE_BITSIZE (TYPE_MODE (index_type
)) > GET_MODE_BITSIZE (index_mode
))
9638 enum machine_mode omode
= TYPE_MODE (index_type
);
9639 rtx rangertx
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9641 /* We must handle the endpoints in the original mode. */
9642 index_expr
= build (MINUS_EXPR
, index_type
,
9643 index_expr
, minval
);
9644 minval
= integer_zero_node
;
9645 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9646 emit_cmp_and_jump_insns (rangertx
, index
, LTU
, NULL_RTX
,
9647 omode
, 1, default_label
);
9648 /* Now we can safely truncate. */
9649 index
= convert_to_mode (index_mode
, index
, 0);
9653 if (TYPE_MODE (index_type
) != index_mode
)
9655 index_expr
= convert ((*lang_hooks
.types
.type_for_size
)
9656 (index_bits
, 0), index_expr
);
9657 index_type
= TREE_TYPE (index_expr
);
9660 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9663 index
= protect_from_queue (index
, 0);
9664 do_pending_stack_adjust ();
9666 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[0].mode
;
9667 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[0].predicate
)
9669 index
= copy_to_mode_reg (op_mode
, index
);
9671 op1
= expand_expr (minval
, NULL_RTX
, VOIDmode
, 0);
9673 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[1].mode
;
9674 op1
= convert_modes (op_mode
, TYPE_MODE (TREE_TYPE (minval
)),
9675 op1
, TREE_UNSIGNED (TREE_TYPE (minval
)));
9676 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[1].predicate
)
9678 op1
= copy_to_mode_reg (op_mode
, op1
);
9680 op2
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9682 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[2].mode
;
9683 op2
= convert_modes (op_mode
, TYPE_MODE (TREE_TYPE (range
)),
9684 op2
, TREE_UNSIGNED (TREE_TYPE (range
)));
9685 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[2].predicate
)
9687 op2
= copy_to_mode_reg (op_mode
, op2
);
9689 emit_jump_insn (gen_casesi (index
, op1
, op2
,
9690 table_label
, default_label
));
9694 /* Attempt to generate a tablejump instruction; same concept. */
9695 #ifndef HAVE_tablejump
9696 #define HAVE_tablejump 0
9697 #define gen_tablejump(x, y) (0)
9700 /* Subroutine of the next function.
9702 INDEX is the value being switched on, with the lowest value
9703 in the table already subtracted.
9704 MODE is its expected mode (needed if INDEX is constant).
9705 RANGE is the length of the jump table.
9706 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9708 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9709 index value is out of range. */
9712 do_tablejump (rtx index
, enum machine_mode mode
, rtx range
, rtx table_label
,
9717 if (INTVAL (range
) > cfun
->max_jumptable_ents
)
9718 cfun
->max_jumptable_ents
= INTVAL (range
);
9720 /* Do an unsigned comparison (in the proper mode) between the index
9721 expression and the value which represents the length of the range.
9722 Since we just finished subtracting the lower bound of the range
9723 from the index expression, this comparison allows us to simultaneously
9724 check that the original index expression value is both greater than
9725 or equal to the minimum value of the range and less than or equal to
9726 the maximum value of the range. */
9728 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
9731 /* If index is in range, it must fit in Pmode.
9732 Convert to Pmode so we can index with it. */
9734 index
= convert_to_mode (Pmode
, index
, 1);
9736 /* Don't let a MEM slip through, because then INDEX that comes
9737 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9738 and break_out_memory_refs will go to work on it and mess it up. */
9739 #ifdef PIC_CASE_VECTOR_ADDRESS
9740 if (flag_pic
&& GET_CODE (index
) != REG
)
9741 index
= copy_to_mode_reg (Pmode
, index
);
9744 /* If flag_force_addr were to affect this address
9745 it could interfere with the tricky assumptions made
9746 about addresses that contain label-refs,
9747 which may be valid only very near the tablejump itself. */
9748 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9749 GET_MODE_SIZE, because this indicates how large insns are. The other
9750 uses should all be Pmode, because they are addresses. This code
9751 could fail if addresses and insns are not the same size. */
9752 index
= gen_rtx_PLUS (Pmode
,
9753 gen_rtx_MULT (Pmode
, index
,
9754 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
9755 gen_rtx_LABEL_REF (Pmode
, table_label
));
9756 #ifdef PIC_CASE_VECTOR_ADDRESS
9758 index
= PIC_CASE_VECTOR_ADDRESS (index
);
9761 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
9762 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
9763 vector
= gen_rtx_MEM (CASE_VECTOR_MODE
, index
);
9764 RTX_UNCHANGING_P (vector
) = 1;
9765 MEM_NOTRAP_P (vector
) = 1;
9766 convert_move (temp
, vector
, 0);
9768 emit_jump_insn (gen_tablejump (temp
, table_label
));
9770 /* If we are generating PIC code or if the table is PC-relative, the
9771 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9772 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
9777 try_tablejump (tree index_type
, tree index_expr
, tree minval
, tree range
,
9778 rtx table_label
, rtx default_label
)
9782 if (! HAVE_tablejump
)
9785 index_expr
= fold (build (MINUS_EXPR
, index_type
,
9786 convert (index_type
, index_expr
),
9787 convert (index_type
, minval
)));
9788 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9790 index
= protect_from_queue (index
, 0);
9791 do_pending_stack_adjust ();
9793 do_tablejump (index
, TYPE_MODE (index_type
),
9794 convert_modes (TYPE_MODE (index_type
),
9795 TYPE_MODE (TREE_TYPE (range
)),
9796 expand_expr (range
, NULL_RTX
,
9798 TREE_UNSIGNED (TREE_TYPE (range
))),
9799 table_label
, default_label
);
9803 /* Nonzero if the mode is a valid vector mode for this architecture.
9804 This returns nonzero even if there is no hardware support for the
9805 vector mode, but we can emulate with narrower modes. */
9808 vector_mode_valid_p (enum machine_mode mode
)
9810 enum mode_class
class = GET_MODE_CLASS (mode
);
9811 enum machine_mode innermode
;
9813 /* Doh! What's going on? */
9814 if (class != MODE_VECTOR_INT
9815 && class != MODE_VECTOR_FLOAT
)
9818 /* Hardware support. Woo hoo! */
9819 if (VECTOR_MODE_SUPPORTED_P (mode
))
9822 innermode
= GET_MODE_INNER (mode
);
9824 /* We should probably return 1 if requesting V4DI and we have no DI,
9825 but we have V2DI, but this is probably very unlikely. */
9827 /* If we have support for the inner mode, we can safely emulate it.
9828 We may not have V2DI, but me can emulate with a pair of DIs. */
9829 return mov_optab
->handlers
[innermode
].insn_code
!= CODE_FOR_nothing
;
9832 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9834 const_vector_from_tree (tree exp
)
9839 enum machine_mode inner
, mode
;
9841 mode
= TYPE_MODE (TREE_TYPE (exp
));
9843 if (is_zeros_p (exp
))
9844 return CONST0_RTX (mode
);
9846 units
= GET_MODE_NUNITS (mode
);
9847 inner
= GET_MODE_INNER (mode
);
9849 v
= rtvec_alloc (units
);
9851 link
= TREE_VECTOR_CST_ELTS (exp
);
9852 for (i
= 0; link
; link
= TREE_CHAIN (link
), ++i
)
9854 elt
= TREE_VALUE (link
);
9856 if (TREE_CODE (elt
) == REAL_CST
)
9857 RTVEC_ELT (v
, i
) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt
),
9860 RTVEC_ELT (v
, i
) = immed_double_const (TREE_INT_CST_LOW (elt
),
9861 TREE_INT_CST_HIGH (elt
),
9865 /* Initialize remaining elements to 0. */
9866 for (; i
< units
; ++i
)
9867 RTVEC_ELT (v
, i
) = CONST0_RTX (inner
);
9869 return gen_rtx_raw_CONST_VECTOR (mode
, v
);
9872 #include "gt-expr.h"