1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
72 #define STACK_PUSH_CODE PRE_INC
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
81 #define TARGET_MEM_FUNCTIONS 0
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
/* NOTE(review): head of a chain; push/pop sites are not visible in this
   extraction — confirm maintenance discipline against the rest of the file. */
94 tree placeholder_list
= 0;
96 /* This structure is used by move_by_pieces to describe the move to
107 int explicit_inc_from
;
108 unsigned HOST_WIDE_INT len
;
109 HOST_WIDE_INT offset
;
113 /* This structure is used by store_by_pieces to describe the clear to
116 struct store_by_pieces
122 unsigned HOST_WIDE_INT len
;
123 HOST_WIDE_INT offset
;
124 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
129 static rtx
enqueue_insn (rtx
, rtx
);
130 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
132 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
133 struct move_by_pieces
*);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx
, rtx
, rtx
, unsigned);
136 static rtx
emit_block_move_via_libcall (rtx
, rtx
, rtx
);
137 static tree
emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
139 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
140 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
143 struct store_by_pieces
*);
144 static bool clear_storage_via_clrstr (rtx
, rtx
, unsigned);
145 static rtx
clear_storage_via_libcall (rtx
, rtx
);
146 static tree
clear_storage_libcall_fn (int);
147 static rtx
compress_float_constant (rtx
, rtx
);
148 static rtx
get_subtarget (rtx
);
149 static int is_zeros_p (tree
);
150 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
151 HOST_WIDE_INT
, enum machine_mode
,
152 tree
, tree
, int, int);
153 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
154 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
155 tree
, enum machine_mode
, int, tree
, int);
156 static rtx
var_rtx (tree
);
158 static unsigned HOST_WIDE_INT
highest_pow2_factor (tree
);
159 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree
, tree
);
161 static int is_aligning_offset (tree
, tree
);
162 static rtx
expand_increment (tree
, int, int);
163 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
164 enum expand_modifier
);
165 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
, int);
167 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
169 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
170 static rtx
const_vector_from_tree (tree
);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
/* Indexed by (int) mode; both arrays are zeroed and then filled in by
   init_expr_once below via recog () on a synthetic SET pattern. */
176 static char direct_load
[NUM_MACHINE_MODES
];
177 static char direct_store
[NUM_MACHINE_MODES
];
179 /* Record for each mode whether we can float-extend from memory. */
/* Indexed as [destination mode][source mode]; set true by init_expr_once
   when can_extend_p reports an insn whose memory-operand predicate accepts
   a MEM in the source mode. */
181 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
204 /* This array records the insn_code of insns to perform block moves. */
/* NOTE(review): presumably consulted by emit_block_move_via_movstr /
   clear_storage_via_clrstr (declared above) — confirm at their definitions,
   which are not visible in this extraction. */
205 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
/* NOTE(review): the distinction between the cmpstr and cmpmem variants is
   not established by code visible here — verify against the target insn
   documentation before relying on either. */
212 enum insn_code cmpstr_optab
[NUM_MACHINE_MODES
];
213 enum insn_code cmpmem_optab
[NUM_MACHINE_MODES
];
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack
*expr_wfl_stack
;
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
228 init_expr_once (void)
231 enum machine_mode mode
;
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
240 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg
= gen_rtx_REG (VOIDmode
, -1);
246 insn
= rtx_alloc (INSN
);
247 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
248 PATTERN (insn
) = pat
;
250 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
251 mode
= (enum machine_mode
) ((int) mode
+ 1))
255 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
256 PUT_MODE (mem
, mode
);
257 PUT_MODE (mem1
, mode
);
258 PUT_MODE (reg
, mode
);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
264 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
265 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
268 if (! HARD_REGNO_MODE_OK (regno
, mode
))
274 SET_DEST (pat
) = reg
;
275 if (recog (pat
, insn
, &num_clobbers
) >= 0)
276 direct_load
[(int) mode
] = 1;
278 SET_SRC (pat
) = mem1
;
279 SET_DEST (pat
) = reg
;
280 if (recog (pat
, insn
, &num_clobbers
) >= 0)
281 direct_load
[(int) mode
] = 1;
284 SET_DEST (pat
) = mem
;
285 if (recog (pat
, insn
, &num_clobbers
) >= 0)
286 direct_store
[(int) mode
] = 1;
289 SET_DEST (pat
) = mem1
;
290 if (recog (pat
, insn
, &num_clobbers
) >= 0)
291 direct_store
[(int) mode
] = 1;
295 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
297 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
298 mode
= GET_MODE_WIDER_MODE (mode
))
300 enum machine_mode srcmode
;
301 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
302 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
306 ic
= can_extend_p (mode
, srcmode
, 0);
307 if (ic
== CODE_FOR_nothing
)
310 PUT_MODE (mem
, srcmode
);
312 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
313 float_extend_from_mem
[mode
][srcmode
] = true;
318 /* This is run at the start of compiling a function. */
323 cfun
->expr
= ggc_alloc_cleared (sizeof (struct expr_status
));
326 /* Small sanity check that the queue is empty at the end of a function. */
329 finish_expr_for_function (void)
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
346 enqueue_insn (rtx var
, rtx body
)
348 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
349 body
, pending_chain
);
350 return pending_chain
;
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
369 protect_from_queue (rtx x
, int modify
)
371 RTX_CODE code
= GET_CODE (x
);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain
== 0)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
386 if (code
== MEM
&& GET_MODE (x
) != BLKmode
387 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
390 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
394 rtx temp
= gen_reg_rtx (GET_MODE (x
));
396 emit_insn_before (gen_move_insn (temp
, new),
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
410 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
411 if (tem
!= XEXP (x
, 0))
417 else if (code
== PLUS
|| code
== MULT
)
419 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
420 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
421 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
433 if (QUEUED_INSN (x
) == 0)
434 return copy_to_reg (QUEUED_VAR (x
));
435 /* If the increment has happened and a pre-increment copy exists,
437 if (QUEUED_COPY (x
) != 0)
438 return QUEUED_COPY (x
);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
444 return QUEUED_COPY (x
);
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
453 queued_subexp_p (rtx x
)
455 enum rtx_code code
= GET_CODE (x
);
461 return queued_subexp_p (XEXP (x
, 0));
465 return (queued_subexp_p (XEXP (x
, 0))
466 || queued_subexp_p (XEXP (x
, 1)));
472 /* Perform all the pending incrementations. */
478 while ((p
= pending_chain
))
480 rtx body
= QUEUED_BODY (p
);
482 switch (GET_CODE (body
))
490 QUEUED_INSN (p
) = body
;
494 #ifdef ENABLE_CHECKING
501 QUEUED_INSN (p
) = emit_insn (body
);
505 pending_chain
= QUEUED_NEXT (p
);
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
515 convert_move (rtx to
, rtx from
, int unsignedp
)
517 enum machine_mode to_mode
= GET_MODE (to
);
518 enum machine_mode from_mode
= GET_MODE (from
);
519 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
520 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
526 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
528 to
= protect_from_queue (to
, 1);
529 from
= protect_from_queue (from
, 0);
531 if (to_real
!= from_real
)
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
538 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
540 >= GET_MODE_SIZE (to_mode
))
541 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
542 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
544 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
547 if (to_mode
== from_mode
548 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
550 emit_move_insn (to
, from
);
554 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
556 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
559 if (VECTOR_MODE_P (to_mode
))
560 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
562 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
564 emit_move_insn (to
, from
);
568 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
570 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
571 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
580 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
582 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
587 /* Try converting directly if the insn is supported. */
589 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
590 if (code
!= CODE_FOR_nothing
)
592 emit_unop_insn (code
, to
, from
,
593 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
597 /* Otherwise use a libcall. */
598 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
601 /* This conversion is not implemented yet. */
605 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
607 insns
= get_insns ();
609 emit_libcall_block (insns
, to
, value
,
610 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
612 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
624 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
628 if (full_mode
!= from_mode
)
629 from
= convert_to_mode (full_mode
, from
, unsignedp
);
630 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
634 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
639 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
643 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
645 if (to_mode
== full_mode
)
648 /* else proceed to integer conversions below */
649 from_mode
= full_mode
;
652 /* Now both modes are integers. */
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
656 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
663 enum machine_mode lowpart_mode
;
664 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
666 /* Try converting directly if the insn is supported. */
667 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
675 from
= force_reg (from_mode
, from
);
676 emit_unop_insn (code
, to
, from
, equiv_code
);
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
681 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
682 != CODE_FOR_nothing
))
684 if (GET_CODE (to
) == REG
)
685 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
686 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
687 emit_unop_insn (code
, to
,
688 gen_lowpart (word_mode
, to
), equiv_code
);
692 /* No special multiword conversion insn; do it by hand. */
695 /* Since we will turn this into a no conflict block, we must ensure
696 that the source does not overlap the target. */
698 if (reg_overlap_mentioned_p (to
, from
))
699 from
= force_reg (from_mode
, from
);
701 /* Get a copy of FROM widened to a word, if necessary. */
702 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
703 lowpart_mode
= word_mode
;
705 lowpart_mode
= from_mode
;
707 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
709 lowpart
= gen_lowpart (lowpart_mode
, to
);
710 emit_move_insn (lowpart
, lowfrom
);
712 /* Compute the value to put in each remaining word. */
714 fill_value
= const0_rtx
;
719 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
720 && STORE_FLAG_VALUE
== -1)
722 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
724 fill_value
= gen_reg_rtx (word_mode
);
725 emit_insn (gen_slt (fill_value
));
731 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
732 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
734 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
738 /* Fill the remaining words. */
739 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
741 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
742 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
747 if (fill_value
!= subword
)
748 emit_move_insn (subword
, fill_value
);
751 insns
= get_insns ();
754 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
755 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
759 /* Truncating multi-word to a word or less. */
760 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
761 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
763 if (!((GET_CODE (from
) == MEM
764 && ! MEM_VOLATILE_P (from
)
765 && direct_load
[(int) to_mode
]
766 && ! mode_dependent_address_p (XEXP (from
, 0)))
767 || GET_CODE (from
) == REG
768 || GET_CODE (from
) == SUBREG
))
769 from
= force_reg (from_mode
, from
);
770 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
774 /* Now follow all the conversions between integers
775 no more than a word long. */
777 /* For truncation, usually we can just refer to FROM in a narrower mode. */
778 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
779 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
780 GET_MODE_BITSIZE (from_mode
)))
782 if (!((GET_CODE (from
) == MEM
783 && ! MEM_VOLATILE_P (from
)
784 && direct_load
[(int) to_mode
]
785 && ! mode_dependent_address_p (XEXP (from
, 0)))
786 || GET_CODE (from
) == REG
787 || GET_CODE (from
) == SUBREG
))
788 from
= force_reg (from_mode
, from
);
789 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
790 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
791 from
= copy_to_reg (from
);
792 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
796 /* Handle extension. */
797 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
799 /* Convert directly if that works. */
800 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
804 from
= force_not_mem (from
);
806 emit_unop_insn (code
, to
, from
, equiv_code
);
811 enum machine_mode intermediate
;
815 /* Search for a mode to convert via. */
816 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
817 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
818 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
820 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
821 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
822 GET_MODE_BITSIZE (intermediate
))))
823 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
824 != CODE_FOR_nothing
))
826 convert_move (to
, convert_to_mode (intermediate
, from
,
827 unsignedp
), unsignedp
);
831 /* No suitable intermediate mode.
832 Generate what we need with shifts. */
833 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
834 - GET_MODE_BITSIZE (from_mode
), 0);
835 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
836 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
838 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
841 emit_move_insn (to
, tmp
);
846 /* Support special truncate insns for certain modes. */
847 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
849 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
854 /* Handle truncation of volatile memrefs, and so on;
855 the things that couldn't be truncated directly,
856 and for which there was no special instruction.
858 ??? Code above formerly short-circuited this, for most integer
859 mode pairs, with a force_reg in from_mode followed by a recursive
860 call to this routine. Appears always to have been wrong. */
861 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
863 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
864 emit_move_insn (to
, temp
);
868 /* Mode combination is not recognized. */
872 /* Return an rtx for a value that would result
873 from converting X to mode MODE.
874 Both X and MODE may be floating, or both integer.
875 UNSIGNEDP is nonzero if X is an unsigned value.
876 This can be done by referring to a part of X in place
877 or by copying to a new temporary with conversion.
879 This function *must not* call protect_from_queue
880 except when putting X into an insn (in which case convert_move does it). */
883 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
885 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
888 /* Return an rtx for a value that would result
889 from converting X from mode OLDMODE to mode MODE.
890 Both modes may be floating, or both integer.
891 UNSIGNEDP is nonzero if X is an unsigned value.
893 This can be done by referring to a part of X in place
894 or by copying to a new temporary with conversion.
896 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
898 This function *must not* call protect_from_queue
899 except when putting X into an insn (in which case convert_move does it). */
902 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
906 /* If FROM is a SUBREG that indicates that we have already done at least
907 the required extension, strip it. */
909 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
910 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
911 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
912 x
= gen_lowpart (mode
, x
);
914 if (GET_MODE (x
) != VOIDmode
)
915 oldmode
= GET_MODE (x
);
920 /* There is one case that we must handle specially: If we are converting
921 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
922 we are to interpret the constant as unsigned, gen_lowpart will do
923 the wrong if the constant appears negative. What we want to do is
924 make the high-order word of the constant zero, not all ones. */
926 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
927 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
928 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
930 HOST_WIDE_INT val
= INTVAL (x
);
932 if (oldmode
!= VOIDmode
933 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
935 int width
= GET_MODE_BITSIZE (oldmode
);
937 /* We need to zero extend VAL. */
938 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
941 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
944 /* We can do this with a gen_lowpart if both desired and current modes
945 are integer, and this is either a constant integer, a register, or a
946 non-volatile MEM. Except for the constant case where MODE is no
947 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
949 if ((GET_CODE (x
) == CONST_INT
950 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
951 || (GET_MODE_CLASS (mode
) == MODE_INT
952 && GET_MODE_CLASS (oldmode
) == MODE_INT
953 && (GET_CODE (x
) == CONST_DOUBLE
954 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
955 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
956 && direct_load
[(int) mode
])
957 || (GET_CODE (x
) == REG
958 && (! HARD_REGISTER_P (x
)
959 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
960 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
961 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
963 /* ?? If we don't know OLDMODE, we have to assume here that
964 X does not need sign- or zero-extension. This may not be
965 the case, but it's the best we can do. */
966 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
967 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
969 HOST_WIDE_INT val
= INTVAL (x
);
970 int width
= GET_MODE_BITSIZE (oldmode
);
972 /* We must sign or zero-extend in this case. Start by
973 zero-extending, then sign extend if we need to. */
974 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
976 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
977 val
|= (HOST_WIDE_INT
) (-1) << width
;
979 return gen_int_mode (val
, mode
);
982 return gen_lowpart (mode
, x
);
985 /* Converting from integer constant into mode is always equivalent to an
987 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
989 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
991 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
994 temp
= gen_reg_rtx (mode
);
995 convert_move (temp
, x
, unsignedp
);
999 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1000 store efficiently. Due to internal GCC limitations, this is
1001 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1002 for an immediate constant. */
1004 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1006 /* Determine whether the LEN bytes can be moved by using several move
1007 instructions. Return nonzero if a call to move_by_pieces should
1011 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
1012 unsigned int align ATTRIBUTE_UNUSED
)
1014 return MOVE_BY_PIECES_P (len
, align
);
1017 /* Generate several move instructions to copy LEN bytes from block FROM to
1018 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1019 and TO through protect_from_queue before calling.
1021 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1022 used to push FROM to the stack.
1024 ALIGN is maximum stack alignment we can assume.
1026 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1027 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1031 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1032 unsigned int align
, int endp
)
1034 struct move_by_pieces data
;
1035 rtx to_addr
, from_addr
= XEXP (from
, 0);
1036 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1037 enum machine_mode mode
= VOIDmode
, tmode
;
1038 enum insn_code icode
;
1040 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1043 data
.from_addr
= from_addr
;
1046 to_addr
= XEXP (to
, 0);
1049 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1050 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1052 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1059 #ifdef STACK_GROWS_DOWNWARD
1065 data
.to_addr
= to_addr
;
1068 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1069 || GET_CODE (from_addr
) == POST_INC
1070 || GET_CODE (from_addr
) == POST_DEC
);
1072 data
.explicit_inc_from
= 0;
1073 data
.explicit_inc_to
= 0;
1074 if (data
.reverse
) data
.offset
= len
;
1077 /* If copying requires more than two move insns,
1078 copy addresses to registers (to make displacements shorter)
1079 and use post-increment if available. */
1080 if (!(data
.autinc_from
&& data
.autinc_to
)
1081 && move_by_pieces_ninsns (len
, align
) > 2)
1083 /* Find the mode of the largest move... */
1084 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1085 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1086 if (GET_MODE_SIZE (tmode
) < max_size
)
1089 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1091 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1092 data
.autinc_from
= 1;
1093 data
.explicit_inc_from
= -1;
1095 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1097 data
.from_addr
= copy_addr_to_reg (from_addr
);
1098 data
.autinc_from
= 1;
1099 data
.explicit_inc_from
= 1;
1101 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1102 data
.from_addr
= copy_addr_to_reg (from_addr
);
1103 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1105 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1107 data
.explicit_inc_to
= -1;
1109 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1111 data
.to_addr
= copy_addr_to_reg (to_addr
);
1113 data
.explicit_inc_to
= 1;
1115 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1116 data
.to_addr
= copy_addr_to_reg (to_addr
);
1119 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1120 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1121 align
= MOVE_MAX
* BITS_PER_UNIT
;
1123 /* First move what we can in the largest integer mode, then go to
1124 successively smaller modes. */
1126 while (max_size
> 1)
1128 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1129 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1130 if (GET_MODE_SIZE (tmode
) < max_size
)
1133 if (mode
== VOIDmode
)
1136 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1137 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1138 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1140 max_size
= GET_MODE_SIZE (mode
);
1143 /* The code above should have handled everything. */
1157 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1158 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1160 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1163 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1170 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1178 /* Return number of insns required to move L bytes by pieces.
1179 ALIGN (in bits) is maximum alignment we can assume. */
1181 static unsigned HOST_WIDE_INT
1182 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
)
1184 unsigned HOST_WIDE_INT n_insns
= 0;
1185 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1187 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1188 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1189 align
= MOVE_MAX
* BITS_PER_UNIT
;
1191 while (max_size
> 1)
1193 enum machine_mode mode
= VOIDmode
, tmode
;
1194 enum insn_code icode
;
1196 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1197 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1198 if (GET_MODE_SIZE (tmode
) < max_size
)
1201 if (mode
== VOIDmode
)
1204 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1205 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1206 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1208 max_size
= GET_MODE_SIZE (mode
);
1216 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1217 with move instructions for mode MODE. GENFUN is the gen_... function
1218 to make a move insn for that mode. DATA has all the other info. */
1221 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1222 struct move_by_pieces
*data
)
1224 unsigned int size
= GET_MODE_SIZE (mode
);
1225 rtx to1
= NULL_RTX
, from1
;
1227 while (data
->len
>= size
)
1230 data
->offset
-= size
;
1234 if (data
->autinc_to
)
1235 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1238 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1241 if (data
->autinc_from
)
1242 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1245 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1247 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1248 emit_insn (gen_add2_insn (data
->to_addr
,
1249 GEN_INT (-(HOST_WIDE_INT
)size
)));
1250 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1251 emit_insn (gen_add2_insn (data
->from_addr
,
1252 GEN_INT (-(HOST_WIDE_INT
)size
)));
1255 emit_insn ((*genfun
) (to1
, from1
));
1258 #ifdef PUSH_ROUNDING
1259 emit_single_push_insn (mode
, from1
, NULL
);
1265 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1266 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1267 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1268 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1270 if (! data
->reverse
)
1271 data
->offset
+= size
;
1277 /* Emit code to move a block Y to a block X. This may be done with
1278 string-move instructions, with multiple scalar move instructions,
1279 or with a library call.
1281 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1282 SIZE is an rtx that says how long they are.
1283 ALIGN is the maximum alignment we can assume they have.
1284 METHOD describes what kind of copy this is, and what mechanisms may be used.
1286 Return the address of the new block, if memcpy is called and returns it,
1290 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1298 case BLOCK_OP_NORMAL
:
1299 may_use_call
= true;
1302 case BLOCK_OP_CALL_PARM
:
1303 may_use_call
= block_move_libcall_safe_for_call_parm ();
1305 /* Make inhibit_defer_pop nonzero around the library call
1306 to force it to pop the arguments right away. */
1310 case BLOCK_OP_NO_LIBCALL
:
1311 may_use_call
= false;
1318 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1320 if (GET_MODE (x
) != BLKmode
)
1322 if (GET_MODE (y
) != BLKmode
)
1325 x
= protect_from_queue (x
, 1);
1326 y
= protect_from_queue (y
, 0);
1327 size
= protect_from_queue (size
, 0);
1329 if (GET_CODE (x
) != MEM
)
1331 if (GET_CODE (y
) != MEM
)
1336 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1337 can be incorrect is coming from __builtin_memcpy. */
1338 if (GET_CODE (size
) == CONST_INT
)
1340 if (INTVAL (size
) == 0)
1343 x
= shallow_copy_rtx (x
);
1344 y
= shallow_copy_rtx (y
);
1345 set_mem_size (x
, size
);
1346 set_mem_size (y
, size
);
1349 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1350 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1351 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1353 else if (may_use_call
)
1354 retval
= emit_block_move_via_libcall (x
, y
, size
);
1356 emit_block_move_via_loop (x
, y
, size
, align
);
1358 if (method
== BLOCK_OP_CALL_PARM
)
1364 /* A subroutine of emit_block_move. Returns true if calling the
1365 block move libcall will not clobber any parameters which may have
1366 already been placed on the stack. */
1369 block_move_libcall_safe_for_call_parm (void)
1371 /* If arguments are pushed on the stack, then they're safe. */
1375 /* If registers go on the stack anyway, any argument is sure to clobber
1376 an outgoing argument. */
1377 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1379 tree fn
= emit_block_move_libcall_fn (false);
1381 if (REG_PARM_STACK_SPACE (fn
) != 0)
1386 /* If any argument goes in memory, then it might clobber an outgoing
1389 CUMULATIVE_ARGS args_so_far
;
1392 fn
= emit_block_move_libcall_fn (false);
1393 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0);
1395 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1396 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1398 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1399 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1400 if (!tmp
|| !REG_P (tmp
))
1402 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1403 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1407 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1413 /* A subroutine of emit_block_move. Expand a movstr pattern;
1414 return true if successful. */
1417 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1419 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1420 enum machine_mode mode
;
1422 /* Since this is a move insn, we don't care about volatility. */
1425 /* Try the most limited insn first, because there's no point
1426 including more than one in the machine description unless
1427 the more limited one has some advantage. */
1429 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1430 mode
= GET_MODE_WIDER_MODE (mode
))
1432 enum insn_code code
= movstr_optab
[(int) mode
];
1433 insn_operand_predicate_fn pred
;
1435 if (code
!= CODE_FOR_nothing
1436 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1437 here because if SIZE is less than the mode mask, as it is
1438 returned by the macro, it will definitely be less than the
1439 actual mode mask. */
1440 && ((GET_CODE (size
) == CONST_INT
1441 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1442 <= (GET_MODE_MASK (mode
) >> 1)))
1443 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1444 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1445 || (*pred
) (x
, BLKmode
))
1446 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1447 || (*pred
) (y
, BLKmode
))
1448 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1449 || (*pred
) (opalign
, VOIDmode
)))
1452 rtx last
= get_last_insn ();
1455 op2
= convert_to_mode (mode
, size
, 1);
1456 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1457 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1458 op2
= copy_to_mode_reg (mode
, op2
);
1460 /* ??? When called via emit_block_move_for_call, it'd be
1461 nice if there were some way to inform the backend, so
1462 that it doesn't fail the expansion because it thinks
1463 emitting the libcall would be more efficient. */
1465 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1473 delete_insns_since (last
);
1481 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1482 Return the return value from memcpy, 0 otherwise. */
1485 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1487 rtx dst_addr
, src_addr
;
1488 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1489 enum machine_mode size_mode
;
1492 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1494 It is unsafe to save the value generated by protect_from_queue and reuse
1495 it later. Consider what happens if emit_queue is called before the
1496 return value from protect_from_queue is used.
1498 Expansion of the CALL_EXPR below will call emit_queue before we are
1499 finished emitting RTL for argument setup. So if we are not careful we
1500 could get the wrong value for an argument.
1502 To avoid this problem we go ahead and emit code to copy the addresses of
1503 DST and SRC and SIZE into new pseudos. We can then place those new
1504 pseudos into an RTL_EXPR and use them later, even after a call to
1507 Note this is not strictly needed for library calls since they do not call
1508 emit_queue before loading their arguments. However, we may need to have
1509 library calls call emit_queue in the future since failing to do so could
1510 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1511 arguments in registers. */
1513 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1514 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1516 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1517 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1519 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1520 src_tree
= make_tree (ptr_type_node
, src_addr
);
1522 if (TARGET_MEM_FUNCTIONS
)
1523 size_mode
= TYPE_MODE (sizetype
);
1525 size_mode
= TYPE_MODE (unsigned_type_node
);
1527 size
= convert_to_mode (size_mode
, size
, 1);
1528 size
= copy_to_mode_reg (size_mode
, size
);
1530 /* It is incorrect to use the libcall calling conventions to call
1531 memcpy in this context. This could be a user call to memcpy and
1532 the user may wish to examine the return value from memcpy. For
1533 targets where libcalls and normal calls have different conventions
1534 for returning pointers, we could end up generating incorrect code.
1536 For convenience, we generate the call to bcopy this way as well. */
1538 if (TARGET_MEM_FUNCTIONS
)
1539 size_tree
= make_tree (sizetype
, size
);
1541 size_tree
= make_tree (unsigned_type_node
, size
);
1543 fn
= emit_block_move_libcall_fn (true);
1544 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1545 if (TARGET_MEM_FUNCTIONS
)
1547 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1548 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1552 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1553 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1556 /* Now we have to build up the CALL_EXPR itself. */
1557 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1558 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1559 call_expr
, arg_list
, NULL_TREE
);
1561 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1563 /* If we are initializing a readonly value, show the above call clobbered
1564 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1565 the delay slot scheduler might overlook conflicts and take nasty
1567 if (RTX_UNCHANGING_P (dst
))
1568 add_function_usage_to
1569 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1570 gen_rtx_CLOBBER (VOIDmode
, dst
),
1573 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
1576 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1577 for the function we use for block copies. The first time FOR_CALL
1578 is true, we call assemble_external. */
1580 static GTY(()) tree block_move_fn
;
1583 init_block_move_fn (const char *asmspec
)
1589 if (TARGET_MEM_FUNCTIONS
)
1591 fn
= get_identifier ("memcpy");
1592 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1593 const_ptr_type_node
, sizetype
,
1598 fn
= get_identifier ("bcopy");
1599 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
1600 ptr_type_node
, unsigned_type_node
,
1604 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1605 DECL_EXTERNAL (fn
) = 1;
1606 TREE_PUBLIC (fn
) = 1;
1607 DECL_ARTIFICIAL (fn
) = 1;
1608 TREE_NOTHROW (fn
) = 1;
1615 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
1616 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
1621 emit_block_move_libcall_fn (int for_call
)
1623 static bool emitted_extern
;
1626 init_block_move_fn (NULL
);
1628 if (for_call
&& !emitted_extern
)
1630 emitted_extern
= true;
1631 make_decl_rtl (block_move_fn
, NULL
);
1632 assemble_external (block_move_fn
);
1635 return block_move_fn
;
1638 /* A subroutine of emit_block_move. Copy the data via an explicit
1639 loop. This is used only when libcalls are forbidden. */
1640 /* ??? It'd be nice to copy in hunks larger than QImode. */
1643 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1644 unsigned int align ATTRIBUTE_UNUSED
)
1646 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1647 enum machine_mode iter_mode
;
1649 iter_mode
= GET_MODE (size
);
1650 if (iter_mode
== VOIDmode
)
1651 iter_mode
= word_mode
;
1653 top_label
= gen_label_rtx ();
1654 cmp_label
= gen_label_rtx ();
1655 iter
= gen_reg_rtx (iter_mode
);
1657 emit_move_insn (iter
, const0_rtx
);
1659 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1660 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1661 do_pending_stack_adjust ();
1663 emit_note (NOTE_INSN_LOOP_BEG
);
1665 emit_jump (cmp_label
);
1666 emit_label (top_label
);
1668 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1669 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1670 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1671 x
= change_address (x
, QImode
, x_addr
);
1672 y
= change_address (y
, QImode
, y_addr
);
1674 emit_move_insn (x
, y
);
1676 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1677 true, OPTAB_LIB_WIDEN
);
1679 emit_move_insn (iter
, tmp
);
1681 emit_note (NOTE_INSN_LOOP_CONT
);
1682 emit_label (cmp_label
);
1684 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1687 emit_note (NOTE_INSN_LOOP_END
);
1690 /* Copy all or part of a value X into registers starting at REGNO.
1691 The number of registers to be filled is NREGS. */
1694 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1697 #ifdef HAVE_load_multiple
1705 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1706 x
= validize_mem (force_const_mem (mode
, x
));
1708 /* See if the machine can do this with a load multiple insn. */
1709 #ifdef HAVE_load_multiple
1710 if (HAVE_load_multiple
)
1712 last
= get_last_insn ();
1713 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1721 delete_insns_since (last
);
1725 for (i
= 0; i
< nregs
; i
++)
1726 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1727 operand_subword_force (x
, i
, mode
));
1730 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1731 The number of registers to be filled is NREGS. */
1734 move_block_from_reg (int regno
, rtx x
, int nregs
)
1741 /* See if the machine can do this with a store multiple insn. */
1742 #ifdef HAVE_store_multiple
1743 if (HAVE_store_multiple
)
1745 rtx last
= get_last_insn ();
1746 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1754 delete_insns_since (last
);
1758 for (i
= 0; i
< nregs
; i
++)
1760 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1765 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1769 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1770 ORIG, where ORIG is a non-consecutive group of registers represented by
1771 a PARALLEL. The clone is identical to the original except in that the
1772 original set of registers is replaced by a new set of pseudo registers.
1773 The new set has the same modes as the original set. */
1776 gen_group_rtx (rtx orig
)
1781 if (GET_CODE (orig
) != PARALLEL
)
1784 length
= XVECLEN (orig
, 0);
1785 tmps
= alloca (sizeof (rtx
) * length
);
1787 /* Skip a NULL entry in first slot. */
1788 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1793 for (; i
< length
; i
++)
1795 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1796 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1798 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1801 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1804 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1805 where DST is non-consecutive registers represented by a PARALLEL.
1806 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1810 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1815 if (GET_CODE (dst
) != PARALLEL
)
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
1820 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1825 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1827 /* Process the pieces. */
1828 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1830 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1831 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1832 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1835 /* Handle trailing fragments that run over the size of the struct. */
1836 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1838 /* Arrange to shift the fragment to where it belongs.
1839 extract_bit_field loads to the lsb of the reg. */
1841 #ifdef BLOCK_REG_PADDING
1842 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1843 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1848 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1849 bytelen
= ssize
- bytepos
;
1854 /* If we won't be loading directly from memory, protect the real source
1855 from strange tricks we might play; but make sure that the source can
1856 be loaded directly into the destination. */
1858 if (GET_CODE (orig_src
) != MEM
1859 && (!CONSTANT_P (orig_src
)
1860 || (GET_MODE (orig_src
) != mode
1861 && GET_MODE (orig_src
) != VOIDmode
)))
1863 if (GET_MODE (orig_src
) == VOIDmode
)
1864 src
= gen_reg_rtx (mode
);
1866 src
= gen_reg_rtx (GET_MODE (orig_src
));
1868 emit_move_insn (src
, orig_src
);
1871 /* Optimize the access just a bit. */
1872 if (GET_CODE (src
) == MEM
1873 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1874 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1875 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1876 && bytelen
== GET_MODE_SIZE (mode
))
1878 tmps
[i
] = gen_reg_rtx (mode
);
1879 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1881 else if (GET_CODE (src
) == CONCAT
)
1883 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1884 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1886 if ((bytepos
== 0 && bytelen
== slen0
)
1887 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1889 /* The following assumes that the concatenated objects all
1890 have the same size. In this case, a simple calculation
1891 can be used to determine the object and the bit field
1893 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1894 if (! CONSTANT_P (tmps
[i
])
1895 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
1896 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1897 (bytepos
% slen0
) * BITS_PER_UNIT
,
1898 1, NULL_RTX
, mode
, mode
, ssize
);
1900 else if (bytepos
== 0)
1902 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1903 emit_move_insn (mem
, src
);
1904 tmps
[i
] = adjust_address (mem
, mode
, 0);
1909 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1910 SIMD register, which is currently broken. While we get GCC
1911 to emit proper RTL for these cases, let's dump to memory. */
1912 else if (VECTOR_MODE_P (GET_MODE (dst
))
1913 && GET_CODE (src
) == REG
)
1915 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1918 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1919 emit_move_insn (mem
, src
);
1920 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1922 else if (CONSTANT_P (src
)
1923 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1926 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1927 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1931 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1932 tmps
[i
], 0, OPTAB_WIDEN
);
1937 /* Copy the extracted pieces into the proper (probable) hard regs. */
1938 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1939 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1942 /* Emit code to move a block SRC to block DST, where SRC and DST are
1943 non-consecutive groups of registers, each represented by a PARALLEL. */
1946 emit_group_move (rtx dst
, rtx src
)
1950 if (GET_CODE (src
) != PARALLEL
1951 || GET_CODE (dst
) != PARALLEL
1952 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
1955 /* Skip first entry if NULL. */
1956 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1957 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1958 XEXP (XVECEXP (src
, 0, i
), 0));
1961 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1962 where SRC is non-consecutive registers represented by a PARALLEL.
1963 SSIZE represents the total size of block ORIG_DST, or -1 if not
1967 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1972 if (GET_CODE (src
) != PARALLEL
)
1975 /* Check for a NULL entry, used to indicate that the parameter goes
1976 both on the stack and in registers. */
1977 if (XEXP (XVECEXP (src
, 0, 0), 0))
1982 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1984 /* Copy the (probable) hard regs into pseudos. */
1985 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1987 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1988 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1989 emit_move_insn (tmps
[i
], reg
);
1993 /* If we won't be storing directly into memory, protect the real destination
1994 from strange tricks we might play. */
1996 if (GET_CODE (dst
) == PARALLEL
)
2000 /* We can get a PARALLEL dst if there is a conditional expression in
2001 a return statement. In that case, the dst and src are the same,
2002 so no action is necessary. */
2003 if (rtx_equal_p (dst
, src
))
2006 /* It is unclear if we can ever reach here, but we may as well handle
2007 it. Allocate a temporary, and split this into a store/load to/from
2010 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2011 emit_group_store (temp
, src
, type
, ssize
);
2012 emit_group_load (dst
, temp
, type
, ssize
);
2015 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2017 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2018 /* Make life a bit easier for combine. */
2019 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2022 /* Process the pieces. */
2023 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2025 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2026 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2027 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2030 /* Handle trailing fragments that run over the size of the struct. */
2031 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2033 /* store_bit_field always takes its value from the lsb.
2034 Move the fragment to the lsb if it's not already there. */
2036 #ifdef BLOCK_REG_PADDING
2037 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2038 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2044 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2045 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2046 tmps
[i
], 0, OPTAB_WIDEN
);
2048 bytelen
= ssize
- bytepos
;
2051 if (GET_CODE (dst
) == CONCAT
)
2053 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2054 dest
= XEXP (dst
, 0);
2055 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2057 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2058 dest
= XEXP (dst
, 1);
2060 else if (bytepos
== 0 && XVECLEN (src
, 0))
2062 dest
= assign_stack_temp (GET_MODE (dest
),
2063 GET_MODE_SIZE (GET_MODE (dest
)), 0);
2064 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
2073 /* Optimize the access just a bit. */
2074 if (GET_CODE (dest
) == MEM
2075 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2076 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2077 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2078 && bytelen
== GET_MODE_SIZE (mode
))
2079 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2081 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2082 mode
, tmps
[i
], ssize
);
2087 /* Copy from the pseudo into the (probable) hard reg. */
2088 if (orig_dst
!= dst
)
2089 emit_move_insn (orig_dst
, dst
);
2092 /* Generate code to copy a BLKmode object of TYPE out of a
2093 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2094 is null, a stack temporary is created. TGTBLK is returned.
2096 The purpose of this routine is to handle functions that return
2097 BLKmode structures in registers. Some machines (the PA for example)
2098 want to return all small structures in registers regardless of the
2099 structure's alignment. */
2102 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
2104 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2105 rtx src
= NULL
, dst
= NULL
;
2106 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2107 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2111 tgtblk
= assign_temp (build_qualified_type (type
,
2113 | TYPE_QUAL_CONST
)),
2115 preserve_temp_slots (tgtblk
);
2118 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2119 into a new pseudo which is a full word. */
2121 if (GET_MODE (srcreg
) != BLKmode
2122 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2123 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2125 /* If the structure doesn't take up a whole number of words, see whether
2126 SRCREG is padded on the left or on the right. If it's on the left,
2127 set PADDING_CORRECTION to the number of bits to skip.
2129 In most ABIs, the structure will be returned at the least end of
2130 the register, which translates to right padding on little-endian
2131 targets and left padding on big-endian targets. The opposite
2132 holds if the structure is returned at the most significant
2133 end of the register. */
2134 if (bytes
% UNITS_PER_WORD
!= 0
2135 && (targetm
.calls
.return_in_msb (type
)
2137 : BYTES_BIG_ENDIAN
))
2139 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2141 /* Copy the structure BITSIZE bites at a time.
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2146 for (bitpos
= 0, xbitpos
= padding_correction
;
2147 bitpos
< bytes
* BITS_PER_UNIT
;
2148 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2150 /* We need a new source operand each time xbitpos is on a
2151 word boundary and when xbitpos == padding_correction
2152 (the first time through). */
2153 if (xbitpos
% BITS_PER_WORD
== 0
2154 || xbitpos
== padding_correction
)
2155 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2158 /* We need a new destination operand each time bitpos is on
2160 if (bitpos
% BITS_PER_WORD
== 0)
2161 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2163 /* Use xbitpos for the source extraction (right justified) and
2164 xbitpos for the destination store (left justified). */
2165 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2166 extract_bit_field (src
, bitsize
,
2167 xbitpos
% BITS_PER_WORD
, 1,
2168 NULL_RTX
, word_mode
, word_mode
,
2176 /* Add a USE expression for REG to the (possibly empty) list pointed
2177 to by CALL_FUSAGE. REG must denote a hard register. */
2180 use_reg (rtx
*call_fusage
, rtx reg
)
2182 if (GET_CODE (reg
) != REG
2183 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2187 = gen_rtx_EXPR_LIST (VOIDmode
,
2188 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2191 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2192 starting at REGNO. All of these registers must be hard registers. */
2195 use_regs (rtx
*call_fusage
, int regno
, int nregs
)
2199 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2202 for (i
= 0; i
< nregs
; i
++)
2203 use_reg (call_fusage
, regno_reg_rtx
[regno
+ i
]);
2206 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2207 PARALLEL REGS. This is for calls that pass values in multiple
2208 non-contiguous locations. The Irix 6 ABI has examples of this. */
2211 use_group_regs (rtx
*call_fusage
, rtx regs
)
2215 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2217 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2223 use_reg (call_fusage
, reg
);
2228 /* Determine whether the LEN bytes generated by CONSTFUN can be
2229 stored to memory using several move instructions. CONSTFUNDATA is
2230 a pointer which will be passed as argument in every CONSTFUN call.
2231 ALIGN is maximum alignment we can assume. Return nonzero if a
2232 call to store_by_pieces should succeed. */
2235 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2236 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2237 void *constfundata
, unsigned int align
)
2239 unsigned HOST_WIDE_INT max_size
, l
;
2240 HOST_WIDE_INT offset
= 0;
2241 enum machine_mode mode
, tmode
;
2242 enum insn_code icode
;
2249 if (! STORE_BY_PIECES_P (len
, align
))
2252 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2253 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2254 align
= MOVE_MAX
* BITS_PER_UNIT
;
2256 /* We would first store what we can in the largest integer mode, then go to
2257 successively smaller modes. */
2260 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2265 max_size
= STORE_MAX_PIECES
+ 1;
2266 while (max_size
> 1)
2268 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2269 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2270 if (GET_MODE_SIZE (tmode
) < max_size
)
2273 if (mode
== VOIDmode
)
2276 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2277 if (icode
!= CODE_FOR_nothing
2278 && align
>= GET_MODE_ALIGNMENT (mode
))
2280 unsigned int size
= GET_MODE_SIZE (mode
);
2287 cst
= (*constfun
) (constfundata
, offset
, mode
);
2288 if (!LEGITIMATE_CONSTANT_P (cst
))
2298 max_size
= GET_MODE_SIZE (mode
);
2301 /* The code above should have handled everything. */
2309 /* Generate several move instructions to store LEN bytes generated by
2310 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2311 pointer which will be passed as argument in every CONSTFUN call.
2312 ALIGN is maximum alignment we can assume.
2313 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2314 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2318 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2319 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2320 void *constfundata
, unsigned int align
, int endp
)
2322 struct store_by_pieces data
;
2331 if (! STORE_BY_PIECES_P (len
, align
))
2333 to
= protect_from_queue (to
, 1);
2334 data
.constfun
= constfun
;
2335 data
.constfundata
= constfundata
;
2338 store_by_pieces_1 (&data
, align
);
2349 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2350 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2352 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2355 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2362 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
2370 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2371 rtx with BLKmode). The caller must pass TO through protect_from_queue
2372 before calling. ALIGN is maximum alignment we can assume. */
2375 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
2377 struct store_by_pieces data
;
2382 data
.constfun
= clear_by_pieces_1
;
2383 data
.constfundata
= NULL
;
2386 store_by_pieces_1 (&data
, align
);
2389 /* Callback routine for clear_by_pieces.
2390 Return const0_rtx unconditionally. */
2393 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED
,
2394 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2395 enum machine_mode mode ATTRIBUTE_UNUSED
)
2400 /* Subroutine of clear_by_pieces and store_by_pieces.
2401 Generate several move instructions to store LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2406 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2407 unsigned int align ATTRIBUTE_UNUSED
)
2409 rtx to_addr
= XEXP (data
->to
, 0);
2410 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2411 enum machine_mode mode
= VOIDmode
, tmode
;
2412 enum insn_code icode
;
2415 data
->to_addr
= to_addr
;
2417 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2418 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2420 data
->explicit_inc_to
= 0;
2422 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2424 data
->offset
= data
->len
;
2426 /* If storing requires more than two move insns,
2427 copy addresses to registers (to make displacements shorter)
2428 and use post-increment if available. */
2429 if (!data
->autinc_to
2430 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2432 /* Determine the main mode we'll be using. */
2433 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2434 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2435 if (GET_MODE_SIZE (tmode
) < max_size
)
2438 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2440 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2441 data
->autinc_to
= 1;
2442 data
->explicit_inc_to
= -1;
2445 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2446 && ! data
->autinc_to
)
2448 data
->to_addr
= copy_addr_to_reg (to_addr
);
2449 data
->autinc_to
= 1;
2450 data
->explicit_inc_to
= 1;
2453 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2454 data
->to_addr
= copy_addr_to_reg (to_addr
);
2457 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2458 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2459 align
= MOVE_MAX
* BITS_PER_UNIT
;
2461 /* First store what we can in the largest integer mode, then go to
2462 successively smaller modes. */
2464 while (max_size
> 1)
2466 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2467 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2468 if (GET_MODE_SIZE (tmode
) < max_size
)
2471 if (mode
== VOIDmode
)
2474 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2475 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2476 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2478 max_size
= GET_MODE_SIZE (mode
);
2481 /* The code above should have handled everything. */
2486 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2487 with move instructions for mode MODE. GENFUN is the gen_... function
2488 to make a move insn for that mode. DATA has all the other info. */
2491 store_by_pieces_2 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
2492 struct store_by_pieces
*data
)
2494 unsigned int size
= GET_MODE_SIZE (mode
);
2497 while (data
->len
>= size
)
2500 data
->offset
-= size
;
2502 if (data
->autinc_to
)
2503 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2506 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2508 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2509 emit_insn (gen_add2_insn (data
->to_addr
,
2510 GEN_INT (-(HOST_WIDE_INT
) size
)));
2512 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2513 emit_insn ((*genfun
) (to1
, cst
));
2515 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2516 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2518 if (! data
->reverse
)
2519 data
->offset
+= size
;
2525 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2526 its length in bytes. */
2529 clear_storage (rtx object
, rtx size
)
2532 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2533 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2535 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2536 just move a zero. Otherwise, do this a piece at a time. */
2537 if (GET_MODE (object
) != BLKmode
2538 && GET_CODE (size
) == CONST_INT
2539 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2540 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2543 object
= protect_from_queue (object
, 1);
2544 size
= protect_from_queue (size
, 0);
2546 if (size
== const0_rtx
)
2548 else if (GET_CODE (size
) == CONST_INT
2549 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2550 clear_by_pieces (object
, INTVAL (size
), align
);
2551 else if (clear_storage_via_clrstr (object
, size
, align
))
2554 retval
= clear_storage_via_libcall (object
, size
);
2560 /* A subroutine of clear_storage. Expand a clrstr pattern;
2561 return true if successful. */
2564 clear_storage_via_clrstr (rtx object
, rtx size
, unsigned int align
)
2566 /* Try the most limited insn first, because there's no point
2567 including more than one in the machine description unless
2568 the more limited one has some advantage. */
2570 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2571 enum machine_mode mode
;
2573 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2574 mode
= GET_MODE_WIDER_MODE (mode
))
2576 enum insn_code code
= clrstr_optab
[(int) mode
];
2577 insn_operand_predicate_fn pred
;
2579 if (code
!= CODE_FOR_nothing
2580 /* We don't need MODE to be narrower than
2581 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2582 the mode mask, as it is returned by the macro, it will
2583 definitely be less than the actual mode mask. */
2584 && ((GET_CODE (size
) == CONST_INT
2585 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2586 <= (GET_MODE_MASK (mode
) >> 1)))
2587 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2588 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2589 || (*pred
) (object
, BLKmode
))
2590 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2591 || (*pred
) (opalign
, VOIDmode
)))
2594 rtx last
= get_last_insn ();
2597 op1
= convert_to_mode (mode
, size
, 1);
2598 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2599 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2600 op1
= copy_to_mode_reg (mode
, op1
);
2602 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2609 delete_insns_since (last
);
2616 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2617 Return the return value of memset, 0 otherwise. */
2620 clear_storage_via_libcall (rtx object
, rtx size
)
2622 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2623 enum machine_mode size_mode
;
2626 /* OBJECT or SIZE may have been passed through protect_from_queue.
2628 It is unsafe to save the value generated by protect_from_queue
2629 and reuse it later. Consider what happens if emit_queue is
2630 called before the return value from protect_from_queue is used.
2632 Expansion of the CALL_EXPR below will call emit_queue before
2633 we are finished emitting RTL for argument setup. So if we are
2634 not careful we could get the wrong value for an argument.
2636 To avoid this problem we go ahead and emit code to copy OBJECT
2637 and SIZE into new pseudos. We can then place those new pseudos
2638 into an RTL_EXPR and use them later, even after a call to
2641 Note this is not strictly needed for library calls since they
2642 do not call emit_queue before loading their arguments. However,
2643 we may need to have library calls call emit_queue in the future
2644 since failing to do so could cause problems for targets which
2645 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2647 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2649 if (TARGET_MEM_FUNCTIONS
)
2650 size_mode
= TYPE_MODE (sizetype
);
2652 size_mode
= TYPE_MODE (unsigned_type_node
);
2653 size
= convert_to_mode (size_mode
, size
, 1);
2654 size
= copy_to_mode_reg (size_mode
, size
);
2656 /* It is incorrect to use the libcall calling conventions to call
2657 memset in this context. This could be a user call to memset and
2658 the user may wish to examine the return value from memset. For
2659 targets where libcalls and normal calls have different conventions
2660 for returning pointers, we could end up generating incorrect code.
2662 For convenience, we generate the call to bzero this way as well. */
2664 object_tree
= make_tree (ptr_type_node
, object
);
2665 if (TARGET_MEM_FUNCTIONS
)
2666 size_tree
= make_tree (sizetype
, size
);
2668 size_tree
= make_tree (unsigned_type_node
, size
);
2670 fn
= clear_storage_libcall_fn (true);
2671 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2672 if (TARGET_MEM_FUNCTIONS
)
2673 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2674 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2676 /* Now we have to build up the CALL_EXPR itself. */
2677 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2678 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2679 call_expr
, arg_list
, NULL_TREE
);
2681 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2683 /* If we are initializing a readonly value, show the above call
2684 clobbered it. Otherwise, a load from it may erroneously be
2685 hoisted from a loop. */
2686 if (RTX_UNCHANGING_P (object
))
2687 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
2689 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
2692 /* A subroutine of clear_storage_via_libcall. Create the tree node
2693 for the function we use for block clears. The first time FOR_CALL
2694 is true, we call assemble_external. */
2696 static GTY(()) tree block_clear_fn
;
2699 init_block_clear_fn (const char *asmspec
)
2701 if (!block_clear_fn
)
2705 if (TARGET_MEM_FUNCTIONS
)
2707 fn
= get_identifier ("memset");
2708 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2709 integer_type_node
, sizetype
,
2714 fn
= get_identifier ("bzero");
2715 args
= build_function_type_list (void_type_node
, ptr_type_node
,
2716 unsigned_type_node
, NULL_TREE
);
2719 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2720 DECL_EXTERNAL (fn
) = 1;
2721 TREE_PUBLIC (fn
) = 1;
2722 DECL_ARTIFICIAL (fn
) = 1;
2723 TREE_NOTHROW (fn
) = 1;
2725 block_clear_fn
= fn
;
2730 SET_DECL_RTL (block_clear_fn
, NULL_RTX
);
2731 SET_DECL_ASSEMBLER_NAME (block_clear_fn
, get_identifier (asmspec
));
2736 clear_storage_libcall_fn (int for_call
)
2738 static bool emitted_extern
;
2740 if (!block_clear_fn
)
2741 init_block_clear_fn (NULL
);
2743 if (for_call
&& !emitted_extern
)
2745 emitted_extern
= true;
2746 make_decl_rtl (block_clear_fn
, NULL
);
2747 assemble_external (block_clear_fn
);
2750 return block_clear_fn
;
2753 /* Generate code to copy Y into X.
2754 Both Y and X must have the same mode, except that
2755 Y can be a constant with VOIDmode.
2756 This mode cannot be BLKmode; use emit_block_move for that.
2758 Return the last instruction emitted. */
2761 emit_move_insn (rtx x
, rtx y
)
2763 enum machine_mode mode
= GET_MODE (x
);
2764 rtx y_cst
= NULL_RTX
;
2767 x
= protect_from_queue (x
, 1);
2768 y
= protect_from_queue (y
, 0);
2770 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2773 /* Never force constant_p_rtx to memory. */
2774 if (GET_CODE (y
) == CONSTANT_P_RTX
)
2776 else if (CONSTANT_P (y
))
2779 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2780 && (last_insn
= compress_float_constant (x
, y
)))
2785 if (!LEGITIMATE_CONSTANT_P (y
))
2787 y
= force_const_mem (mode
, y
);
2789 /* If the target's cannot_force_const_mem prevented the spill,
2790 assume that the target's move expanders will also take care
2791 of the non-legitimate constant. */
2797 /* If X or Y are memory references, verify that their addresses are valid
2799 if (GET_CODE (x
) == MEM
2800 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2801 && ! push_operand (x
, GET_MODE (x
)))
2803 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2804 x
= validize_mem (x
);
2806 if (GET_CODE (y
) == MEM
2807 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2809 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2810 y
= validize_mem (y
);
2812 if (mode
== BLKmode
)
2815 last_insn
= emit_move_insn_1 (x
, y
);
2817 if (y_cst
&& GET_CODE (x
) == REG
2818 && (set
= single_set (last_insn
)) != NULL_RTX
2819 && SET_DEST (set
) == x
2820 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2821 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
2826 /* Low level part of emit_move_insn.
2827 Called just like emit_move_insn, but assumes X and Y
2828 are basically valid. */
2831 emit_move_insn_1 (rtx x
, rtx y
)
2833 enum machine_mode mode
= GET_MODE (x
);
2834 enum machine_mode submode
;
2835 enum mode_class
class = GET_MODE_CLASS (mode
);
2837 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2840 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2842 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2844 /* Expand complex moves by moving real part and imag part, if possible. */
2845 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2846 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2847 && (mov_optab
->handlers
[(int) submode
].insn_code
2848 != CODE_FOR_nothing
))
2850 /* Don't split destination if it is a stack push. */
2851 int stack
= push_operand (x
, GET_MODE (x
));
2853 #ifdef PUSH_ROUNDING
2854 /* In case we output to the stack, but the size is smaller than the
2855 machine can push exactly, we need to use move instructions. */
2857 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2858 != GET_MODE_SIZE (submode
)))
2861 HOST_WIDE_INT offset1
, offset2
;
2863 /* Do not use anti_adjust_stack, since we don't want to update
2864 stack_pointer_delta. */
2865 temp
= expand_binop (Pmode
,
2866 #ifdef STACK_GROWS_DOWNWARD
2874 (GET_MODE_SIZE (GET_MODE (x
)))),
2875 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2877 if (temp
!= stack_pointer_rtx
)
2878 emit_move_insn (stack_pointer_rtx
, temp
);
2880 #ifdef STACK_GROWS_DOWNWARD
2882 offset2
= GET_MODE_SIZE (submode
);
2884 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2885 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2886 + GET_MODE_SIZE (submode
));
2889 emit_move_insn (change_address (x
, submode
,
2890 gen_rtx_PLUS (Pmode
,
2892 GEN_INT (offset1
))),
2893 gen_realpart (submode
, y
));
2894 emit_move_insn (change_address (x
, submode
,
2895 gen_rtx_PLUS (Pmode
,
2897 GEN_INT (offset2
))),
2898 gen_imagpart (submode
, y
));
2902 /* If this is a stack, push the highpart first, so it
2903 will be in the argument order.
2905 In that case, change_address is used only to convert
2906 the mode, not to change the address. */
2909 /* Note that the real part always precedes the imag part in memory
2910 regardless of machine's endianness. */
2911 #ifdef STACK_GROWS_DOWNWARD
2912 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2913 gen_imagpart (submode
, y
));
2914 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2915 gen_realpart (submode
, y
));
2917 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2918 gen_realpart (submode
, y
));
2919 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2920 gen_imagpart (submode
, y
));
2925 rtx realpart_x
, realpart_y
;
2926 rtx imagpart_x
, imagpart_y
;
2928 /* If this is a complex value with each part being smaller than a
2929 word, the usual calling sequence will likely pack the pieces into
2930 a single register. Unfortunately, SUBREG of hard registers only
2931 deals in terms of words, so we have a problem converting input
2932 arguments to the CONCAT of two registers that is used elsewhere
2933 for complex values. If this is before reload, we can copy it into
2934 memory and reload. FIXME, we should see about using extract and
2935 insert on integer registers, but complex short and complex char
2936 variables should be rarely used. */
2937 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2938 && (reload_in_progress
| reload_completed
) == 0)
2941 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2943 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2945 if (packed_dest_p
|| packed_src_p
)
2947 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2948 ? MODE_FLOAT
: MODE_INT
);
2950 enum machine_mode reg_mode
2951 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2953 if (reg_mode
!= BLKmode
)
2955 rtx mem
= assign_stack_temp (reg_mode
,
2956 GET_MODE_SIZE (mode
), 0);
2957 rtx cmem
= adjust_address (mem
, mode
, 0);
2960 = N_("function using short complex types cannot be inline");
2964 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2966 emit_move_insn_1 (cmem
, y
);
2967 return emit_move_insn_1 (sreg
, mem
);
2971 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2973 emit_move_insn_1 (mem
, sreg
);
2974 return emit_move_insn_1 (x
, cmem
);
2980 realpart_x
= gen_realpart (submode
, x
);
2981 realpart_y
= gen_realpart (submode
, y
);
2982 imagpart_x
= gen_imagpart (submode
, x
);
2983 imagpart_y
= gen_imagpart (submode
, y
);
2985 /* Show the output dies here. This is necessary for SUBREGs
2986 of pseudos since we cannot track their lifetimes correctly;
2987 hard regs shouldn't appear here except as return values.
2988 We never want to emit such a clobber after reload. */
2990 && ! (reload_in_progress
|| reload_completed
)
2991 && (GET_CODE (realpart_x
) == SUBREG
2992 || GET_CODE (imagpart_x
) == SUBREG
))
2993 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2995 emit_move_insn (realpart_x
, realpart_y
);
2996 emit_move_insn (imagpart_x
, imagpart_y
);
2999 return get_last_insn ();
3002 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3003 find a mode to do it in. If we have a movcc, use it. Otherwise,
3004 find the MODE_INT mode of the same width. */
3005 else if (GET_MODE_CLASS (mode
) == MODE_CC
3006 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3008 enum insn_code insn_code
;
3009 enum machine_mode tmode
= VOIDmode
;
3013 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3016 for (tmode
= QImode
; tmode
!= VOIDmode
;
3017 tmode
= GET_MODE_WIDER_MODE (tmode
))
3018 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3021 if (tmode
== VOIDmode
)
3024 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3025 may call change_address which is not appropriate if we were
3026 called when a reload was in progress. We don't have to worry
3027 about changing the address since the size in bytes is supposed to
3028 be the same. Copy the MEM to change the mode and move any
3029 substitutions from the old MEM to the new one. */
3031 if (reload_in_progress
)
3033 x
= gen_lowpart_common (tmode
, x1
);
3034 if (x
== 0 && GET_CODE (x1
) == MEM
)
3036 x
= adjust_address_nv (x1
, tmode
, 0);
3037 copy_replacements (x1
, x
);
3040 y
= gen_lowpart_common (tmode
, y1
);
3041 if (y
== 0 && GET_CODE (y1
) == MEM
)
3043 y
= adjust_address_nv (y1
, tmode
, 0);
3044 copy_replacements (y1
, y
);
3049 x
= gen_lowpart (tmode
, x
);
3050 y
= gen_lowpart (tmode
, y
);
3053 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3054 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3057 /* Try using a move pattern for the corresponding integer mode. This is
3058 only safe when simplify_subreg can convert MODE constants into integer
3059 constants. At present, it can only do this reliably if the value
3060 fits within a HOST_WIDE_INT. */
3061 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3062 && (submode
= int_mode_for_mode (mode
)) != BLKmode
3063 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
3064 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
3065 (simplify_gen_subreg (submode
, x
, mode
, 0),
3066 simplify_gen_subreg (submode
, y
, mode
, 0)));
3068 /* This will handle any multi-word or full-word mode that lacks a move_insn
3069 pattern. However, you will get better code if you define such patterns,
3070 even if they must turn into multiple assembler instructions. */
3071 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3078 #ifdef PUSH_ROUNDING
3080 /* If X is a push on the stack, do the push now and replace
3081 X with a reference to the stack pointer. */
3082 if (push_operand (x
, GET_MODE (x
)))
3087 /* Do not use anti_adjust_stack, since we don't want to update
3088 stack_pointer_delta. */
3089 temp
= expand_binop (Pmode
,
3090 #ifdef STACK_GROWS_DOWNWARD
3098 (GET_MODE_SIZE (GET_MODE (x
)))),
3099 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3101 if (temp
!= stack_pointer_rtx
)
3102 emit_move_insn (stack_pointer_rtx
, temp
);
3104 code
= GET_CODE (XEXP (x
, 0));
3106 /* Just hope that small offsets off SP are OK. */
3107 if (code
== POST_INC
)
3108 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3109 GEN_INT (-((HOST_WIDE_INT
)
3110 GET_MODE_SIZE (GET_MODE (x
)))));
3111 else if (code
== POST_DEC
)
3112 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3113 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3115 temp
= stack_pointer_rtx
;
3117 x
= change_address (x
, VOIDmode
, temp
);
3121 /* If we are in reload, see if either operand is a MEM whose address
3122 is scheduled for replacement. */
3123 if (reload_in_progress
&& GET_CODE (x
) == MEM
3124 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3125 x
= replace_equiv_address_nv (x
, inner
);
3126 if (reload_in_progress
&& GET_CODE (y
) == MEM
3127 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3128 y
= replace_equiv_address_nv (y
, inner
);
3134 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3137 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3138 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3140 /* If we can't get a part of Y, put Y into memory if it is a
3141 constant. Otherwise, force it into a register. If we still
3142 can't get a part of Y, abort. */
3143 if (ypart
== 0 && CONSTANT_P (y
))
3145 y
= force_const_mem (mode
, y
);
3146 ypart
= operand_subword (y
, i
, 1, mode
);
3148 else if (ypart
== 0)
3149 ypart
= operand_subword_force (y
, i
, mode
);
3151 if (xpart
== 0 || ypart
== 0)
3154 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3156 last_insn
= emit_move_insn (xpart
, ypart
);
3162 /* Show the output dies here. This is necessary for SUBREGs
3163 of pseudos since we cannot track their lifetimes correctly;
3164 hard regs shouldn't appear here except as return values.
3165 We never want to emit such a clobber after reload. */
3167 && ! (reload_in_progress
|| reload_completed
)
3168 && need_clobber
!= 0)
3169 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3179 /* If Y is representable exactly in a narrower mode, and the target can
3180 perform the extension directly from constant or memory, then emit the
3181 move as an extension. */
3184 compress_float_constant (rtx x
, rtx y
)
3186 enum machine_mode dstmode
= GET_MODE (x
);
3187 enum machine_mode orig_srcmode
= GET_MODE (y
);
3188 enum machine_mode srcmode
;
3191 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3193 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3194 srcmode
!= orig_srcmode
;
3195 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3198 rtx trunc_y
, last_insn
;
3200 /* Skip if the target can't extend this way. */
3201 ic
= can_extend_p (dstmode
, srcmode
, 0);
3202 if (ic
== CODE_FOR_nothing
)
3205 /* Skip if the narrowed value isn't exact. */
3206 if (! exact_real_truncate (srcmode
, &r
))
3209 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3211 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3213 /* Skip if the target needs extra instructions to perform
3215 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3218 else if (float_extend_from_mem
[dstmode
][srcmode
])
3219 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3223 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3224 last_insn
= get_last_insn ();
3226 if (GET_CODE (x
) == REG
)
3227 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3235 /* Pushing data onto the stack. */
3237 /* Push a block of length SIZE (perhaps variable)
3238 and return an rtx to address the beginning of the block.
3239 Note that it is not possible for the value returned to be a QUEUED.
3240 The value may be virtual_outgoing_args_rtx.
3242 EXTRA is the number of bytes of padding to push in addition to SIZE.
3243 BELOW nonzero means this padding comes at low addresses;
3244 otherwise, the padding comes at high addresses. */
3247 push_block (rtx size
, int extra
, int below
)
3251 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3252 if (CONSTANT_P (size
))
3253 anti_adjust_stack (plus_constant (size
, extra
));
3254 else if (GET_CODE (size
) == REG
&& extra
== 0)
3255 anti_adjust_stack (size
);
3258 temp
= copy_to_mode_reg (Pmode
, size
);
3260 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3261 temp
, 0, OPTAB_LIB_WIDEN
);
3262 anti_adjust_stack (temp
);
3265 #ifndef STACK_GROWS_DOWNWARD
3271 temp
= virtual_outgoing_args_rtx
;
3272 if (extra
!= 0 && below
)
3273 temp
= plus_constant (temp
, extra
);
3277 if (GET_CODE (size
) == CONST_INT
)
3278 temp
= plus_constant (virtual_outgoing_args_rtx
,
3279 -INTVAL (size
) - (below
? 0 : extra
));
3280 else if (extra
!= 0 && !below
)
3281 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3282 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3284 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3285 negate_rtx (Pmode
, size
));
3288 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
#ifdef PUSH_ROUNDING

/* Emit single push insn.  Push X, of machine mode MODE and (possibly
   NULL) tree TYPE, onto the stack, using either the target's push
   pattern or an explicit store through a pre/post-modified stack
   address.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
3383 /* Generate code to push X onto the stack, assuming it has mode MODE and
3385 MODE is redundant except when X is a CONST_INT (since they don't
3387 SIZE is an rtx for the size of data to be copied (in bytes),
3388 needed only if X is BLKmode.
3390 ALIGN (in bits) is maximum alignment we can assume.
3392 If PARTIAL and REG are both nonzero, then copy that many of the first
3393 words of X into registers starting with REG, and push the rest of X.
3394 The amount of space pushed is decreased by PARTIAL words,
3395 rounded *down* to a multiple of PARM_BOUNDARY.
3396 REG must be a hard register in this case.
3397 If REG is zero but PARTIAL is not, take any all others actions for an
3398 argument partially in registers, but do not actually load any
3401 EXTRA is the amount in bytes of extra space to leave next to this arg.
3402 This is ignored if an argument block has already been allocated.
3404 On a machine that lacks real push insns, ARGS_ADDR is the address of
3405 the bottom of the argument block for this call. We use indexing off there
3406 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3407 argument block has not been preallocated.
3409 ARGS_SO_FAR is the size of args previously pushed for this call.
3411 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3412 for arguments passed in registers. If nonzero, it will be the number
3413 of bytes required. */
3416 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3417 unsigned int align
, int partial
, rtx reg
, int extra
,
3418 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3422 enum direction stack_direction
3423 #ifdef STACK_GROWS_DOWNWARD
3429 /* Decide where to pad the argument: `downward' for below,
3430 `upward' for above, or `none' for don't pad it.
3431 Default is below for small data on big-endian machines; else above. */
3432 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3434 /* Invert direction if stack is post-decrement.
3436 if (STACK_PUSH_CODE
== POST_DEC
)
3437 if (where_pad
!= none
)
3438 where_pad
= (where_pad
== downward
? upward
: downward
);
3440 xinner
= x
= protect_from_queue (x
, 0);
3442 if (mode
== BLKmode
)
3444 /* Copy a block into the stack, entirely or partially. */
3447 int used
= partial
* UNITS_PER_WORD
;
3451 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3453 /* Use the size of the elt to compute offset. */
3454 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3455 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3456 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3459 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3466 /* USED is now the # of bytes we need not copy to the stack
3467 because registers will take care of them. */
3470 xinner
= adjust_address (xinner
, BLKmode
, used
);
3472 /* If the partial register-part of the arg counts in its stack size,
3473 skip the part of stack space corresponding to the registers.
3474 Otherwise, start copying to the beginning of the stack space,
3475 by setting SKIP to 0. */
3476 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3478 #ifdef PUSH_ROUNDING
3479 /* Do it with several push insns if that doesn't take lots of insns
3480 and if there is no difficulty with push insns that skip bytes
3481 on the stack for alignment purposes. */
3484 && GET_CODE (size
) == CONST_INT
3486 && MEM_ALIGN (xinner
) >= align
3487 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3488 /* Here we avoid the case of a structure whose weak alignment
3489 forces many pushes of a small amount of data,
3490 and such small pushes do rounding that causes trouble. */
3491 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3492 || align
>= BIGGEST_ALIGNMENT
3493 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3494 == (align
/ BITS_PER_UNIT
)))
3495 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3497 /* Push padding now if padding above and stack grows down,
3498 or if padding below and stack grows up.
3499 But if space already allocated, this has already been done. */
3500 if (extra
&& args_addr
== 0
3501 && where_pad
!= none
&& where_pad
!= stack_direction
)
3502 anti_adjust_stack (GEN_INT (extra
));
3504 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3507 #endif /* PUSH_ROUNDING */
3511 /* Otherwise make space on the stack and copy the data
3512 to the address of that space. */
3514 /* Deduct words put into registers from the size we must copy. */
3517 if (GET_CODE (size
) == CONST_INT
)
3518 size
= GEN_INT (INTVAL (size
) - used
);
3520 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3521 GEN_INT (used
), NULL_RTX
, 0,
3525 /* Get the address of the stack space.
3526 In this case, we do not deal with EXTRA separately.
3527 A single stack adjust will do. */
3530 temp
= push_block (size
, extra
, where_pad
== downward
);
3533 else if (GET_CODE (args_so_far
) == CONST_INT
)
3534 temp
= memory_address (BLKmode
,
3535 plus_constant (args_addr
,
3536 skip
+ INTVAL (args_so_far
)));
3538 temp
= memory_address (BLKmode
,
3539 plus_constant (gen_rtx_PLUS (Pmode
,
3544 if (!ACCUMULATE_OUTGOING_ARGS
)
3546 /* If the source is referenced relative to the stack pointer,
3547 copy it to another register to stabilize it. We do not need
3548 to do this if we know that we won't be changing sp. */
3550 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3551 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3552 temp
= copy_to_reg (temp
);
3555 target
= gen_rtx_MEM (BLKmode
, temp
);
3559 set_mem_attributes (target
, type
, 1);
3560 /* Function incoming arguments may overlap with sibling call
3561 outgoing arguments and we cannot allow reordering of reads
3562 from function arguments with stores to outgoing arguments
3563 of sibling calls. */
3564 set_mem_alias_set (target
, 0);
3567 /* ALIGN may well be better aligned than TYPE, e.g. due to
3568 PARM_BOUNDARY. Assume the caller isn't lying. */
3569 set_mem_align (target
, align
);
3571 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3574 else if (partial
> 0)
3576 /* Scalar partly in registers. */
3578 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3581 /* # words of start of argument
3582 that we must make space for but need not store. */
3583 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3584 int args_offset
= INTVAL (args_so_far
);
3587 /* Push padding now if padding above and stack grows down,
3588 or if padding below and stack grows up.
3589 But if space already allocated, this has already been done. */
3590 if (extra
&& args_addr
== 0
3591 && where_pad
!= none
&& where_pad
!= stack_direction
)
3592 anti_adjust_stack (GEN_INT (extra
));
3594 /* If we make space by pushing it, we might as well push
3595 the real data. Otherwise, we can leave OFFSET nonzero
3596 and leave the space uninitialized. */
3600 /* Now NOT_STACK gets the number of words that we don't need to
3601 allocate on the stack. */
3602 not_stack
= partial
- offset
;
3604 /* If the partial register-part of the arg counts in its stack size,
3605 skip the part of stack space corresponding to the registers.
3606 Otherwise, start copying to the beginning of the stack space,
3607 by setting SKIP to 0. */
3608 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3610 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3611 x
= validize_mem (force_const_mem (mode
, x
));
3613 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3614 SUBREGs of such registers are not allowed. */
3615 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3616 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3617 x
= copy_to_reg (x
);
3619 /* Loop over all the words allocated on the stack for this arg. */
3620 /* We can do it by words, because any scalar bigger than a word
3621 has a size a multiple of a word. */
3622 #ifndef PUSH_ARGS_REVERSED
3623 for (i
= not_stack
; i
< size
; i
++)
3625 for (i
= size
- 1; i
>= not_stack
; i
--)
3627 if (i
>= not_stack
+ offset
)
3628 emit_push_insn (operand_subword_force (x
, i
, mode
),
3629 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3631 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3633 reg_parm_stack_space
, alignment_pad
);
3640 /* Push padding now if padding above and stack grows down,
3641 or if padding below and stack grows up.
3642 But if space already allocated, this has already been done. */
3643 if (extra
&& args_addr
== 0
3644 && where_pad
!= none
&& where_pad
!= stack_direction
)
3645 anti_adjust_stack (GEN_INT (extra
));
3647 #ifdef PUSH_ROUNDING
3648 if (args_addr
== 0 && PUSH_ARGS
)
3649 emit_single_push_insn (mode
, x
, type
);
3653 if (GET_CODE (args_so_far
) == CONST_INT
)
3655 = memory_address (mode
,
3656 plus_constant (args_addr
,
3657 INTVAL (args_so_far
)));
3659 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3661 dest
= gen_rtx_MEM (mode
, addr
);
3664 set_mem_attributes (dest
, type
, 1);
3665 /* Function incoming arguments may overlap with sibling call
3666 outgoing arguments and we cannot allow reordering of reads
3667 from function arguments with stores to outgoing arguments
3668 of sibling calls. */
3669 set_mem_alias_set (dest
, 0);
3672 emit_move_insn (dest
, x
);
3676 /* If part should go in registers, copy that part
3677 into the appropriate registers. Do this now, at the end,
3678 since mem-to-mem copies above may do function calls. */
3679 if (partial
> 0 && reg
!= 0)
3681 /* Handle calls that pass values in multiple non-contiguous locations.
3682 The Irix 6 ABI has examples of this. */
3683 if (GET_CODE (reg
) == PARALLEL
)
3684 emit_group_load (reg
, x
, type
, -1);
3686 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3689 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3690 anti_adjust_stack (GEN_INT (extra
));
3692 if (alignment_pad
&& args_addr
== 0)
3693 anti_adjust_stack (alignment_pad
);
3696 /* Return X if X can be used as a subtarget in a sequence of arithmetic
/* NOTE(review): only the middle of get_subtarget's boolean expression is
   visible here; its lead-in (lines 3701-3702) and the final `? 0 : x`
   result (line 3712) are missing from this extract.  Presumably it returns
   X when X is a safe arithmetic subtarget and 0 otherwise -- confirm
   against upstream GCC expr.c.  Code left byte-identical.  */
3700 get_subtarget (rtx x
)
3703 /* Only registers can be subtargets. */
3704 || GET_CODE (x
) != REG
3705 /* If the register is readonly, it can't be set more than once. */
3706 || RTX_UNCHANGING_P (x
)
3707 /* Don't use hard regs to avoid extending their life. */
3708 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3709 /* Avoid subtargets inside loops,
3710 since they hide some invariant expressions. */
3711 || preserve_subexpressions_p ())
3715 /* Expand an assignment that stores the value of FROM into TO.
3716 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3717 (This may contain a QUEUED rtx;
3718 if the value is constant, this rtx is a constant.)
3719 Otherwise, the returned value is NULL_RTX. */
/* NOTE(review): expand an assignment TO = FROM, returning an rtx for the
   stored value when WANT_VALUE is nonzero (per the doc comment preceding
   this function).  The extract is fragmented and interior lines are
   missing (e.g. 3723-3726, 3747-3753, 3854-3856), so local declarations
   and some branch structure are not visible.  Code left byte-identical;
   comments only.  */
3722 expand_assignment (tree to
, tree from
, int want_value
)
3727 /* Don't crash if the lhs of the assignment was erroneous. */
3729 if (TREE_CODE (to
) == ERROR_MARK
)
3731 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3732 return want_value
? result
: NULL_RTX
;
3735 /* Assignment of a structure component needs special treatment
3736 if the structure component's rtx is not simply a MEM.
3737 Assignment of an array element at a constant index, and assignment of
3738 an array element in an unaligned packed structure field, has the same
3741 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3742 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3743 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3745 enum machine_mode mode1
;
3746 HOST_WIDE_INT bitsize
, bitpos
;
/* Decompose the component reference into base object + bit offset/size.  */
3754 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3755 &unsignedp
, &volatilep
);
3757 /* If we are going to use store_bit_field and extract_bit_field,
3758 make sure to_rtx will be safe for multiple use. */
3760 if (mode1
== VOIDmode
&& want_value
)
3761 tem
= stabilize_reference (tem
);
3763 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
/* NOTE(review): the guard for this variable-offset path (presumably
   `if (offset != 0)`, lines 3764-3766) is missing from the extract.  */
3767 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3769 if (GET_CODE (to_rtx
) != MEM
)
3772 #ifdef POINTERS_EXTEND_UNSIGNED
3773 if (GET_MODE (offset_rtx
) != Pmode
)
3774 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3776 if (GET_MODE (offset_rtx
) != ptr_mode
)
3777 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3780 /* A constant address in TO_RTX can have VOIDmode, we must not try
3781 to call force_reg for that case. Avoid that case. */
3782 if (GET_CODE (to_rtx
) == MEM
3783 && GET_MODE (to_rtx
) == BLKmode
3784 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3786 && (bitpos
% bitsize
) == 0
3787 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3788 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3790 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3794 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3795 highest_pow2_factor_for_type (TREE_TYPE (to
),
3799 if (GET_CODE (to_rtx
) == MEM
)
3801 /* If the field is at offset zero, we could have been given the
3802 DECL_RTX of the parent struct. Don't munge it. */
3803 to_rtx
= shallow_copy_rtx (to_rtx
);
3805 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3808 /* Deal with volatile and readonly fields. The former is only done
3809 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3810 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3812 if (to_rtx
== orig_to_rtx
)
3813 to_rtx
= copy_rtx (to_rtx
);
3814 MEM_VOLATILE_P (to_rtx
) = 1;
3817 if (TREE_CODE (to
) == COMPONENT_REF
3818 && TREE_READONLY (TREE_OPERAND (to
, 1))
3819 /* We can't assert that a MEM won't be set more than once
3820 if the component is not addressable because another
3821 non-addressable component may be referenced by the same MEM. */
3822 && ! (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
)))
3824 if (to_rtx
== orig_to_rtx
)
3825 to_rtx
= copy_rtx (to_rtx
);
3826 RTX_UNCHANGING_P (to_rtx
) = 1;
3829 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3831 if (to_rtx
== orig_to_rtx
)
3832 to_rtx
= copy_rtx (to_rtx
);
3833 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
/* Perform the field store itself.  */
3836 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3838 /* Spurious cast for HPUX compiler. */
3839 ? ((enum machine_mode
)
3840 TYPE_MODE (TREE_TYPE (to
)))
3842 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3844 preserve_temp_slots (result
);
3848 /* If the value is meaningful, convert RESULT to the proper mode.
3849 Otherwise, return nothing. */
3850 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3851 TYPE_MODE (TREE_TYPE (from
)),
3853 TREE_UNSIGNED (TREE_TYPE (to
)))
3857 /* If the rhs is a function call and its value is not an aggregate,
3858 call the function before we start to compute the lhs.
3859 This is needed for correct code for cases such as
3860 val = setjmp (buf) on machines where reference to val
3861 requires loading up part of an address in a separate insn.
3863 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3864 since it might be a promoted variable where the zero- or sign- extension
3865 needs to be done. Handling this in the normal way is safe because no
3866 computation is done before the call. */
3867 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3868 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3869 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3870 && GET_CODE (DECL_RTL (to
)) == REG
))
/* Evaluate the call first, then the destination address.  */
3875 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3877 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3879 /* Handle calls that return values in multiple non-contiguous locations.
3880 The Irix 6 ABI has examples of this. */
3881 if (GET_CODE (to_rtx
) == PARALLEL
)
3882 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3883 int_size_in_bytes (TREE_TYPE (from
)));
3884 else if (GET_MODE (to_rtx
) == BLKmode
)
3885 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3888 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3889 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3890 emit_move_insn (to_rtx
, value
);
3892 preserve_temp_slots (to_rtx
);
3895 return want_value
? to_rtx
: NULL_RTX
;
3898 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3899 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3902 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3904 /* Don't move directly into a return register. */
3905 if (TREE_CODE (to
) == RESULT_DECL
3906 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3911 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3913 if (GET_CODE (to_rtx
) == PARALLEL
)
3914 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3915 int_size_in_bytes (TREE_TYPE (from
)));
3917 emit_move_insn (to_rtx
, temp
);
3919 preserve_temp_slots (to_rtx
);
3922 return want_value
? to_rtx
: NULL_RTX
;
3925 /* In case we are returning the contents of an object which overlaps
3926 the place the value is being stored, use a safe function when copying
3927 a value through a pointer into a structure value return block. */
3928 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3929 && current_function_returns_struct
3930 && !current_function_returns_pcc_struct
)
3935 size
= expr_size (from
);
3936 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
/* Copy via memmove (overlap-safe) or bcopy depending on target libs.  */
3938 if (TARGET_MEM_FUNCTIONS
)
3939 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3940 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3941 XEXP (from_rtx
, 0), Pmode
,
3942 convert_to_mode (TYPE_MODE (sizetype
),
3943 size
, TREE_UNSIGNED (sizetype
)),
3944 TYPE_MODE (sizetype
));
3946 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3947 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3948 XEXP (to_rtx
, 0), Pmode
,
3949 convert_to_mode (TYPE_MODE (integer_type_node
),
3951 TREE_UNSIGNED (integer_type_node
)),
3952 TYPE_MODE (integer_type_node
));
3954 preserve_temp_slots (to_rtx
);
3957 return want_value
? to_rtx
: NULL_RTX
;
3960 /* Compute FROM and store the value in the rtx we got. */
3963 result
= store_expr (from
, to_rtx
, want_value
);
3964 preserve_temp_slots (result
);
3967 return want_value
? result
: NULL_RTX
;
3970 /* Generate code for computing expression EXP,
3971 and storing the value into TARGET.
3972 TARGET may contain a QUEUED rtx.
3974 If WANT_VALUE & 1 is nonzero, return a copy of the value
3975 not in TARGET, so that we can be sure to use the proper
3976 value in a containing expression even if TARGET has something
3977 else stored in it. If possible, we copy the value through a pseudo
3978 and return that pseudo. Or, if the value is constant, we try to
3979 return the constant. In some cases, we return a pseudo
3980 copied *from* TARGET.
3982 If the mode is BLKmode then we may return TARGET itself.
3983 It turns out that in BLKmode it doesn't cause a problem.
3984 because C has no operators that could combine two different
3985 assignments into the same BLKmode object with different values
3986 with no sequence point. Will other languages need this to
3989 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3990 to catch quickly any cases where the caller uses the value
3991 and fails to set WANT_VALUE.
3993 If WANT_VALUE & 2 is set, this is a store into a call param on the
3994 stack, and block moves may need to be treated specially. */
/* NOTE(review): compute EXP and store it into TARGET; WANT_VALUE bits are
   documented in the comment block preceding this function (bit 0: caller
   wants the value back, bit 1: store into a call parameter on the stack).
   The extract is fragmented and interior lines are missing (e.g.
   3998-3999, 4091-4094, 4179-4181), so some declarations and guard
   conditions are not visible.  Code left byte-identical; comments only.  */
3997 store_expr (tree exp
, rtx target
, int want_value
)
4000 rtx alt_rtl
= NULL_RTX
;
4001 int dont_return_target
= 0;
4002 int dont_store_target
= 0;
4004 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4006 /* C++ can generate ?: expressions with a throw expression in one
4007 branch and an rvalue in the other. Here, we resolve attempts to
4008 store the throw expression's nonexistent result. */
4011 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4014 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4016 /* Perform first part of compound expression, then assign from second
4018 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4019 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4021 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4023 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4025 /* For conditional expression, get safe form of the target. Then
4026 test the condition, doing the appropriate assignment on either
4027 side. This avoids the creation of unnecessary temporaries.
4028 For non-BLKmode, it is more efficient not to do this. */
4030 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4033 target
= protect_from_queue (target
, 1);
4035 do_pending_stack_adjust ();
4037 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4038 start_cleanup_deferral ();
4039 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4040 end_cleanup_deferral ();
4042 emit_jump_insn (gen_jump (lab2
));
/* False arm of the conditional (label emission lines missing here).  */
4045 start_cleanup_deferral ();
4046 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4047 end_cleanup_deferral ();
4052 return want_value
& 1 ? target
: NULL_RTX
;
4054 else if (queued_subexp_p (target
))
4055 /* If target contains a postincrement, let's not risk
4056 using it as the place to generate the rhs. */
4058 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4060 /* Expand EXP into a new pseudo. */
4061 temp
= gen_reg_rtx (GET_MODE (target
));
4062 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4064 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4067 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4069 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4071 /* If target is volatile, ANSI requires accessing the value
4072 *from* the target, if it is accessed. So make that happen.
4073 In no case return the target itself. */
4074 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4075 dont_return_target
= 1;
4077 else if ((want_value
& 1) != 0
4078 && GET_CODE (target
) == MEM
4079 && ! MEM_VOLATILE_P (target
)
4080 && GET_MODE (target
) != BLKmode
)
4081 /* If target is in memory and caller wants value in a register instead,
4082 arrange that. Pass TARGET as target for expand_expr so that,
4083 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4084 We know expand_expr will not use the target in that case.
4085 Don't do this if TARGET is volatile because we are supposed
4086 to write it and then read it. */
4088 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4089 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4090 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4092 /* If TEMP is already in the desired TARGET, only copy it from
4093 memory and don't store it there again. */
/* NOTE(review): the first disjunct of this condition (line 4094) is
   missing from the extract.  */
4095 || (rtx_equal_p (temp
, target
)
4096 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4097 dont_store_target
= 1;
4098 temp
= copy_to_reg (temp
);
4100 dont_return_target
= 1;
4102 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4103 /* If this is a scalar in a register that is stored in a wider mode
4104 than the declared mode, compute the result into its declared mode
4105 and then convert to the wider mode. Our value is the computed
4108 rtx inner_target
= 0;
4110 /* If we don't want a value, we can do the conversion inside EXP,
4111 which will often result in some optimizations. Do the conversion
4112 in two steps: first change the signedness, if needed, then
4113 the extend. But don't do this if the type of EXP is a subtype
4114 of something else since then the conversion might involve
4115 more than just converting modes. */
4116 if ((want_value
& 1) == 0
4117 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4118 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4120 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4121 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4123 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4124 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4126 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4127 (GET_MODE (SUBREG_REG (target
)),
4128 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4131 inner_target
= SUBREG_REG (target
);
4134 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4135 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4137 /* If TEMP is a MEM and we want a result value, make the access
4138 now so it gets done only once. Strictly speaking, this is
4139 only necessary if the MEM is volatile, or if the address
4140 overlaps TARGET. But not performing the load twice also
4141 reduces the amount of rtl we generate and then have to CSE. */
4142 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4143 temp
= copy_to_reg (temp
);
4145 /* If TEMP is a VOIDmode constant, use convert_modes to make
4146 sure that we properly convert it. */
4147 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4149 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4150 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4151 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4152 GET_MODE (target
), temp
,
4153 SUBREG_PROMOTED_UNSIGNED_P (target
));
4156 convert_move (SUBREG_REG (target
), temp
,
4157 SUBREG_PROMOTED_UNSIGNED_P (target
));
4159 /* If we promoted a constant, change the mode back down to match
4160 target. Otherwise, the caller might get confused by a result whose
4161 mode is larger than expected. */
4163 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4165 if (GET_MODE (temp
) != VOIDmode
)
4167 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4168 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4169 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4170 SUBREG_PROMOTED_UNSIGNED_P (target
));
4173 temp
= convert_modes (GET_MODE (target
),
4174 GET_MODE (SUBREG_REG (target
)),
4175 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4178 return want_value
& 1 ? temp
: NULL_RTX
;
/* General case: expand EXP with TARGET as the suggested destination.  */
4182 temp
= expand_expr_real (exp
, target
, GET_MODE (target
),
4184 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
4186 /* Return TARGET if it's a specified hardware register.
4187 If TARGET is a volatile mem ref, either return TARGET
4188 or return a reg copied *from* TARGET; ANSI requires this.
4190 Otherwise, if TEMP is not TARGET, return TEMP
4191 if it is constant (for efficiency),
4192 or if we really want the correct value. */
4193 if (!(target
&& GET_CODE (target
) == REG
4194 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4195 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4196 && ! rtx_equal_p (temp
, target
)
4197 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4198 dont_return_target
= 1;
4201 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4202 the same as that of TARGET, adjust the constant. This is needed, for
4203 example, in case it is a CONST_DOUBLE and we want only a word-sized
4205 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4206 && TREE_CODE (exp
) != ERROR_MARK
4207 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4208 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4209 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4211 /* If value was not generated in the target, store it there.
4212 Convert the value to TARGET's type first if necessary.
4213 If TEMP and TARGET compare equal according to rtx_equal_p, but
4214 one or both of them are volatile memory refs, we have to distinguish
4216 - expand_expr has used TARGET. In this case, we must not generate
4217 another copy. This can be detected by TARGET being equal according
4219 - expand_expr has not used TARGET - that means that the source just
4220 happens to have the same RTX form. Since temp will have been created
4221 by expand_expr, it will compare unequal according to == .
4222 We must generate a copy in this case, to reach the correct number
4223 of volatile memory references. */
4225 if ((! rtx_equal_p (temp
, target
)
4226 || (temp
!= target
&& (side_effects_p (temp
)
4227 || side_effects_p (target
))))
4228 && TREE_CODE (exp
) != ERROR_MARK
4229 && ! dont_store_target
4230 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4231 but TARGET is not valid memory reference, TEMP will differ
4232 from TARGET although it is really the same location. */
4233 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
4234 /* If there's nothing to copy, don't bother. Don't call expr_size
4235 unless necessary, because some front-ends (C++) expr_size-hook
4236 aborts on objects that are not supposed to be bit-copied or
4238 && expr_size (exp
) != const0_rtx
)
4240 target
= protect_from_queue (target
, 1);
4241 if (GET_MODE (temp
) != GET_MODE (target
)
4242 && GET_MODE (temp
) != VOIDmode
)
4244 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4245 if (dont_return_target
)
4247 /* In this case, we will return TEMP,
4248 so make sure it has the proper mode.
4249 But don't forget to store the value into TARGET. */
4250 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4251 emit_move_insn (target
, temp
);
4254 convert_move (target
, temp
, unsignedp
);
4257 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4259 /* Handle copying a string constant into an array. The string
4260 constant may be shorter than the array. So copy just the string's
4261 actual length, and clear the rest. First get the size of the data
4262 type of the string, which is actually the size of the target. */
4263 rtx size
= expr_size (exp
);
4265 if (GET_CODE (size
) == CONST_INT
4266 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4267 emit_block_move (target
, temp
, size
,
4269 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4272 /* Compute the size of the data to copy from the string. */
4274 = size_binop (MIN_EXPR
,
4275 make_tree (sizetype
, size
),
4276 size_int (TREE_STRING_LENGTH (exp
)));
4278 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4280 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4283 /* Copy that much. */
4284 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4285 TREE_UNSIGNED (sizetype
));
4286 emit_block_move (target
, temp
, copy_size_rtx
,
4288 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4290 /* Figure out how much is left in TARGET that we have to clear.
4291 Do all calculations in ptr_mode. */
4292 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4294 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4295 target
= adjust_address (target
, BLKmode
,
4296 INTVAL (copy_size_rtx
));
4300 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4301 copy_size_rtx
, NULL_RTX
, 0,
4304 #ifdef POINTERS_EXTEND_UNSIGNED
4305 if (GET_MODE (copy_size_rtx
) != Pmode
)
4306 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4307 TREE_UNSIGNED (sizetype
));
4310 target
= offset_address (target
, copy_size_rtx
,
4311 highest_pow2_factor (copy_size
));
4312 label
= gen_label_rtx ();
/* Skip the clear when the remaining size is negative.  */
4313 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4314 GET_MODE (size
), 0, label
);
4317 if (size
!= const0_rtx
)
4318 clear_storage (target
, size
);
4324 /* Handle calls that return values in multiple non-contiguous locations.
4325 The Irix 6 ABI has examples of this. */
4326 else if (GET_CODE (target
) == PARALLEL
)
4327 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4328 int_size_in_bytes (TREE_TYPE (exp
)));
4329 else if (GET_MODE (temp
) == BLKmode
)
4330 emit_block_move (target
, temp
, expr_size (exp
),
4332 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4334 emit_move_insn (target
, temp
);
4337 /* If we don't want a value, return NULL_RTX. */
4338 if ((want_value
& 1) == 0)
4341 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4342 ??? The latter test doesn't seem to make sense. */
4343 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4346 /* Return TARGET itself if it is a hard register. */
4347 else if ((want_value
& 1) != 0
4348 && GET_MODE (target
) != BLKmode
4349 && ! (GET_CODE (target
) == REG
4350 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4351 return copy_to_reg (target
);
4357 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
/* NOTE(review): returns 1 if EXP contains only zeros (per the comment
   preceding this function).  Several case labels and return statements
   (e.g. lines 4361-4363, 4365-4367, 4371-4372, 4386-4390) are missing
   from this extract, so the switch is incomplete here.  Code left
   byte-identical; comments only.  */
4360 is_zeros_p (tree exp
)
4364 switch (TREE_CODE (exp
))
4368 case NON_LVALUE_EXPR
:
4369 case VIEW_CONVERT_EXPR
:
/* Wrappers: look through to the operand.  */
4370 return is_zeros_p (TREE_OPERAND (exp
, 0));
4373 return integer_zerop (exp
);
/* Complex constant: both parts must be zero.  */
4377 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4380 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
/* Vector constant: every element must be zero.  */
4383 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4384 elt
= TREE_CHAIN (elt
))
4385 if (!is_zeros_p (TREE_VALUE (elt
)))
/* Constructor: a SET_TYPE is all-zero iff it has no elements; otherwise
   every element value must itself be all-zero.  */
4391 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4392 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4393 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4394 if (! is_zeros_p (TREE_VALUE (elt
)))
4404 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* NOTE(review): returns 1 if EXP is at least 3/4 zeros (per the preceding
   comment).  The loop body lines that increment ZEROS and ELTS (4426-4429)
   are missing from this extract.  Code left byte-identical.  */
4407 mostly_zeros_p (tree exp
)
4409 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4411 int elts
= 0, zeros
= 0;
4412 tree elt
= CONSTRUCTOR_ELTS (exp
);
4413 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4415 /* If there are no ranges of true bits, it is all zero. */
4416 return elt
== NULL_TREE
;
4418 for (; elt
; elt
= TREE_CHAIN (elt
))
4420 /* We do not handle the case where the index is a RANGE_EXPR,
4421 so the statistic will be somewhat inaccurate.
4422 We do make a more accurate count in store_constructor itself,
4423 so since this function is only used for nested array elements,
4424 this should be close enough. */
4425 if (mostly_zeros_p (TREE_VALUE (elt
)))
/* Three-quarters threshold: 4*zeros >= 3*elts.  */
4430 return 4 * zeros
>= 3 * elts
;
/* Non-constructor: fall back to the exact all-zeros test.  */
4433 return is_zeros_p (exp
);
4436 /* Helper function for store_constructor.
4437 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4438 TYPE is the type of the CONSTRUCTOR, not the element type.
4439 CLEARED is as for store_constructor.
4440 ALIAS_SET is the alias set to use for any stores.
4442 This provides a recursive shortcut back to store_constructor when it isn't
4443 necessary to go through store_field. This is so that we can pass through
4444 the cleared field to let store_constructor know that we may not have to
4445 clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): helper for store_constructor (see the comment block that
   precedes this function): stores EXP into the BITSIZE/BITPOS field of
   TARGET, recursing directly into store_constructor for nested
   CONSTRUCTORs so the CLEARED flag can propagate.  Lines 4451, 4458,
   4466-4467, 4477-4478 and the final store_field argument (4480) are
   missing from this extract.  Code left byte-identical.  */
4448 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
4449 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
4450 tree exp
, tree type
, int cleared
, int alias_set
)
4452 if (TREE_CODE (exp
) == CONSTRUCTOR
4453 && bitpos
% BITS_PER_UNIT
== 0
4454 /* If we have a nonzero bitpos for a register target, then we just
4455 let store_field do the bitfield handling. This is unlikely to
4456 generate unnecessary clear instructions anyways. */
4457 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4459 if (GET_CODE (target
) == MEM
)
/* Re-address TARGET at the field's byte offset; keep BLKmode when the
   bit position is not aligned to the target mode's alignment.  */
= adjust_address (target
,
4462 GET_MODE (target
) == BLKmode
4464 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4465 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4468 /* Update the alias set, if required. */
4469 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4470 && MEM_ALIAS_SET (target
) != 0)
4472 target
= copy_rtx (target
);
4473 set_mem_alias_set (target
, alias_set
);
/* Recursive shortcut: nested constructor handled without store_field.  */
4476 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
/* Fallback: ordinary field store (trailing alias-set argument on the
   missing line 4480).  */
4479 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
,
4483 /* Store the value of constructor EXP into the rtx TARGET.
4484 TARGET is either a REG or a MEM; we know it cannot conflict, since
4485 safe_from_p has been called.
4486 CLEARED is true if TARGET is known to have been zero'd.
4487 SIZE is the number of bytes of TARGET we are allowed to modify: this
4488 may not be the same as the size of EXP if we are assigning to a field
4489 which has been packed to exclude padding bits. */
4492 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4494 tree type
= TREE_TYPE (exp
);
4495 #ifdef WORD_REGISTER_OPERATIONS
4496 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4499 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4500 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4504 /* If size is zero or the target is already cleared, do nothing. */
4505 if (size
== 0 || cleared
)
4507 /* We either clear the aggregate or indicate the value is dead. */
4508 else if ((TREE_CODE (type
) == UNION_TYPE
4509 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4510 && ! CONSTRUCTOR_ELTS (exp
))
4511 /* If the constructor is empty, clear the union. */
4513 clear_storage (target
, expr_size (exp
));
4517 /* If we are building a static constructor into a register,
4518 set the initial value as zero so we can fold the value into
4519 a constant. But if more than one register is involved,
4520 this probably loses. */
4521 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4522 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4524 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4528 /* If the constructor has fewer fields than the structure
4529 or if we are initializing the structure to mostly zeros,
4530 clear the whole structure first. Don't do this if TARGET is a
4531 register whose mode size isn't equal to SIZE since clear_storage
4532 can't handle this case. */
4533 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4534 || mostly_zeros_p (exp
))
4535 && (GET_CODE (target
) != REG
4536 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4539 rtx xtarget
= target
;
4541 if (readonly_fields_p (type
))
4543 xtarget
= copy_rtx (xtarget
);
4544 RTX_UNCHANGING_P (xtarget
) = 1;
4547 clear_storage (xtarget
, GEN_INT (size
));
4552 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4554 /* Store each element of the constructor into
4555 the corresponding field of TARGET. */
4557 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4559 tree field
= TREE_PURPOSE (elt
);
4560 tree value
= TREE_VALUE (elt
);
4561 enum machine_mode mode
;
4562 HOST_WIDE_INT bitsize
;
4563 HOST_WIDE_INT bitpos
= 0;
4565 rtx to_rtx
= target
;
4567 /* Just ignore missing fields.
4568 We cleared the whole structure, above,
4569 if any fields are missing. */
4573 if (cleared
&& is_zeros_p (value
))
4576 if (host_integerp (DECL_SIZE (field
), 1))
4577 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4581 mode
= DECL_MODE (field
);
4582 if (DECL_BIT_FIELD (field
))
4585 offset
= DECL_FIELD_OFFSET (field
);
4586 if (host_integerp (offset
, 0)
4587 && host_integerp (bit_position (field
), 0))
4589 bitpos
= int_bit_position (field
);
4593 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4599 if (CONTAINS_PLACEHOLDER_P (offset
))
4600 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4601 offset
, make_tree (TREE_TYPE (exp
), target
));
4603 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4604 if (GET_CODE (to_rtx
) != MEM
)
4607 #ifdef POINTERS_EXTEND_UNSIGNED
4608 if (GET_MODE (offset_rtx
) != Pmode
)
4609 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4611 if (GET_MODE (offset_rtx
) != ptr_mode
)
4612 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4615 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4616 highest_pow2_factor (offset
));
4619 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4620 on the MEM might lead to scheduling the clearing after the
4622 if (TREE_READONLY (field
) && !cleared
)
4624 if (GET_CODE (to_rtx
) == MEM
)
4625 to_rtx
= copy_rtx (to_rtx
);
4627 RTX_UNCHANGING_P (to_rtx
) = 1;
4630 #ifdef WORD_REGISTER_OPERATIONS
4631 /* If this initializes a field that is smaller than a word, at the
4632 start of a word, try to widen it to a full word.
4633 This special case allows us to output C++ member function
4634 initializations in a form that the optimizers can understand. */
4635 if (GET_CODE (target
) == REG
4636 && bitsize
< BITS_PER_WORD
4637 && bitpos
% BITS_PER_WORD
== 0
4638 && GET_MODE_CLASS (mode
) == MODE_INT
4639 && TREE_CODE (value
) == INTEGER_CST
4641 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4643 tree type
= TREE_TYPE (value
);
4645 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4647 type
= (*lang_hooks
.types
.type_for_size
)
4648 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4649 value
= convert (type
, value
);
4652 if (BYTES_BIG_ENDIAN
)
4654 = fold (build (LSHIFT_EXPR
, type
, value
,
4655 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4656 bitsize
= BITS_PER_WORD
;
4661 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4662 && DECL_NONADDRESSABLE_P (field
))
4664 to_rtx
= copy_rtx (to_rtx
);
4665 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4668 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4669 value
, type
, cleared
,
4670 get_alias_set (TREE_TYPE (field
)));
4673 else if (TREE_CODE (type
) == ARRAY_TYPE
4674 || TREE_CODE (type
) == VECTOR_TYPE
)
4679 tree domain
= TYPE_DOMAIN (type
);
4680 tree elttype
= TREE_TYPE (type
);
4682 HOST_WIDE_INT minelt
= 0;
4683 HOST_WIDE_INT maxelt
= 0;
4687 unsigned n_elts
= 0;
4689 /* Vectors are like arrays, but the domain is stored via an array
4691 if (TREE_CODE (type
) == VECTOR_TYPE
)
4693 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4694 the same field as TYPE_DOMAIN, we are not guaranteed that
4696 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4697 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4698 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
4700 enum machine_mode mode
= GET_MODE (target
);
4702 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
4703 if (icode
!= CODE_FOR_nothing
)
4707 elt_size
= GET_MODE_SIZE (GET_MODE_INNER (mode
));
4708 n_elts
= (GET_MODE_SIZE (mode
) / elt_size
);
4709 vector
= alloca (n_elts
);
4710 for (i
= 0; i
< n_elts
; i
++)
4711 vector
[i
] = CONST0_RTX (GET_MODE_INNER (mode
));
4716 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4717 && TYPE_MAX_VALUE (domain
)
4718 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4719 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4721 /* If we have constant bounds for the range of the type, get them. */
4724 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4725 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4728 /* If the constructor has fewer elements than the array,
4729 clear the whole array first. Similarly if this is
4730 static constructor of a non-BLKmode object. */
4731 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4735 HOST_WIDE_INT count
= 0, zero_count
= 0;
4736 need_to_clear
= ! const_bounds_p
;
4738 /* This loop is a more accurate version of the loop in
4739 mostly_zeros_p (it handles RANGE_EXPR in an index).
4740 It is also needed to check for missing elements. */
4741 for (elt
= CONSTRUCTOR_ELTS (exp
);
4742 elt
!= NULL_TREE
&& ! need_to_clear
;
4743 elt
= TREE_CHAIN (elt
))
4745 tree index
= TREE_PURPOSE (elt
);
4746 HOST_WIDE_INT this_node_count
;
4748 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4750 tree lo_index
= TREE_OPERAND (index
, 0);
4751 tree hi_index
= TREE_OPERAND (index
, 1);
4753 if (! host_integerp (lo_index
, 1)
4754 || ! host_integerp (hi_index
, 1))
4760 this_node_count
= (tree_low_cst (hi_index
, 1)
4761 - tree_low_cst (lo_index
, 1) + 1);
4764 this_node_count
= 1;
4766 count
+= this_node_count
;
4767 if (mostly_zeros_p (TREE_VALUE (elt
)))
4768 zero_count
+= this_node_count
;
4771 /* Clear the entire array first if there are any missing elements,
4772 or if the incidence of zero elements is >= 75%. */
4774 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4778 if (need_to_clear
&& size
> 0 && !vector
)
4783 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4785 clear_storage (target
, GEN_INT (size
));
4789 else if (REG_P (target
))
4790 /* Inform later passes that the old value is dead. */
4791 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4793 /* Store each element of the constructor into
4794 the corresponding element of TARGET, determined
4795 by counting the elements. */
4796 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4798 elt
= TREE_CHAIN (elt
), i
++)
4800 enum machine_mode mode
;
4801 HOST_WIDE_INT bitsize
;
4802 HOST_WIDE_INT bitpos
;
4804 tree value
= TREE_VALUE (elt
);
4805 tree index
= TREE_PURPOSE (elt
);
4806 rtx xtarget
= target
;
4808 if (cleared
&& is_zeros_p (value
))
4811 unsignedp
= TREE_UNSIGNED (elttype
);
4812 mode
= TYPE_MODE (elttype
);
4813 if (mode
== BLKmode
)
4814 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4815 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4818 bitsize
= GET_MODE_BITSIZE (mode
);
4820 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4822 tree lo_index
= TREE_OPERAND (index
, 0);
4823 tree hi_index
= TREE_OPERAND (index
, 1);
4824 rtx index_r
, pos_rtx
, loop_end
;
4825 struct nesting
*loop
;
4826 HOST_WIDE_INT lo
, hi
, count
;
4832 /* If the range is constant and "small", unroll the loop. */
4834 && host_integerp (lo_index
, 0)
4835 && host_integerp (hi_index
, 0)
4836 && (lo
= tree_low_cst (lo_index
, 0),
4837 hi
= tree_low_cst (hi_index
, 0),
4838 count
= hi
- lo
+ 1,
4839 (GET_CODE (target
) != MEM
4841 || (host_integerp (TYPE_SIZE (elttype
), 1)
4842 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4845 lo
-= minelt
; hi
-= minelt
;
4846 for (; lo
<= hi
; lo
++)
4848 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4850 if (GET_CODE (target
) == MEM
4851 && !MEM_KEEP_ALIAS_SET_P (target
)
4852 && TREE_CODE (type
) == ARRAY_TYPE
4853 && TYPE_NONALIASED_COMPONENT (type
))
4855 target
= copy_rtx (target
);
4856 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4859 store_constructor_field
4860 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4861 get_alias_set (elttype
));
4866 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4867 loop_end
= gen_label_rtx ();
4869 unsignedp
= TREE_UNSIGNED (domain
);
4871 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4874 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4876 SET_DECL_RTL (index
, index_r
);
4877 if (TREE_CODE (value
) == SAVE_EXPR
4878 && SAVE_EXPR_RTL (value
) == 0)
4880 /* Make sure value gets expanded once before the
4882 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4885 store_expr (lo_index
, index_r
, 0);
4886 loop
= expand_start_loop (0);
4888 /* Assign value to element index. */
4890 = convert (ssizetype
,
4891 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4892 index
, TYPE_MIN_VALUE (domain
))));
4893 position
= size_binop (MULT_EXPR
, position
,
4895 TYPE_SIZE_UNIT (elttype
)));
4897 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4898 xtarget
= offset_address (target
, pos_rtx
,
4899 highest_pow2_factor (position
));
4900 xtarget
= adjust_address (xtarget
, mode
, 0);
4901 if (TREE_CODE (value
) == CONSTRUCTOR
)
4902 store_constructor (value
, xtarget
, cleared
,
4903 bitsize
/ BITS_PER_UNIT
);
4905 store_expr (value
, xtarget
, 0);
4907 expand_exit_loop_if_false (loop
,
4908 build (LT_EXPR
, integer_type_node
,
4911 expand_increment (build (PREINCREMENT_EXPR
,
4913 index
, integer_one_node
), 0, 0);
4915 emit_label (loop_end
);
4918 else if ((index
!= 0 && ! host_integerp (index
, 0))
4919 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4927 index
= ssize_int (1);
4930 index
= convert (ssizetype
,
4931 fold (build (MINUS_EXPR
, index
,
4932 TYPE_MIN_VALUE (domain
))));
4934 position
= size_binop (MULT_EXPR
, index
,
4936 TYPE_SIZE_UNIT (elttype
)));
4937 xtarget
= offset_address (target
,
4938 expand_expr (position
, 0, VOIDmode
, 0),
4939 highest_pow2_factor (position
));
4940 xtarget
= adjust_address (xtarget
, mode
, 0);
4941 store_expr (value
, xtarget
, 0);
4948 pos
= tree_low_cst (index
, 0) - minelt
;
4951 vector
[pos
] = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
4956 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4957 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4959 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4961 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4962 && TREE_CODE (type
) == ARRAY_TYPE
4963 && TYPE_NONALIASED_COMPONENT (type
))
4965 target
= copy_rtx (target
);
4966 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4968 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4969 type
, cleared
, get_alias_set (elttype
));
4974 emit_insn (GEN_FCN (icode
) (target
,
4975 gen_rtx_PARALLEL (GET_MODE (target
),
4976 gen_rtvec_v (n_elts
, vector
))));
4980 /* Set constructor assignments. */
4981 else if (TREE_CODE (type
) == SET_TYPE
)
4983 tree elt
= CONSTRUCTOR_ELTS (exp
);
4984 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4985 tree domain
= TYPE_DOMAIN (type
);
4986 tree domain_min
, domain_max
, bitlength
;
4988 /* The default implementation strategy is to extract the constant
4989 parts of the constructor, use that to initialize the target,
4990 and then "or" in whatever non-constant ranges we need in addition.
4992 If a large set is all zero or all ones, it is
4993 probably better to set it using memset (if available) or bzero.
4994 Also, if a large set has just a single range, it may also be
4995 better to first clear all the first clear the set (using
4996 bzero/memset), and set the bits we want. */
4998 /* Check for all zeros. */
4999 if (elt
== NULL_TREE
&& size
> 0)
5002 clear_storage (target
, GEN_INT (size
));
5006 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5007 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5008 bitlength
= size_binop (PLUS_EXPR
,
5009 size_diffop (domain_max
, domain_min
),
5012 nbits
= tree_low_cst (bitlength
, 1);
5014 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5015 are "complicated" (more than one range), initialize (the
5016 constant parts) by copying from a constant. */
5017 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5018 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5020 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5021 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5022 char *bit_buffer
= alloca (nbits
);
5023 HOST_WIDE_INT word
= 0;
5024 unsigned int bit_pos
= 0;
5025 unsigned int ibit
= 0;
5026 unsigned int offset
= 0; /* In bytes from beginning of set. */
5028 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5031 if (bit_buffer
[ibit
])
5033 if (BYTES_BIG_ENDIAN
)
5034 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5036 word
|= 1 << bit_pos
;
5040 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5042 if (word
!= 0 || ! cleared
)
5044 rtx datum
= GEN_INT (word
);
5047 /* The assumption here is that it is safe to use
5048 XEXP if the set is multi-word, but not if
5049 it's single-word. */
5050 if (GET_CODE (target
) == MEM
)
5051 to_rtx
= adjust_address (target
, mode
, offset
);
5052 else if (offset
== 0)
5056 emit_move_insn (to_rtx
, datum
);
5063 offset
+= set_word_size
/ BITS_PER_UNIT
;
5068 /* Don't bother clearing storage if the set is all ones. */
5069 if (TREE_CHAIN (elt
) != NULL_TREE
5070 || (TREE_PURPOSE (elt
) == NULL_TREE
5072 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5073 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5074 || (tree_low_cst (TREE_VALUE (elt
), 0)
5075 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5076 != (HOST_WIDE_INT
) nbits
))))
5077 clear_storage (target
, expr_size (exp
));
5079 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5081 /* Start of range of element or NULL. */
5082 tree startbit
= TREE_PURPOSE (elt
);
5083 /* End of range of element, or element value. */
5084 tree endbit
= TREE_VALUE (elt
);
5085 HOST_WIDE_INT startb
, endb
;
5086 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5088 bitlength_rtx
= expand_expr (bitlength
,
5089 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5091 /* Handle non-range tuple element like [ expr ]. */
5092 if (startbit
== NULL_TREE
)
5094 startbit
= save_expr (endbit
);
5098 startbit
= convert (sizetype
, startbit
);
5099 endbit
= convert (sizetype
, endbit
);
5100 if (! integer_zerop (domain_min
))
5102 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5103 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5105 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5106 EXPAND_CONST_ADDRESS
);
5107 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5108 EXPAND_CONST_ADDRESS
);
5114 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5115 (GET_MODE (target
), 0),
5118 emit_move_insn (targetx
, target
);
5121 else if (GET_CODE (target
) == MEM
)
5126 /* Optimization: If startbit and endbit are constants divisible
5127 by BITS_PER_UNIT, call memset instead. */
5128 if (TARGET_MEM_FUNCTIONS
5129 && TREE_CODE (startbit
) == INTEGER_CST
5130 && TREE_CODE (endbit
) == INTEGER_CST
5131 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5132 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5134 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5136 plus_constant (XEXP (targetx
, 0),
5137 startb
/ BITS_PER_UNIT
),
5139 constm1_rtx
, TYPE_MODE (integer_type_node
),
5140 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5141 TYPE_MODE (sizetype
));
5144 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5145 VOIDmode
, 4, XEXP (targetx
, 0),
5146 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5147 startbit_rtx
, TYPE_MODE (sizetype
),
5148 endbit_rtx
, TYPE_MODE (sizetype
));
5151 emit_move_insn (target
, targetx
);
5159 /* Store the value of EXP (an expression tree)
5160 into a subfield of TARGET which has mode MODE and occupies
5161 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5162 If MODE is VOIDmode, it means that we are storing into a bit-field.
5164 If VALUE_MODE is VOIDmode, return nothing in particular.
5165 UNSIGNEDP is not used in this case.
5167 Otherwise, return an rtx for the value stored. This rtx
5168 has mode VALUE_MODE if that is convenient to do.
5169 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5171 TYPE is the type of the underlying object,
5173 ALIAS_SET is the alias set for the destination. This value will
5174 (in general) be different from that for TARGET, since TARGET is a
5175 reference to the containing structure. */
5178 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5179 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5180 int unsignedp
, tree type
, int alias_set
)
5182 HOST_WIDE_INT width_mask
= 0;
5184 if (TREE_CODE (exp
) == ERROR_MARK
)
5187 /* If we have nothing to store, do nothing unless the expression has
5190 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5191 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5192 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5194 /* If we are storing into an unaligned field of an aligned union that is
5195 in a register, we may have the mode of TARGET being an integer mode but
5196 MODE == BLKmode. In that case, get an aligned object whose size and
5197 alignment are the same as TARGET and store TARGET into it (we can avoid
5198 the store if the field being stored is the entire width of TARGET). Then
5199 call ourselves recursively to store the field into a BLKmode version of
5200 that object. Finally, load from the object into TARGET. This is not
5201 very efficient in general, but should only be slightly more expensive
5202 than the otherwise-required unaligned accesses. Perhaps this can be
5203 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5204 twice, once with emit_move_insn and once via store_field. */
5207 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5209 rtx object
= assign_temp (type
, 0, 1, 1);
5210 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5212 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5213 emit_move_insn (object
, target
);
5215 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5218 emit_move_insn (target
, object
);
5220 /* We want to return the BLKmode version of the data. */
5224 if (GET_CODE (target
) == CONCAT
)
5226 /* We're storing into a struct containing a single __complex. */
5230 return store_expr (exp
, target
, 0);
5233 /* If the structure is in a register or if the component
5234 is a bit field, we cannot use addressing to access it.
5235 Use bit-field techniques or SUBREG to store in it. */
5237 if (mode
== VOIDmode
5238 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5239 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5240 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5241 || GET_CODE (target
) == REG
5242 || GET_CODE (target
) == SUBREG
5243 /* If the field isn't aligned enough to store as an ordinary memref,
5244 store it as a bit field. */
5246 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5247 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5248 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5249 || (bitpos
% BITS_PER_UNIT
!= 0)))
5250 /* If the RHS and field are a constant size and the size of the
5251 RHS isn't the same size as the bitfield, we must use bitfield
5254 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5255 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5257 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5259 /* If BITSIZE is narrower than the size of the type of EXP
5260 we will be narrowing TEMP. Normally, what's wanted are the
5261 low-order bits. However, if EXP's type is a record and this is
5262 big-endian machine, we want the upper BITSIZE bits. */
5263 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5264 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5265 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5266 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5267 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5271 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5273 if (mode
!= VOIDmode
&& mode
!= BLKmode
5274 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5275 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5277 /* If the modes of TARGET and TEMP are both BLKmode, both
5278 must be in memory and BITPOS must be aligned on a byte
5279 boundary. If so, we simply do a block copy. */
5280 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5282 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5283 || bitpos
% BITS_PER_UNIT
!= 0)
5286 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5287 emit_block_move (target
, temp
,
5288 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5292 return value_mode
== VOIDmode
? const0_rtx
: target
;
5295 /* Store the value in the bitfield. */
5296 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5297 int_size_in_bytes (type
));
5299 if (value_mode
!= VOIDmode
)
5301 /* The caller wants an rtx for the value.
5302 If possible, avoid refetching from the bitfield itself. */
5304 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5307 enum machine_mode tmode
;
5309 tmode
= GET_MODE (temp
);
5310 if (tmode
== VOIDmode
)
5314 return expand_and (tmode
, temp
,
5315 gen_int_mode (width_mask
, tmode
),
5318 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5319 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5320 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5323 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5324 NULL_RTX
, value_mode
, VOIDmode
,
5325 int_size_in_bytes (type
));
5331 rtx addr
= XEXP (target
, 0);
5332 rtx to_rtx
= target
;
5334 /* If a value is wanted, it must be the lhs;
5335 so make the address stable for multiple use. */
5337 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5338 && ! CONSTANT_ADDRESS_P (addr
)
5339 /* A frame-pointer reference is already stable. */
5340 && ! (GET_CODE (addr
) == PLUS
5341 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5342 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5343 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5344 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5346 /* Now build a reference to just the desired component. */
5348 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5350 if (to_rtx
== target
)
5351 to_rtx
= copy_rtx (to_rtx
);
5353 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5354 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5355 set_mem_alias_set (to_rtx
, alias_set
);
5357 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5361 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5362 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5363 codes and find the ultimate containing object, which we return.
5365 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5366 bit position, and *PUNSIGNEDP to the signedness of the field.
5367 If the position of the field is variable, we store a tree
5368 giving the variable offset (in units) in *POFFSET.
5369 This offset is in addition to the bit position.
5370 If the position is not variable, we store 0 in *POFFSET.
5372 If any of the extraction expressions is volatile,
5373 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5375 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5376 is a mode that can be used to access the field. In that case, *PBITSIZE
5379 If the field describes a variable-sized object, *PMODE is set to
5380 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5381 this case, but the address of the object can be found. */
5384 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5385 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5386 enum machine_mode
*pmode
, int *punsignedp
,
5390 enum machine_mode mode
= VOIDmode
;
5391 tree offset
= size_zero_node
;
5392 tree bit_offset
= bitsize_zero_node
;
5393 tree placeholder_ptr
= 0;
5396 /* First get the mode, signedness, and size. We do this from just the
5397 outermost expression. */
5398 if (TREE_CODE (exp
) == COMPONENT_REF
)
5400 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5401 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5402 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5404 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5406 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5408 size_tree
= TREE_OPERAND (exp
, 1);
5409 *punsignedp
= TREE_UNSIGNED (exp
);
5413 mode
= TYPE_MODE (TREE_TYPE (exp
));
5414 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5416 if (mode
== BLKmode
)
5417 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5419 *pbitsize
= GET_MODE_BITSIZE (mode
);
5424 if (! host_integerp (size_tree
, 1))
5425 mode
= BLKmode
, *pbitsize
= -1;
5427 *pbitsize
= tree_low_cst (size_tree
, 1);
5430 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5431 and find the ultimate containing object. */
5434 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5435 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5436 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5438 tree field
= TREE_OPERAND (exp
, 1);
5439 tree this_offset
= DECL_FIELD_OFFSET (field
);
5441 /* If this field hasn't been filled in yet, don't go
5442 past it. This should only happen when folding expressions
5443 made during type construction. */
5444 if (this_offset
== 0)
5446 else if (CONTAINS_PLACEHOLDER_P (this_offset
))
5447 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5449 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5450 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5451 DECL_FIELD_BIT_OFFSET (field
));
5453 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5456 else if (TREE_CODE (exp
) == ARRAY_REF
5457 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5459 tree index
= TREE_OPERAND (exp
, 1);
5460 tree array
= TREE_OPERAND (exp
, 0);
5461 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5462 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5463 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5465 /* We assume all arrays have sizes that are a multiple of a byte.
5466 First subtract the lower bound, if any, in the type of the
5467 index, then convert to sizetype and multiply by the size of the
5469 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5470 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5473 /* If the index has a self-referential type, pass it to a
5474 WITH_RECORD_EXPR; if the component size is, pass our
5475 component to one. */
5476 if (CONTAINS_PLACEHOLDER_P (index
))
5477 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5478 if (CONTAINS_PLACEHOLDER_P (unit_size
))
5479 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5481 offset
= size_binop (PLUS_EXPR
, offset
,
5482 size_binop (MULT_EXPR
,
5483 convert (sizetype
, index
),
5487 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5489 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5491 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5492 We might have been called from tree optimization where we
5493 haven't set up an object yet. */
5502 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5503 conversions that don't change the mode, and all view conversions
5504 except those that need to "step up" the alignment. */
5505 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5506 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5507 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5508 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5510 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5511 < BIGGEST_ALIGNMENT
)
5512 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5513 || TYPE_ALIGN_OK (TREE_TYPE
5514 (TREE_OPERAND (exp
, 0))))))
5515 && ! ((TREE_CODE (exp
) == NOP_EXPR
5516 || TREE_CODE (exp
) == CONVERT_EXPR
)
5517 && (TYPE_MODE (TREE_TYPE (exp
))
5518 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5521 /* If any reference in the chain is volatile, the effect is volatile. */
5522 if (TREE_THIS_VOLATILE (exp
))
5525 exp
= TREE_OPERAND (exp
, 0);
5528 /* If OFFSET is constant, see if we can return the whole thing as a
5529 constant bit position. Otherwise, split it up. */
5530 if (host_integerp (offset
, 0)
5531 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5533 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5534 && host_integerp (tem
, 0))
5535 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5537 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
/* NOTE(review): this extract is line-shredded (each original source line is
   split mid-token) and interior lines are missing (e.g. the return type,
   opening brace, and the case labels preceding ARRAY_RANGE_REF).  Restore
   the pristine expr.c before changing any code here; only comments are
   added below, the code bytes are untouched.  */
5543 /* Return 1 if T is an expression that get_inner_reference handles. */
5546 handled_component_p (tree t
)
5548 switch (TREE_CODE (t
))
5553 case ARRAY_RANGE_REF
:
5554 case NON_LVALUE_EXPR
:
5555 case VIEW_CONVERT_EXPR
:
5558     /* ??? Sure they are handled, but get_inner_reference may return
5559 a different PBITSIZE, depending upon whether the expression is
5560 wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
/* For the conversion case that follows, "handled" additionally requires the
   conversion not to change the machine mode -- presumably the NOP/CONVERT
   case whose label was lost in extraction; TODO confirm against pristine
   source.  */
5563 return (TYPE_MODE (TREE_TYPE (t
))
5564 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
/* NOTE(review): this extract is line-shredded (each original source line is
   split mid-token) and interior lines are missing (declarations of op1/op2,
   several case labels, closing braces, some return statements).  Restore the
   pristine expr.c before changing any code here; only comments are added
   below, the code bytes are untouched.  */
5571 /* Given an rtx VALUE that may contain additions and multiplications, return
5572 an equivalent value that just refers to a register, memory, or constant.
5573 This is done by generating instructions to perform the arithmetic and
5574 returning a pseudo-register containing the value.
5576 The returned value may be a REG, SUBREG, MEM or constant.  */
5579 force_operand (rtx value
, rtx target
)
5582 /* Use subtarget as the target for operand 0 of a binary operation.  */
5583 rtx subtarget
= get_subtarget (target
);
5584 enum rtx_code code
= GET_CODE (value
);
5586 /* Check for a PIC address load.  */
5587 if ((code
== PLUS
|| code
== MINUS
)
5588 && XEXP (value
, 0) == pic_offset_table_rtx
5589 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5590 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5591 || GET_CODE (XEXP (value
, 1)) == CONST
))
/* PIC-relative address: materialize it with a plain move into a fresh
   pseudo rather than decomposing the PLUS/MINUS.  */
5594 subtarget
= gen_reg_rtx (GET_MODE (value
));
5595 emit_move_insn (subtarget
, value
);
5599 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5602 target
= gen_reg_rtx (GET_MODE (value
));
5603 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5604 code
== ZERO_EXTEND
);
/* GET_RTX_CLASS '2'/'c' = binary / commutative-binary rtx codes.  */
5608 if (GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
5610 op2
= XEXP (value
, 1);
5611 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
/* Canonicalize (MINUS x const) as (PLUS x -const) -- presumably so the
   virtual-register special case below also fires for subtraction; TODO
   confirm against pristine source (the code change between 5613 and 5616
   is missing from this extract).  */
5613 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5616 op2
= negate_rtx (GET_MODE (value
), op2
);
5619 /* Check for an addition with OP2 a constant integer and our first
5620 operand a PLUS of a virtual register and something else.  In that
5621 case, we want to emit the sum of the virtual register and the
5622 constant first and then add the other value.  This allows virtual
5623 register instantiation to simply modify the constant rather than
5624 creating another one around this addition.  */
5625 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5626 && GET_CODE (XEXP (value
, 0)) == PLUS
5627 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5628 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5629 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5631 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5632 XEXP (XEXP (value
, 0), 0), op2
,
5633 subtarget
, 0, OPTAB_LIB_WIDEN
);
5634 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5635 force_operand (XEXP (XEXP (value
,
5637 target
, 0, OPTAB_LIB_WIDEN
);
/* General binary case: force both operands, then dispatch -- the switch
   statement and most of its case labels are missing from this extract.  */
5640 op1
= force_operand (XEXP (value
, 0), subtarget
);
5641 op2
= force_operand (op2
, NULL_RTX
);
5645 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5647 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5648 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5649 target
, 1, OPTAB_LIB_WIDEN
);
5651 return expand_divmod (0,
5652 FLOAT_MODE_P (GET_MODE (value
))
5653 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5654 GET_MODE (value
), op1
, op2
, target
, 0);
5657 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5661 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5665 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5669 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5670 target
, 0, OPTAB_LIB_WIDEN
);
5673 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5674 target
, 1, OPTAB_LIB_WIDEN
);
/* GET_RTX_CLASS '1' = unary rtx codes.  */
5677 if (GET_RTX_CLASS (code
) == '1')
5679 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5680 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5683 #ifdef INSN_SCHEDULING
5684 /* On machines that have insn scheduling, we want all memory reference to be
5685 explicit, so we need to deal with such paradoxical SUBREGs.  */
5686 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5687 && (GET_MODE_SIZE (GET_MODE (value
))
5688 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5690 = simplify_gen_subreg (GET_MODE (value
),
5691 force_reg (GET_MODE (SUBREG_REG (value
)),
5692 force_operand (SUBREG_REG (value
),
5694 GET_MODE (SUBREG_REG (value
)),
5695 SUBREG_BYTE (value
));
/* NOTE(review): this extract is line-shredded (each original source line is
   split mid-token) and interior lines are missing (the enclosing if of the
   5723 condition, many case labels, braces, several return statements).
   Restore the pristine expr.c before changing any code here; only comments
   are added below, the code bytes are untouched.  */
5701 /* Subroutine of expand_expr: return nonzero iff there is no way that
5702 EXP can reference X, which is being modified.  TOP_P is nonzero if this
5703 call is going to be used to determine whether we need a temporary
5704 for EXP, as opposed to a recursive call to this function.
5706 It is always safe for this routine to return zero since it merely
5707 searches for optimization opportunities.  */
5710 safe_from_p (rtx x
, tree exp
, int top_p
)
5714 static tree save_expr_list
;
/* The following is the tail of a larger condition whose head was lost in
   extraction -- note it begins with "||".  */
5717 /* If EXP has varying size, we MUST use a target since we currently
5718 have no way of allocating temporaries of variable size
5719 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5720 So we assume here that something at a higher level has prevented a
5721 clash.  This is somewhat bogus, but the best we can do.  Only
5722 do this when X is BLKmode and when we are at the top level.  */
5723 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5724 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5725 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5726 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5727 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5729 && GET_MODE (x
) == BLKmode
)
5730 /* If X is in the outgoing argument area, it is always safe.  */
5731 || (GET_CODE (x
) == MEM
5732 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5733 || (GET_CODE (XEXP (x
, 0)) == PLUS
5734 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5737 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5738 find the underlying pseudo.  */
5739 if (GET_CODE (x
) == SUBREG
)
5742 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5746 /* A SAVE_EXPR might appear many times in the expression passed to the
5747 top-level safe_from_p call, and if it has a complex subexpression,
5748 examining it multiple times could result in a combinatorial explosion.
5749 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5750 with optimization took about 28 minutes to compile -- even though it was
5751 only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5752 and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5753 we have processed.  Note that the only test of top_p was above.  */
5762 rtn
= safe_from_p (x
, exp
, 0);
/* Clear the TREE_PRIVATE marks set during the recursive walk.  */
5764 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5765 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5770 /* Now look at our tree code and possibly recurse.  */
5771 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5774 exp_rtl
= DECL_RTL_IF_SET (exp
);
5781 if (TREE_CODE (exp
) == TREE_LIST
)
5785 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5787 exp
= TREE_CHAIN (exp
);
5790 if (TREE_CODE (exp
) != TREE_LIST
)
5791 return safe_from_p (x
, exp
, 0);
5794 else if (TREE_CODE (exp
) == ERROR_MARK
)
5795 return 1; /* An already-visited SAVE_EXPR? */
5801 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5806 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5810 /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5811 the expression.  If it is set, we conflict iff we are that rtx or
5812 both are in memory.  Otherwise, we check all operands of the
5813 expression recursively.  */
5815 switch (TREE_CODE (exp
))
5818 /* If the operand is static or we are static, we can't conflict.
5819 Likewise if we don't conflict with the operand at all.  */
5820 if (staticp (TREE_OPERAND (exp
, 0))
5821 || TREE_STATIC (exp
)
5822 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5825 /* Otherwise, the only way this can conflict is if we are taking
5826 the address of a DECL a that address if part of X, which is
5828 exp
= TREE_OPERAND (exp
, 0);
5831 if (!DECL_RTL_SET_P (exp
)
5832 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5835 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5840 if (GET_CODE (x
) == MEM
5841 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5842 get_alias_set (exp
)))
5847 /* Assume that the call will clobber all hard registers and
5849 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5850 || GET_CODE (x
) == MEM
)
5855 /* If a sequence exists, we would have to scan every instruction
5856 in the sequence to see if it was safe.  This is probably not
5858 if (RTL_EXPR_SEQUENCE (exp
))
5861 exp_rtl
= RTL_EXPR_RTL (exp
);
5864 case WITH_CLEANUP_EXPR
:
5865 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5868 case CLEANUP_POINT_EXPR
:
5869 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5872 exp_rtl
= SAVE_EXPR_RTL (exp
);
5876 /* If we've already scanned this, don't do it again.  Otherwise,
5877 show we've scanned it and record for clearing the flag if we're
5879 if (TREE_PRIVATE (exp
))
5882 TREE_PRIVATE (exp
) = 1;
5883 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5885 TREE_PRIVATE (exp
) = 0;
5889 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5893 /* The only operand we look at is operand 1.  The rest aren't
5894 part of the expression.  */
5895 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5901 /* If we have an rtx, we do not need to scan our operands.  */
5905 nops
= first_rtl_op (TREE_CODE (exp
));
5906 for (i
= 0; i
< nops
; i
++)
5907 if (TREE_OPERAND (exp
, i
) != 0
5908 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5911 /* If this is a language-specific tree code, it may require
5912 special handling.  */
5913 if ((unsigned int) TREE_CODE (exp
)
5914 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5915 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5919 /* If we have an rtl, find any enclosed object.  Then see if we conflict
5923 if (GET_CODE (exp_rtl
) == SUBREG
)
5925 exp_rtl
= SUBREG_REG (exp_rtl
);
5926 if (GET_CODE (exp_rtl
) == REG
5927 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5931 /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5932 are memory and they conflict.  */
5933 return ! (rtx_equal_p (x
, exp_rtl
)
5934 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5935 && true_dependence (exp_rtl
, VOIDmode
, x
,
5936 rtx_addr_varies_p
)));
5939 /* If we reach here, it is safe.  */
/* NOTE(review): only a fragment of this function survives in the extract --
   the signature, case labels, default branch, and braces are all missing.
   From the surviving comment it maps a variable/parameter tree node to its
   DECL_RTL and returns 0 otherwise; restore from pristine expr.c.  */
5943 /* Subroutine of expand_expr: return rtx if EXP is a
5944 variable or parameter; else return 0.  */
5950 switch (TREE_CODE (exp
))
5954 return DECL_RTL (exp
);
/* NOTE(review): this extract is line-shredded (each original source line is
   split mid-token) and interior lines are missing (the function's braces,
   several case labels such as the INTEGER_CST/MULT_EXPR/COMPOUND_EXPR/
   COND_EXPR labels implied by the surviving comments, and the default
   return).  Restore the pristine expr.c before changing any code here;
   only comments are added below, the code bytes are untouched.  */
5960 /* Return the highest power of two that EXP is known to be a multiple of.
5961 This is used in updating alignment of MEMs in array references.  */
5963 static unsigned HOST_WIDE_INT
5964 highest_pow2_factor (tree exp
)
5966 unsigned HOST_WIDE_INT c0
, c1
;
5968 switch (TREE_CODE (exp
))
5971 /* We can find the lowest bit that's a one.  If the low
5972 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5973 We need to handle this case since we can find it in a COND_EXPR,
5974 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5975 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5977 if (TREE_CONSTANT_OVERFLOW (exp
))
5978 return BIGGEST_ALIGNMENT
;
5981 /* Note: tree_low_cst is intentionally not used here,
5982 we don't care about the upper bits.  */
5983 c0
= TREE_INT_CST_LOW (exp
);
/* Isolate the lowest set bit; the line computing c0 &= -c0 (or similar)
   appears to be missing from this extract -- TODO confirm.  */
5985 return c0
? c0
: BIGGEST_ALIGNMENT
;
/* Sum/difference/min/max: only the common factor of both operands is
   guaranteed.  */
5989 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
5990 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5991 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
5992 return MIN (c0
, c1
);
5995 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
5996 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
/* Division by a known power of two divides the guaranteed factor.  */
5999 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6001 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6002 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6004 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6005 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6006 return MAX (1, c0
/ c1
);
/* Value-preserving wrappers: look through to the operand.  */
6010 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6011 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6012 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6015 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6018 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6019 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6020 return MIN (c0
, c1
);
/* NOTE(review): this extract is line-shredded and the function's braces are
   missing; restore the pristine expr.c before changing code.  Only comments
   are added below.  */
6029 /* Similar, except that it is known that the expression must be a multiple
6030 of the alignment of TYPE.  */
6032 static unsigned HOST_WIDE_INT
6033 highest_pow2_factor_for_type (tree type
, tree exp
)
6035 unsigned HOST_WIDE_INT type_align
, factor
;
/* Combine the factor derivable from EXP itself with the byte alignment
   implied by TYPE, taking whichever guarantee is stronger.  */
6037 factor
= highest_pow2_factor (exp
);
6038 type_align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
6039 return MAX (factor
, type_align
);
/* NOTE(review): this extract is line-shredded (each original source line is
   split mid-token) and interior lines are missing (the return type, the
   declaration of elt, braces, the return of elt in the first scan, and the
   final return 0).  Restore the pristine expr.c before changing any code
   here; only comments are added below, the code bytes are untouched.  */
6042 /* Return an object on the placeholder list that matches EXP, a
6043 PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
6044 PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
6045 tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6046 is a location which initially points to a starting location in the
6047 placeholder list (zero means start of the list) and where a pointer into
6048 the placeholder list at which the object is found is placed.  */
6051 find_placeholder (tree exp
, tree
*plist
)
6053 tree type
= TREE_TYPE (exp
);
6054 tree placeholder_expr
;
/* Walk the placeholder list, optionally starting after *PLIST.  */
6056 for (placeholder_expr
6057 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6058 placeholder_expr
!= 0;
6059 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6061 tree need_type
= TYPE_MAIN_VARIANT (type
);
6064 /* Find the outermost reference that is of the type we want.  If none,
6065 see if any object has a type that is a pointer to the type we
/* First scan: descend through compound/cond expressions and through
   reference/unary/binary/expression-class nodes looking for an exact
   main-variant type match.  */
6067 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6068 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6069 || TREE_CODE (elt
) == COND_EXPR
)
6070 ? TREE_OPERAND (elt
, 1)
6071 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6072 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6073 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6074 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6075 ? TREE_OPERAND (elt
, 0) : 0))
6076 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6079 *plist
= placeholder_expr
;
/* Second scan: same descent, but accept a pointer to the needed type and
   return an INDIRECT_REF of it.  */
6083 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6085 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6086 || TREE_CODE (elt
) == COND_EXPR
)
6087 ? TREE_OPERAND (elt
, 1)
6088 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6089 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6090 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6091 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6092 ? TREE_OPERAND (elt
, 0) : 0))
6093 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6094 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6098 *plist
= placeholder_expr
;
6099 return build1 (INDIRECT_REF
, need_type
, elt
);
/* NOTE(review): this extract is line-shredded (each original source line is
   split mid-token) and interior lines are missing (return type, braces, the
   body of the "if (! safe_from_p ...)" guard -- presumably it forces TARGET
   to 0; TODO confirm against pristine source).  Restore the pristine expr.c
   before changing any code here; only comments are added below.  */
6106 /* Subroutine of expand_expr.  Expand the two operands of a binary
6107 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6108 The value may be stored in TARGET if TARGET is nonzero.  The
6109 MODIFIER argument is as documented by expand_expr.  */
6112 expand_operands (tree exp0
, tree exp1
, rtx target
, rtx
*op0
, rtx
*op1
,
6113 enum expand_modifier modifier
)
6115 if (! safe_from_p (target
, exp1
, 1))
/* Identical operands need only be expanded once; share the rtx.  */
6117 if (operand_equal_p (exp0
, exp1
, 0))
6119 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6120 *op1
= copy_rtx (*op0
);
6124 /* If we need to preserve evaluation order, copy exp0 into its own
6125 temporary variable so that it can't be clobbered by exp1.  */
6126 if (flag_evaluation_order
&& TREE_SIDE_EFFECTS (exp1
))
6127 exp0
= save_expr (exp0
);
6128 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6129 *op1
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, modifier
);
6134 /* expand_expr: generate code for computing expression EXP.
6135 An rtx for the computed value is returned. The value is never null.
6136 In the case of a void EXP, const0_rtx is returned.
6138 The value may be stored in TARGET if TARGET is nonzero.
6139 TARGET is just a suggestion; callers must assume that
6140 the rtx returned may not be the same as TARGET.
6142 If TARGET is CONST0_RTX, it means that the value will be ignored.
6144 If TMODE is not VOIDmode, it suggests generating the
6145 result in mode TMODE. But this is done only when convenient.
6146 Otherwise, TMODE is ignored and the value generated in its natural mode.
6147 TMODE is just a suggestion; callers must assume that
6148 the rtx returned may not have mode TMODE.
6150 Note that TARGET may have neither TMODE nor MODE. In that case, it
6151 probably will not be used.
6153 If MODIFIER is EXPAND_SUM then when EXP is an addition
6154 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6155 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6156 products as above, or REG or MEM, or constant.
6157 Ordinarily in such cases we would output mul or add instructions
6158 and then return a pseudo reg containing the sum.
6160 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6161 it also marks a label as absolutely required (it can't be dead).
6162 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6163 This is used for outputting expressions used in initializers.
6165 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6166 with a constant address even if that address is not normally legitimate.
6167 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6169 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6170 a call parameter. Such targets require special care as we haven't yet
6171 marked TARGET so that it's safe from being trashed by libcalls. We
6172 don't want to use TARGET for anything but the final result;
6173 Intermediate values must go elsewhere. Additionally, calls to
6174 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6176 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6177 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6178 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6179 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6183 expand_expr_real (tree exp
, rtx target
, enum machine_mode tmode
,
6184 enum expand_modifier modifier
, rtx
*alt_rtl
)
6187 tree type
= TREE_TYPE (exp
);
6188 int unsignedp
= TREE_UNSIGNED (type
);
6189 enum machine_mode mode
;
6190 enum tree_code code
= TREE_CODE (exp
);
6192 rtx subtarget
, original_target
;
6196 /* Handle ERROR_MARK before anybody tries to access its type. */
6197 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6199 op0
= CONST0_RTX (tmode
);
6205 mode
= TYPE_MODE (type
);
6206 /* Use subtarget as the target for operand 0 of a binary operation. */
6207 subtarget
= get_subtarget (target
);
6208 original_target
= target
;
6209 ignore
= (target
== const0_rtx
6210 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6211 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6212 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
6213 && TREE_CODE (type
) == VOID_TYPE
));
6215 /* If we are going to ignore this result, we need only do something
6216 if there is a side-effect somewhere in the expression. If there
6217 is, short-circuit the most common cases here. Note that we must
6218 not call expand_expr with anything but const0_rtx in case this
6219 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6223 if (! TREE_SIDE_EFFECTS (exp
))
6226 /* Ensure we reference a volatile object even if value is ignored, but
6227 don't do this if all we are doing is taking its address. */
6228 if (TREE_THIS_VOLATILE (exp
)
6229 && TREE_CODE (exp
) != FUNCTION_DECL
6230 && mode
!= VOIDmode
&& mode
!= BLKmode
6231 && modifier
!= EXPAND_CONST_ADDRESS
)
6233 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6234 if (GET_CODE (temp
) == MEM
)
6235 temp
= copy_to_reg (temp
);
6239 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6240 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6241 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6244 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6245 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6247 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6248 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6251 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6252 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6253 /* If the second operand has no side effects, just evaluate
6255 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6257 else if (code
== BIT_FIELD_REF
)
6259 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6260 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6261 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6268 /* If will do cse, generate all results into pseudo registers
6269 since 1) that allows cse to find more things
6270 and 2) otherwise cse could produce an insn the machine
6271 cannot support. An exception is a CONSTRUCTOR into a multi-word
6272 MEM: that's much more likely to be most efficient into the MEM.
6273 Another is a CALL_EXPR which must return in memory. */
6275 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6276 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6277 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6278 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6285 tree function
= decl_function_context (exp
);
6286 /* Labels in containing functions, or labels used from initializers,
6288 if (modifier
== EXPAND_INITIALIZER
6289 || (function
!= current_function_decl
6290 && function
!= inline_function_decl
6292 temp
= force_label_rtx (exp
);
6294 temp
= label_rtx (exp
);
6296 temp
= gen_rtx_MEM (FUNCTION_MODE
, gen_rtx_LABEL_REF (Pmode
, temp
));
6297 if (function
!= current_function_decl
6298 && function
!= inline_function_decl
&& function
!= 0)
6299 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6304 if (!DECL_RTL_SET_P (exp
))
6306 error ("%Jprior parameter's size depends on '%D'", exp
, exp
);
6307 return CONST0_RTX (mode
);
6310 /* ... fall through ... */
6313 /* If a static var's type was incomplete when the decl was written,
6314 but the type is complete now, lay out the decl now. */
6315 if (DECL_SIZE (exp
) == 0
6316 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6317 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6318 layout_decl (exp
, 0);
6320 /* ... fall through ... */
6324 if (DECL_RTL (exp
) == 0)
6327 /* Ensure variable marked as used even if it doesn't go through
6328 a parser. If it hasn't be used yet, write out an external
6330 if (! TREE_USED (exp
))
6332 assemble_external (exp
);
6333 TREE_USED (exp
) = 1;
6336 /* Show we haven't gotten RTL for this yet. */
6339 /* Handle variables inherited from containing functions. */
6340 context
= decl_function_context (exp
);
6342 /* We treat inline_function_decl as an alias for the current function
6343 because that is the inline function whose vars, types, etc.
6344 are being merged into the current function.
6345 See expand_inline_function. */
6347 if (context
!= 0 && context
!= current_function_decl
6348 && context
!= inline_function_decl
6349 /* If var is static, we don't need a static chain to access it. */
6350 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6351 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6355 /* Mark as non-local and addressable. */
6356 DECL_NONLOCAL (exp
) = 1;
6357 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6359 (*lang_hooks
.mark_addressable
) (exp
);
6360 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6362 addr
= XEXP (DECL_RTL (exp
), 0);
6363 if (GET_CODE (addr
) == MEM
)
6365 = replace_equiv_address (addr
,
6366 fix_lexical_addr (XEXP (addr
, 0), exp
));
6368 addr
= fix_lexical_addr (addr
, exp
);
6370 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6373 /* This is the case of an array whose size is to be determined
6374 from its initializer, while the initializer is still being parsed.
6377 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6378 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6379 temp
= validize_mem (DECL_RTL (exp
));
6381 /* If DECL_RTL is memory, we are in the normal case and either
6382 the address is not valid or it is not a register and -fforce-addr
6383 is specified, get the address into a register. */
6385 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6386 && modifier
!= EXPAND_CONST_ADDRESS
6387 && modifier
!= EXPAND_SUM
6388 && modifier
!= EXPAND_INITIALIZER
6389 && (! memory_address_p (DECL_MODE (exp
),
6390 XEXP (DECL_RTL (exp
), 0))
6392 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6395 *alt_rtl
= DECL_RTL (exp
);
6396 temp
= replace_equiv_address (DECL_RTL (exp
),
6397 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6400 /* If we got something, return it. But first, set the alignment
6401 if the address is a register. */
6404 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6405 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6410 /* If the mode of DECL_RTL does not match that of the decl, it
6411 must be a promoted value. We return a SUBREG of the wanted mode,
6412 but mark it so that we know that it was already extended. */
6414 if (GET_CODE (DECL_RTL (exp
)) == REG
6415 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6417 /* Get the signedness used for this variable. Ensure we get the
6418 same mode we got when the variable was declared. */
6419 if (GET_MODE (DECL_RTL (exp
))
6420 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6421 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6424 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6425 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6426 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6430 return DECL_RTL (exp
);
6433 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6434 TREE_INT_CST_HIGH (exp
), mode
);
6436 /* ??? If overflow is set, fold will have done an incomplete job,
6437 which can result in (plus xx (const_int 0)), which can get
6438 simplified by validate_replace_rtx during virtual register
6439 instantiation, which can result in unrecognizable insns.
6440 Avoid this by forcing all overflows into registers. */
6441 if (TREE_CONSTANT_OVERFLOW (exp
)
6442 && modifier
!= EXPAND_INITIALIZER
)
6443 temp
= force_reg (mode
, temp
);
6448 return const_vector_from_tree (exp
);
6451 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6454 /* If optimized, generate immediate CONST_DOUBLE
6455 which will be turned into memory by reload if necessary.
6457 We used to force a register so that loop.c could see it. But
6458 this does not allow gen_* patterns to perform optimizations with
6459 the constants. It also produces two insns in cases like "x = 1.0;".
6460 On most machines, floating-point constants are not permitted in
6461 many insns, so we'd end up copying it to a register in any case.
6463 Now, we do the copying in expand_binop, if appropriate. */
6464 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6465 TYPE_MODE (TREE_TYPE (exp
)));
6468 /* Handle evaluating a complex constant in a CONCAT target. */
6469 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6471 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6474 rtarg
= XEXP (original_target
, 0);
6475 itarg
= XEXP (original_target
, 1);
6477 /* Move the real and imaginary parts separately. */
6478 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6479 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6482 emit_move_insn (rtarg
, op0
);
6484 emit_move_insn (itarg
, op1
);
6486 return original_target
;
6489 /* ... fall through ... */
6492 temp
= output_constant_def (exp
, 1);
6494 /* temp contains a constant address.
6495 On RISC machines where a constant address isn't valid,
6496 make some insns to get that address into a register. */
6497 if (modifier
!= EXPAND_CONST_ADDRESS
6498 && modifier
!= EXPAND_INITIALIZER
6499 && modifier
!= EXPAND_SUM
6500 && (! memory_address_p (mode
, XEXP (temp
, 0))
6501 || flag_force_addr
))
6502 return replace_equiv_address (temp
,
6503 copy_rtx (XEXP (temp
, 0)));
6506 case EXPR_WITH_FILE_LOCATION
:
6509 struct file_stack fs
;
6511 fs
.location
= input_location
;
6512 fs
.next
= expr_wfl_stack
;
6513 input_filename
= EXPR_WFL_FILENAME (exp
);
6514 input_line
= EXPR_WFL_LINENO (exp
);
6515 expr_wfl_stack
= &fs
;
6516 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6517 emit_line_note (input_location
);
6518 /* Possibly avoid switching back and forth here. */
6519 to_return
= expand_expr (EXPR_WFL_NODE (exp
),
6520 (ignore
? const0_rtx
: target
),
6522 if (expr_wfl_stack
!= &fs
)
6524 input_location
= fs
.location
;
6525 expr_wfl_stack
= fs
.next
;
6530 context
= decl_function_context (exp
);
6532 /* If this SAVE_EXPR was at global context, assume we are an
6533 initialization function and move it into our context. */
6535 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6537 /* We treat inline_function_decl as an alias for the current function
6538 because that is the inline function whose vars, types, etc.
6539 are being merged into the current function.
6540 See expand_inline_function. */
6541 if (context
== current_function_decl
|| context
== inline_function_decl
)
6544 /* If this is non-local, handle it. */
6547 /* The following call just exists to abort if the context is
6548 not of a containing function. */
6549 find_function_data (context
);
6551 temp
= SAVE_EXPR_RTL (exp
);
6552 if (temp
&& GET_CODE (temp
) == REG
)
6554 put_var_into_stack (exp
, /*rescan=*/true);
6555 temp
= SAVE_EXPR_RTL (exp
);
6557 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6560 replace_equiv_address (temp
,
6561 fix_lexical_addr (XEXP (temp
, 0), exp
));
6563 if (SAVE_EXPR_RTL (exp
) == 0)
6565 if (mode
== VOIDmode
)
6568 temp
= assign_temp (build_qualified_type (type
,
6570 | TYPE_QUAL_CONST
)),
6573 SAVE_EXPR_RTL (exp
) = temp
;
6574 if (!optimize
&& GET_CODE (temp
) == REG
)
6575 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6578 /* If the mode of TEMP does not match that of the expression, it
6579 must be a promoted value. We pass store_expr a SUBREG of the
6580 wanted mode but mark it so that we know that it was already
6583 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6585 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6586 promote_mode (type
, mode
, &unsignedp
, 0);
6587 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6588 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6591 if (temp
== const0_rtx
)
6592 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6594 store_expr (TREE_OPERAND (exp
, 0), temp
,
6595 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6597 TREE_USED (exp
) = 1;
6600 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6601 must be a promoted value. We return a SUBREG of the wanted mode,
6602 but mark it so that we know that it was already extended. */
6604 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6605 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6607 /* Compute the signedness and make the proper SUBREG. */
6608 promote_mode (type
, mode
, &unsignedp
, 0);
6609 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6610 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6611 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6615 return SAVE_EXPR_RTL (exp
);
6620 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6621 TREE_OPERAND (exp
, 0)
6622 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6626 case PLACEHOLDER_EXPR
:
6628 tree old_list
= placeholder_list
;
6629 tree placeholder_expr
= 0;
6631 exp
= find_placeholder (exp
, &placeholder_expr
);
6635 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6636 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6637 placeholder_list
= old_list
;
6641 case WITH_RECORD_EXPR
:
6642 /* Put the object on the placeholder list, expand our first operand,
6643 and pop the list. */
6644 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6646 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6648 placeholder_list
= TREE_CHAIN (placeholder_list
);
6652 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6653 expand_goto (TREE_OPERAND (exp
, 0));
6655 expand_computed_goto (TREE_OPERAND (exp
, 0));
6659 expand_exit_loop_if_false (NULL
,
6660 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6663 case LABELED_BLOCK_EXPR
:
6664 if (LABELED_BLOCK_BODY (exp
))
6665 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6666 /* Should perhaps use expand_label, but this is simpler and safer. */
6667 do_pending_stack_adjust ();
6668 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6671 case EXIT_BLOCK_EXPR
:
6672 if (EXIT_BLOCK_RETURN (exp
))
6673 sorry ("returned value in block_exit_expr");
6674 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6679 expand_start_loop (1);
6680 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6688 tree vars
= TREE_OPERAND (exp
, 0);
6690 /* Need to open a binding contour here because
6691 if there are any cleanups they must be contained here. */
6692 expand_start_bindings (2);
6694 /* Mark the corresponding BLOCK for output in its proper place. */
6695 if (TREE_OPERAND (exp
, 2) != 0
6696 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6697 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6699 /* If VARS have not yet been expanded, expand them now. */
6702 if (!DECL_RTL_SET_P (vars
))
6704 expand_decl_init (vars
);
6705 vars
= TREE_CHAIN (vars
);
6708 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6710 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6716 if (RTL_EXPR_SEQUENCE (exp
))
6718 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6720 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6721 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6723 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6724 free_temps_for_rtl_expr (exp
);
6726 *alt_rtl
= RTL_EXPR_ALT_RTL (exp
);
6727 return RTL_EXPR_RTL (exp
);
6730 /* If we don't need the result, just ensure we evaluate any
6736 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6737 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6742 /* All elts simple constants => refer to a constant in memory. But
6743 if this is a non-BLKmode mode, let it store a field at a time
6744 since that should make a CONST_INT or CONST_DOUBLE when we
6745 fold. Likewise, if we have a target we can use, it is best to
6746 store directly into the target unless the type is large enough
6747 that memcpy will be used. If we are making an initializer and
6748 all operands are constant, put it in memory as well.
6750 FIXME: Avoid trying to fill vector constructors piece-meal.
6751 Output them with output_constant_def below unless we're sure
6752 they're zeros. This should go away when vector initializers
6753 are treated like VECTOR_CST instead of arrays.
6755 else if ((TREE_STATIC (exp
)
6756 && ((mode
== BLKmode
6757 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6758 || TREE_ADDRESSABLE (exp
)
6759 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6760 && (! MOVE_BY_PIECES_P
6761 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6763 && ((TREE_CODE (type
) == VECTOR_TYPE
6764 && !is_zeros_p (exp
))
6765 || ! mostly_zeros_p (exp
)))))
6766 || ((modifier
== EXPAND_INITIALIZER
6767 || modifier
== EXPAND_CONST_ADDRESS
)
6768 && TREE_CONSTANT (exp
)))
6770 rtx constructor
= output_constant_def (exp
, 1);
6772 if (modifier
!= EXPAND_CONST_ADDRESS
6773 && modifier
!= EXPAND_INITIALIZER
6774 && modifier
!= EXPAND_SUM
)
6775 constructor
= validize_mem (constructor
);
6781 /* Handle calls that pass values in multiple non-contiguous
6782 locations. The Irix 6 ABI has examples of this. */
6783 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6784 || GET_CODE (target
) == PARALLEL
6785 || modifier
== EXPAND_STACK_PARM
)
6787 = assign_temp (build_qualified_type (type
,
6789 | (TREE_READONLY (exp
)
6790 * TYPE_QUAL_CONST
))),
6791 0, TREE_ADDRESSABLE (exp
), 1);
6793 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6799 tree exp1
= TREE_OPERAND (exp
, 0);
6801 tree string
= string_constant (exp1
, &index
);
6803 /* Try to optimize reads from const strings. */
6805 && TREE_CODE (string
) == STRING_CST
6806 && TREE_CODE (index
) == INTEGER_CST
6807 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6808 && GET_MODE_CLASS (mode
) == MODE_INT
6809 && GET_MODE_SIZE (mode
) == 1
6810 && modifier
!= EXPAND_WRITE
)
6811 return gen_int_mode (TREE_STRING_POINTER (string
)
6812 [TREE_INT_CST_LOW (index
)], mode
);
6814 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6815 op0
= memory_address (mode
, op0
);
6816 temp
= gen_rtx_MEM (mode
, op0
);
6817 set_mem_attributes (temp
, exp
, 0);
6819 /* If we are writing to this object and its type is a record with
6820 readonly fields, we must mark it as readonly so it will
6821 conflict with readonly references to those fields. */
6822 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6823 RTX_UNCHANGING_P (temp
) = 1;
6829 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6833 tree array
= TREE_OPERAND (exp
, 0);
6834 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6835 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6836 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6839 /* Optimize the special-case of a zero lower bound.
6841 We convert the low_bound to sizetype to avoid some problems
6842 with constant folding. (E.g. suppose the lower bound is 1,
6843 and its mode is QI. Without the conversion, (ARRAY
6844 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6845 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6847 if (! integer_zerop (low_bound
))
6848 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6850 /* Fold an expression like: "foo"[2].
6851 This is not done in fold so it won't happen inside &.
6852 Don't fold if this is for wide characters since it's too
6853 difficult to do correctly and this is a very rare case. */
6855 if (modifier
!= EXPAND_CONST_ADDRESS
6856 && modifier
!= EXPAND_INITIALIZER
6857 && modifier
!= EXPAND_MEMORY
6858 && TREE_CODE (array
) == STRING_CST
6859 && TREE_CODE (index
) == INTEGER_CST
6860 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6861 && GET_MODE_CLASS (mode
) == MODE_INT
6862 && GET_MODE_SIZE (mode
) == 1)
6863 return gen_int_mode (TREE_STRING_POINTER (array
)
6864 [TREE_INT_CST_LOW (index
)], mode
);
6866 /* If this is a constant index into a constant array,
6867 just get the value from the array. Handle both the cases when
6868 we have an explicit constructor and when our operand is a variable
6869 that was declared const. */
6871 if (modifier
!= EXPAND_CONST_ADDRESS
6872 && modifier
!= EXPAND_INITIALIZER
6873 && modifier
!= EXPAND_MEMORY
6874 && TREE_CODE (array
) == CONSTRUCTOR
6875 && ! TREE_SIDE_EFFECTS (array
)
6876 && TREE_CODE (index
) == INTEGER_CST
6877 && 0 > compare_tree_int (index
,
6878 list_length (CONSTRUCTOR_ELTS
6879 (TREE_OPERAND (exp
, 0)))))
6883 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6884 i
= TREE_INT_CST_LOW (index
);
6885 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6889 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6893 else if (optimize
>= 1
6894 && modifier
!= EXPAND_CONST_ADDRESS
6895 && modifier
!= EXPAND_INITIALIZER
6896 && modifier
!= EXPAND_MEMORY
6897 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6898 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6899 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6900 && targetm
.binds_local_p (array
))
6902 if (TREE_CODE (index
) == INTEGER_CST
)
6904 tree init
= DECL_INITIAL (array
);
6906 if (TREE_CODE (init
) == CONSTRUCTOR
)
6910 for (elem
= CONSTRUCTOR_ELTS (init
);
6912 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6913 elem
= TREE_CHAIN (elem
))
6916 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6917 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6920 else if (TREE_CODE (init
) == STRING_CST
6921 && 0 > compare_tree_int (index
,
6922 TREE_STRING_LENGTH (init
)))
6924 tree type
= TREE_TYPE (TREE_TYPE (init
));
6925 enum machine_mode mode
= TYPE_MODE (type
);
6927 if (GET_MODE_CLASS (mode
) == MODE_INT
6928 && GET_MODE_SIZE (mode
) == 1)
6929 return gen_int_mode (TREE_STRING_POINTER (init
)
6930 [TREE_INT_CST_LOW (index
)], mode
);
6935 goto normal_inner_ref
;
6938 /* If the operand is a CONSTRUCTOR, we can just extract the
6939 appropriate field if it is present. */
6940 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
6944 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6945 elt
= TREE_CHAIN (elt
))
6946 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6947 /* We can normally use the value of the field in the
6948 CONSTRUCTOR. However, if this is a bitfield in
6949 an integral mode that we can fit in a HOST_WIDE_INT,
6950 we must mask only the number of bits in the bitfield,
6951 since this is done implicitly by the constructor. If
6952 the bitfield does not meet either of those conditions,
6953 we can't do this optimization. */
6954 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6955 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6957 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6958 <= HOST_BITS_PER_WIDE_INT
))))
6960 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6961 && modifier
== EXPAND_STACK_PARM
)
6963 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6964 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6966 HOST_WIDE_INT bitsize
6967 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6968 enum machine_mode imode
6969 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6971 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6973 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6974 op0
= expand_and (imode
, op0
, op1
, target
);
6979 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6982 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6984 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6992 goto normal_inner_ref
;
6995 case ARRAY_RANGE_REF
:
6998 enum machine_mode mode1
;
6999 HOST_WIDE_INT bitsize
, bitpos
;
7002 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7003 &mode1
, &unsignedp
, &volatilep
);
7006 /* If we got back the original object, something is wrong. Perhaps
7007 we are evaluating an expression too early. In any event, don't
7008 infinitely recurse. */
7012 /* If TEM's type is a union of variable size, pass TARGET to the inner
7013 computation, since it will need a temporary and TARGET is known
7014 to have to do. This occurs in unchecked conversion in Ada. */
7018 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7019 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7021 && modifier
!= EXPAND_STACK_PARM
7022 ? target
: NULL_RTX
),
7024 (modifier
== EXPAND_INITIALIZER
7025 || modifier
== EXPAND_CONST_ADDRESS
7026 || modifier
== EXPAND_STACK_PARM
)
7027 ? modifier
: EXPAND_NORMAL
);
7029 /* If this is a constant, put it into a register if it is a
7030 legitimate constant and OFFSET is 0 and memory if it isn't. */
7031 if (CONSTANT_P (op0
))
7033 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7034 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7036 op0
= force_reg (mode
, op0
);
7038 op0
= validize_mem (force_const_mem (mode
, op0
));
7041 /* Otherwise, if this object not in memory and we either have an
7042 offset or a BLKmode result, put it there. This case can't occur in
7043 C, but can in Ada if we have unchecked conversion of an expression
7044 from a scalar type to an array or record type or for an
7045 ARRAY_RANGE_REF whose type is BLKmode. */
7046 else if (GET_CODE (op0
) != MEM
7048 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7050 /* If the operand is a SAVE_EXPR, we can deal with this by
7051 forcing the SAVE_EXPR into memory. */
7052 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7054 put_var_into_stack (TREE_OPERAND (exp
, 0),
7056 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7061 = build_qualified_type (TREE_TYPE (tem
),
7062 (TYPE_QUALS (TREE_TYPE (tem
))
7063 | TYPE_QUAL_CONST
));
7064 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7066 emit_move_insn (memloc
, op0
);
7073 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7076 if (GET_CODE (op0
) != MEM
)
7079 #ifdef POINTERS_EXTEND_UNSIGNED
7080 if (GET_MODE (offset_rtx
) != Pmode
)
7081 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7083 if (GET_MODE (offset_rtx
) != ptr_mode
)
7084 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7087 if (GET_MODE (op0
) == BLKmode
7088 /* A constant address in OP0 can have VOIDmode, we must
7089 not try to call force_reg in that case. */
7090 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7092 && (bitpos
% bitsize
) == 0
7093 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7094 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7096 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7100 op0
= offset_address (op0
, offset_rtx
,
7101 highest_pow2_factor (offset
));
7104 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7105 record its alignment as BIGGEST_ALIGNMENT. */
7106 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7107 && is_aligning_offset (offset
, tem
))
7108 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7110 /* Don't forget about volatility even if this is a bitfield. */
7111 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7113 if (op0
== orig_op0
)
7114 op0
= copy_rtx (op0
);
7116 MEM_VOLATILE_P (op0
) = 1;
7119 /* The following code doesn't handle CONCAT.
7120 Assume only bitpos == 0 can be used for CONCAT, due to
7121 one element arrays having the same mode as its element. */
7122 if (GET_CODE (op0
) == CONCAT
)
7124 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7129 /* In cases where an aligned union has an unaligned object
7130 as a field, we might be extracting a BLKmode value from
7131 an integer-mode (e.g., SImode) object. Handle this case
7132 by doing the extract into an object as wide as the field
7133 (which we know to be the width of a basic mode), then
7134 storing into memory, and changing the mode to BLKmode. */
7135 if (mode1
== VOIDmode
7136 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7137 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7138 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7139 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7140 && modifier
!= EXPAND_CONST_ADDRESS
7141 && modifier
!= EXPAND_INITIALIZER
)
7142 /* If the field isn't aligned enough to fetch as a memref,
7143 fetch it as a bit field. */
7144 || (mode1
!= BLKmode
7145 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7146 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7147 || (GET_CODE (op0
) == MEM
7148 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7149 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7150 && ((modifier
== EXPAND_CONST_ADDRESS
7151 || modifier
== EXPAND_INITIALIZER
)
7153 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7154 || (bitpos
% BITS_PER_UNIT
!= 0)))
7155 /* If the type and the field are a constant size and the
7156 size of the type isn't the same size as the bitfield,
7157 we must use bitfield operations. */
7159 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7161 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7164 enum machine_mode ext_mode
= mode
;
7166 if (ext_mode
== BLKmode
7167 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7168 && GET_CODE (target
) == MEM
7169 && bitpos
% BITS_PER_UNIT
== 0))
7170 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7172 if (ext_mode
== BLKmode
)
7175 target
= assign_temp (type
, 0, 1, 1);
7180 /* In this case, BITPOS must start at a byte boundary and
7181 TARGET, if specified, must be a MEM. */
7182 if (GET_CODE (op0
) != MEM
7183 || (target
!= 0 && GET_CODE (target
) != MEM
)
7184 || bitpos
% BITS_PER_UNIT
!= 0)
7187 emit_block_move (target
,
7188 adjust_address (op0
, VOIDmode
,
7189 bitpos
/ BITS_PER_UNIT
),
7190 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7192 (modifier
== EXPAND_STACK_PARM
7193 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7198 op0
= validize_mem (op0
);
7200 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7201 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7203 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7204 (modifier
== EXPAND_STACK_PARM
7205 ? NULL_RTX
: target
),
7207 int_size_in_bytes (TREE_TYPE (tem
)));
7209 /* If the result is a record type and BITSIZE is narrower than
7210 the mode of OP0, an integral mode, and this is a big endian
7211 machine, we must put the field into the high-order bits. */
7212 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7213 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7214 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7215 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7216 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7220 if (mode
== BLKmode
)
7222 rtx
new = assign_temp (build_qualified_type
7223 ((*lang_hooks
.types
.type_for_mode
)
7225 TYPE_QUAL_CONST
), 0, 1, 1);
7227 emit_move_insn (new, op0
);
7228 op0
= copy_rtx (new);
7229 PUT_MODE (op0
, BLKmode
);
7230 set_mem_attributes (op0
, exp
, 1);
7236 /* If the result is BLKmode, use that to access the object
7238 if (mode
== BLKmode
)
7241 /* Get a reference to just this component. */
7242 if (modifier
== EXPAND_CONST_ADDRESS
7243 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7244 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7246 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7248 if (op0
== orig_op0
)
7249 op0
= copy_rtx (op0
);
7251 set_mem_attributes (op0
, exp
, 0);
7252 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7253 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7255 MEM_VOLATILE_P (op0
) |= volatilep
;
7256 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7257 || modifier
== EXPAND_CONST_ADDRESS
7258 || modifier
== EXPAND_INITIALIZER
)
7260 else if (target
== 0)
7261 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7263 convert_move (target
, op0
, unsignedp
);
7269 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7271 /* Evaluate the interior expression. */
7272 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7275 /* Get or create an instruction off which to hang a note. */
7276 if (REG_P (subtarget
))
7279 insn
= get_last_insn ();
7282 if (! INSN_P (insn
))
7283 insn
= prev_nonnote_insn (insn
);
7287 target
= gen_reg_rtx (GET_MODE (subtarget
));
7288 insn
= emit_move_insn (target
, subtarget
);
7291 /* Collect the data for the note. */
7292 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7293 vtbl_ref
= plus_constant (vtbl_ref
,
7294 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7295 /* Discard the initial CONST that was added. */
7296 vtbl_ref
= XEXP (vtbl_ref
, 0);
7299 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7304 /* Intended for a reference to a buffer of a file-object in Pascal.
7305 But it's not certain that a special tree code will really be
7306 necessary for these. INDIRECT_REF might work for them. */
7312 /* Pascal set IN expression.
7315 rlo = set_low - (set_low%bits_per_word);
7316 the_word = set [ (index - rlo)/bits_per_word ];
7317 bit_index = index % bits_per_word;
7318 bitmask = 1 << bit_index;
7319 return !!(the_word & bitmask); */
7321 tree set
= TREE_OPERAND (exp
, 0);
7322 tree index
= TREE_OPERAND (exp
, 1);
7323 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7324 tree set_type
= TREE_TYPE (set
);
7325 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7326 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7327 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7328 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7329 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7330 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7331 rtx setaddr
= XEXP (setval
, 0);
7332 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7334 rtx diff
, quo
, rem
, addr
, bit
, result
;
7336 /* If domain is empty, answer is no. Likewise if index is constant
7337 and out of bounds. */
7338 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7339 && TREE_CODE (set_low_bound
) == INTEGER_CST
7340 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7341 || (TREE_CODE (index
) == INTEGER_CST
7342 && TREE_CODE (set_low_bound
) == INTEGER_CST
7343 && tree_int_cst_lt (index
, set_low_bound
))
7344 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7345 && TREE_CODE (index
) == INTEGER_CST
7346 && tree_int_cst_lt (set_high_bound
, index
))))
7350 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7352 /* If we get here, we have to generate the code for both cases
7353 (in range and out of range). */
7355 op0
= gen_label_rtx ();
7356 op1
= gen_label_rtx ();
7358 if (! (GET_CODE (index_val
) == CONST_INT
7359 && GET_CODE (lo_r
) == CONST_INT
))
7360 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7361 GET_MODE (index_val
), iunsignedp
, op1
);
7363 if (! (GET_CODE (index_val
) == CONST_INT
7364 && GET_CODE (hi_r
) == CONST_INT
))
7365 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7366 GET_MODE (index_val
), iunsignedp
, op1
);
7368 /* Calculate the element number of bit zero in the first word
7370 if (GET_CODE (lo_r
) == CONST_INT
)
7371 rlow
= GEN_INT (INTVAL (lo_r
)
7372 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7374 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7375 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7376 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7378 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7379 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7381 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7382 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7383 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7384 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7386 addr
= memory_address (byte_mode
,
7387 expand_binop (index_mode
, add_optab
, diff
,
7388 setaddr
, NULL_RTX
, iunsignedp
,
7391 /* Extract the bit we want to examine. */
7392 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7393 gen_rtx_MEM (byte_mode
, addr
),
7394 make_tree (TREE_TYPE (index
), rem
),
7396 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7397 GET_MODE (target
) == byte_mode
? target
: 0,
7398 1, OPTAB_LIB_WIDEN
);
7400 if (result
!= target
)
7401 convert_move (target
, result
, 1);
7403 /* Output the code to handle the out-of-range case. */
7406 emit_move_insn (target
, const0_rtx
);
7411 case WITH_CLEANUP_EXPR
:
7412 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7414 WITH_CLEANUP_EXPR_RTL (exp
)
7415 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7416 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7417 CLEANUP_EH_ONLY (exp
));
7419 /* That's it for this cleanup. */
7420 TREE_OPERAND (exp
, 1) = 0;
7422 return WITH_CLEANUP_EXPR_RTL (exp
);
7424 case CLEANUP_POINT_EXPR
:
7426 /* Start a new binding layer that will keep track of all cleanup
7427 actions to be performed. */
7428 expand_start_bindings (2);
7430 target_temp_slot_level
= temp_slot_level
;
7432 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7433 /* If we're going to use this value, load it up now. */
7435 op0
= force_not_mem (op0
);
7436 preserve_temp_slots (op0
);
7437 expand_end_bindings (NULL_TREE
, 0, 0);
7442 /* Check for a built-in function. */
7443 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7444 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7446 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7448 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7449 == BUILT_IN_FRONTEND
)
7450 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7454 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7457 return expand_call (exp
, target
, ignore
);
7459 case NON_LVALUE_EXPR
:
7462 case REFERENCE_EXPR
:
7463 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7466 if (TREE_CODE (type
) == UNION_TYPE
)
7468 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7470 /* If both input and output are BLKmode, this conversion isn't doing
7471 anything except possibly changing memory attribute. */
7472 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7474 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7477 result
= copy_rtx (result
);
7478 set_mem_attributes (result
, exp
, 0);
7484 if (TYPE_MODE (type
) != BLKmode
)
7485 target
= gen_reg_rtx (TYPE_MODE (type
));
7487 target
= assign_temp (type
, 0, 1, 1);
7490 if (GET_CODE (target
) == MEM
)
7491 /* Store data into beginning of memory target. */
7492 store_expr (TREE_OPERAND (exp
, 0),
7493 adjust_address (target
, TYPE_MODE (valtype
), 0),
7494 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7496 else if (GET_CODE (target
) == REG
)
7497 /* Store this field into a union of the proper type. */
7498 store_field (target
,
7499 MIN ((int_size_in_bytes (TREE_TYPE
7500 (TREE_OPERAND (exp
, 0)))
7502 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7503 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7504 VOIDmode
, 0, type
, 0);
7508 /* Return the entire union. */
7512 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7514 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7517 /* If the signedness of the conversion differs and OP0 is
7518 a promoted SUBREG, clear that indication since we now
7519 have to do the proper extension. */
7520 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7521 && GET_CODE (op0
) == SUBREG
)
7522 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7527 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7528 if (GET_MODE (op0
) == mode
)
7531 /* If OP0 is a constant, just convert it into the proper mode. */
7532 if (CONSTANT_P (op0
))
7534 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7535 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7537 if (modifier
== EXPAND_INITIALIZER
)
7538 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7539 subreg_lowpart_offset (mode
,
7542 return convert_modes (mode
, inner_mode
, op0
,
7543 TREE_UNSIGNED (inner_type
));
7546 if (modifier
== EXPAND_INITIALIZER
)
7547 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7551 convert_to_mode (mode
, op0
,
7552 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7554 convert_move (target
, op0
,
7555 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7558 case VIEW_CONVERT_EXPR
:
7559 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7561 /* If the input and output modes are both the same, we are done.
7562 Otherwise, if neither mode is BLKmode and both are integral and within
7563 a word, we can use gen_lowpart. If neither is true, make sure the
7564 operand is in memory and convert the MEM to the new mode. */
7565 if (TYPE_MODE (type
) == GET_MODE (op0
))
7567 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7568 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7569 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7570 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7571 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7572 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7573 else if (GET_CODE (op0
) != MEM
)
7575 /* If the operand is not a MEM, force it into memory. Since we
7576 are going to be be changing the mode of the MEM, don't call
7577 force_const_mem for constants because we don't allow pool
7578 constants to change mode. */
7579 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7581 if (TREE_ADDRESSABLE (exp
))
7584 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7586 = assign_stack_temp_for_type
7587 (TYPE_MODE (inner_type
),
7588 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7590 emit_move_insn (target
, op0
);
7594 /* At this point, OP0 is in the correct mode. If the output type is such
7595 that the operand is known to be aligned, indicate that it is.
7596 Otherwise, we need only be concerned about alignment for non-BLKmode
7598 if (GET_CODE (op0
) == MEM
)
7600 op0
= copy_rtx (op0
);
7602 if (TYPE_ALIGN_OK (type
))
7603 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7604 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7605 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7607 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7608 HOST_WIDE_INT temp_size
7609 = MAX (int_size_in_bytes (inner_type
),
7610 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7611 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7612 temp_size
, 0, type
);
7613 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7615 if (TREE_ADDRESSABLE (exp
))
7618 if (GET_MODE (op0
) == BLKmode
)
7619 emit_block_move (new_with_op0_mode
, op0
,
7620 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7621 (modifier
== EXPAND_STACK_PARM
7622 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7624 emit_move_insn (new_with_op0_mode
, op0
);
7629 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7635 this_optab
= ! unsignedp
&& flag_trapv
7636 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7637 ? addv_optab
: add_optab
;
7639 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7640 something else, make sure we add the register to the constant and
7641 then to the other thing. This case can occur during strength
7642 reduction and doing it this way will produce better code if the
7643 frame pointer or argument pointer is eliminated.
7645 fold-const.c will ensure that the constant is always in the inner
7646 PLUS_EXPR, so the only case we need to do anything about is if
7647 sp, ap, or fp is our second argument, in which case we must swap
7648 the innermost first argument and our second argument. */
7650 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7651 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7652 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7653 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7654 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7655 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7657 tree t
= TREE_OPERAND (exp
, 1);
7659 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7660 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7663 /* If the result is to be ptr_mode and we are adding an integer to
7664 something, we might be forming a constant. So try to use
7665 plus_constant. If it produces a sum and we can't accept it,
7666 use force_operand. This allows P = &ARR[const] to generate
7667 efficient code on machines where a SYMBOL_REF is not a valid
7670 If this is an EXPAND_SUM call, always return the sum. */
7671 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7672 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7674 if (modifier
== EXPAND_STACK_PARM
)
7676 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7677 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7678 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7682 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7684 /* Use immed_double_const to ensure that the constant is
7685 truncated according to the mode of OP1, then sign extended
7686 to a HOST_WIDE_INT. Using the constant directly can result
7687 in non-canonical RTL in a 64x32 cross compile. */
7689 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7691 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7692 op1
= plus_constant (op1
, INTVAL (constant_part
));
7693 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7694 op1
= force_operand (op1
, target
);
7698 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7699 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7700 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7704 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7705 (modifier
== EXPAND_INITIALIZER
7706 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7707 if (! CONSTANT_P (op0
))
7709 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7710 VOIDmode
, modifier
);
7711 /* Return a PLUS if modifier says it's OK. */
7712 if (modifier
== EXPAND_SUM
7713 || modifier
== EXPAND_INITIALIZER
)
7714 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7717 /* Use immed_double_const to ensure that the constant is
7718 truncated according to the mode of OP1, then sign extended
7719 to a HOST_WIDE_INT. Using the constant directly can result
7720 in non-canonical RTL in a 64x32 cross compile. */
7722 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7724 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7725 op0
= plus_constant (op0
, INTVAL (constant_part
));
7726 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7727 op0
= force_operand (op0
, target
);
7732 /* No sense saving up arithmetic to be done
7733 if it's all in the wrong mode to form part of an address.
7734 And force_operand won't know whether to sign-extend or
7736 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7737 || mode
!= ptr_mode
)
7739 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7740 subtarget
, &op0
, &op1
, 0);
7741 if (op0
== const0_rtx
)
7743 if (op1
== const0_rtx
)
7748 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7749 subtarget
, &op0
, &op1
, modifier
);
7750 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7753 /* For initializers, we are allowed to return a MINUS of two
7754 symbolic constants. Here we handle all cases when both operands
7756 /* Handle difference of two symbolic constants,
7757 for the sake of an initializer. */
7758 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7759 && really_constant_p (TREE_OPERAND (exp
, 0))
7760 && really_constant_p (TREE_OPERAND (exp
, 1)))
7762 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7763 NULL_RTX
, &op0
, &op1
, modifier
);
7765 /* If the last operand is a CONST_INT, use plus_constant of
7766 the negated constant. Else make the MINUS. */
7767 if (GET_CODE (op1
) == CONST_INT
)
7768 return plus_constant (op0
, - INTVAL (op1
));
7770 return gen_rtx_MINUS (mode
, op0
, op1
);
7773 this_optab
= ! unsignedp
&& flag_trapv
7774 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7775 ? subv_optab
: sub_optab
;
7777 /* No sense saving up arithmetic to be done
7778 if it's all in the wrong mode to form part of an address.
7779 And force_operand won't know whether to sign-extend or
7781 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7782 || mode
!= ptr_mode
)
7785 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7786 subtarget
, &op0
, &op1
, modifier
);
7788 /* Convert A - const to A + (-const). */
7789 if (GET_CODE (op1
) == CONST_INT
)
7791 op1
= negate_rtx (mode
, op1
);
7792 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7798 /* If first operand is constant, swap them.
7799 Thus the following special case checks need only
7800 check the second operand. */
7801 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7803 tree t1
= TREE_OPERAND (exp
, 0);
7804 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7805 TREE_OPERAND (exp
, 1) = t1
;
7808 /* Attempt to return something suitable for generating an
7809 indexed address, for machines that support that. */
7811 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7812 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7814 tree exp1
= TREE_OPERAND (exp
, 1);
7816 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7819 if (GET_CODE (op0
) != REG
)
7820 op0
= force_operand (op0
, NULL_RTX
);
7821 if (GET_CODE (op0
) != REG
)
7822 op0
= copy_to_mode_reg (mode
, op0
);
7824 return gen_rtx_MULT (mode
, op0
,
7825 gen_int_mode (tree_low_cst (exp1
, 0),
7826 TYPE_MODE (TREE_TYPE (exp1
))));
7829 if (modifier
== EXPAND_STACK_PARM
)
7832 /* Check for multiplying things that have been extended
7833 from a narrower type. If this machine supports multiplying
7834 in that narrower type with a result in the desired type,
7835 do it that way, and avoid the explicit type-conversion. */
7836 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7837 && TREE_CODE (type
) == INTEGER_TYPE
7838 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7839 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7840 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7841 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7842 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7843 /* Don't use a widening multiply if a shift will do. */
7844 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7845 > HOST_BITS_PER_WIDE_INT
)
7846 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7848 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7849 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7851 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7852 /* If both operands are extended, they must either both
7853 be zero-extended or both be sign-extended. */
7854 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7856 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7858 enum machine_mode innermode
7859 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7860 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7861 ? smul_widen_optab
: umul_widen_optab
);
7862 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7863 ? umul_widen_optab
: smul_widen_optab
);
7864 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7866 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7868 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7869 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7870 TREE_OPERAND (exp
, 1),
7871 NULL_RTX
, &op0
, &op1
, 0);
7873 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7874 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7875 NULL_RTX
, &op0
, &op1
, 0);
7878 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7879 && innermode
== word_mode
)
7882 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7883 NULL_RTX
, VOIDmode
, 0);
7884 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7885 op1
= convert_modes (innermode
, mode
,
7886 expand_expr (TREE_OPERAND (exp
, 1),
7887 NULL_RTX
, VOIDmode
, 0),
7890 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7891 NULL_RTX
, VOIDmode
, 0);
7892 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7893 unsignedp
, OPTAB_LIB_WIDEN
);
7894 htem
= expand_mult_highpart_adjust (innermode
,
7895 gen_highpart (innermode
, temp
),
7897 gen_highpart (innermode
, temp
),
7899 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7904 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7905 subtarget
, &op0
, &op1
, 0);
7906 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7908 case TRUNC_DIV_EXPR
:
7909 case FLOOR_DIV_EXPR
:
7911 case ROUND_DIV_EXPR
:
7912 case EXACT_DIV_EXPR
:
7913 if (modifier
== EXPAND_STACK_PARM
)
7915 /* Possible optimization: compute the dividend with EXPAND_SUM
7916 then if the divisor is constant can optimize the case
7917 where some terms of the dividend have coeffs divisible by it. */
7918 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7919 subtarget
, &op0
, &op1
, 0);
7920 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7923 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7924 expensive divide. If not, combine will rebuild the original
7926 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7927 && TREE_CODE (type
) == REAL_TYPE
7928 && !real_onep (TREE_OPERAND (exp
, 0)))
7929 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7930 build (RDIV_EXPR
, type
,
7931 build_real (type
, dconst1
),
7932 TREE_OPERAND (exp
, 1))),
7933 target
, tmode
, modifier
);
7934 this_optab
= sdiv_optab
;
7937 case TRUNC_MOD_EXPR
:
7938 case FLOOR_MOD_EXPR
:
7940 case ROUND_MOD_EXPR
:
7941 if (modifier
== EXPAND_STACK_PARM
)
7943 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7944 subtarget
, &op0
, &op1
, 0);
7945 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7947 case FIX_ROUND_EXPR
:
7948 case FIX_FLOOR_EXPR
:
7950 abort (); /* Not used for C. */
7952 case FIX_TRUNC_EXPR
:
7953 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7954 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7955 target
= gen_reg_rtx (mode
);
7956 expand_fix (target
, op0
, unsignedp
);
7960 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7961 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7962 target
= gen_reg_rtx (mode
);
7963 /* expand_float can't figure out what to do if FROM has VOIDmode.
7964 So give it the correct mode. With -O, cse will optimize this. */
7965 if (GET_MODE (op0
) == VOIDmode
)
7966 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7968 expand_float (target
, op0
,
7969 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7973 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7974 if (modifier
== EXPAND_STACK_PARM
)
7976 temp
= expand_unop (mode
,
7977 ! unsignedp
&& flag_trapv
7978 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7979 ? negv_optab
: neg_optab
, op0
, target
, 0);
7985 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7986 if (modifier
== EXPAND_STACK_PARM
)
7989 /* ABS_EXPR is not valid for complex arguments. */
7990 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7991 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7994 /* Unsigned abs is simply the operand. Testing here means we don't
7995 risk generating incorrect code below. */
7996 if (TREE_UNSIGNED (type
))
7999 return expand_abs (mode
, op0
, target
, unsignedp
,
8000 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8004 target
= original_target
;
8006 || modifier
== EXPAND_STACK_PARM
8007 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8008 || GET_MODE (target
) != mode
8009 || (GET_CODE (target
) == REG
8010 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8011 target
= gen_reg_rtx (mode
);
8012 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8013 target
, &op0
, &op1
, 0);
8015 /* First try to do it with a special MIN or MAX instruction.
8016 If that does not win, use a conditional jump to select the proper
8018 this_optab
= (TREE_UNSIGNED (type
)
8019 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8020 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8022 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8027 /* At this point, a MEM target is no longer useful; we will get better
8030 if (GET_CODE (target
) == MEM
)
8031 target
= gen_reg_rtx (mode
);
8033 /* If op1 was placed in target, swap op0 and op1. */
8034 if (target
!= op0
&& target
== op1
)
8042 emit_move_insn (target
, op0
);
8044 op0
= gen_label_rtx ();
8046 /* If this mode is an integer too wide to compare properly,
8047 compare word by word. Rely on cse to optimize constant cases. */
8048 if (GET_MODE_CLASS (mode
) == MODE_INT
8049 && ! can_compare_p (GE
, mode
, ccp_jump
))
8051 if (code
== MAX_EXPR
)
8052 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8053 target
, op1
, NULL_RTX
, op0
);
8055 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
8056 op1
, target
, NULL_RTX
, op0
);
8060 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
8061 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8062 unsignedp
, mode
, NULL_RTX
, NULL_RTX
,
8065 emit_move_insn (target
, op1
);
8070 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8071 if (modifier
== EXPAND_STACK_PARM
)
8073 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8078 /* ??? Can optimize bitwise operations with one arg constant.
8079 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8080 and (a bitwise1 b) bitwise2 b (etc)
8081 but that is probably not worth while. */
8083 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8084 boolean values when we want in all cases to compute both of them. In
8085 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8086 as actual zero-or-1 values and then bitwise anding. In cases where
8087 there cannot be any side effects, better code would be made by
8088 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8089 how to recognize those cases. */
8091 case TRUTH_AND_EXPR
:
8093 this_optab
= and_optab
;
8098 this_optab
= ior_optab
;
8101 case TRUTH_XOR_EXPR
:
8103 this_optab
= xor_optab
;
8110 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8112 if (modifier
== EXPAND_STACK_PARM
)
8114 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8115 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8118 /* Could determine the answer when only additive constants differ. Also,
8119 the addition of one can be handled by changing the condition. */
8126 case UNORDERED_EXPR
:
8133 temp
= do_store_flag (exp
,
8134 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8135 tmode
!= VOIDmode
? tmode
: mode
, 0);
8139 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8140 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8142 && GET_CODE (original_target
) == REG
8143 && (GET_MODE (original_target
)
8144 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8146 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8149 /* If temp is constant, we can just compute the result. */
8150 if (GET_CODE (temp
) == CONST_INT
)
8152 if (INTVAL (temp
) != 0)
8153 emit_move_insn (target
, const1_rtx
);
8155 emit_move_insn (target
, const0_rtx
);
8160 if (temp
!= original_target
)
8162 enum machine_mode mode1
= GET_MODE (temp
);
8163 if (mode1
== VOIDmode
)
8164 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8166 temp
= copy_to_mode_reg (mode1
, temp
);
8169 op1
= gen_label_rtx ();
8170 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8171 GET_MODE (temp
), unsignedp
, op1
);
8172 emit_move_insn (temp
, const1_rtx
);
8177 /* If no set-flag instruction, must generate a conditional
8178 store into a temporary variable. Drop through
8179 and handle this like && and ||. */
8181 case TRUTH_ANDIF_EXPR
:
8182 case TRUTH_ORIF_EXPR
:
8185 || modifier
== EXPAND_STACK_PARM
8186 || ! safe_from_p (target
, exp
, 1)
8187 /* Make sure we don't have a hard reg (such as function's return
8188 value) live across basic blocks, if not optimizing. */
8189 || (!optimize
&& GET_CODE (target
) == REG
8190 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8191 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8194 emit_clr_insn (target
);
8196 op1
= gen_label_rtx ();
8197 jumpifnot (exp
, op1
);
8200 emit_0_to_1_insn (target
);
8203 return ignore
? const0_rtx
: target
;
8205 case TRUTH_NOT_EXPR
:
8206 if (modifier
== EXPAND_STACK_PARM
)
8208 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8209 /* The parser is careful to generate TRUTH_NOT_EXPR
8210 only with operands that are always zero or one. */
8211 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8212 target
, 1, OPTAB_LIB_WIDEN
);
8218 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8220 return expand_expr_real (TREE_OPERAND (exp
, 1),
8221 (ignore
? const0_rtx
: target
),
8222 VOIDmode
, modifier
, alt_rtl
);
8225 /* If we would have a "singleton" (see below) were it not for a
8226 conversion in each arm, bring that conversion back out. */
8227 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8228 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8229 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8230 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8232 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8233 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8235 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8236 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8237 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8238 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8239 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8240 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8241 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8242 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8243 return expand_expr (build1 (NOP_EXPR
, type
,
8244 build (COND_EXPR
, TREE_TYPE (iftrue
),
8245 TREE_OPERAND (exp
, 0),
8247 target
, tmode
, modifier
);
8251 /* Note that COND_EXPRs whose type is a structure or union
8252 are required to be constructed to contain assignments of
8253 a temporary variable, so that we can evaluate them here
8254 for side effect only. If type is void, we must do likewise. */
8256 /* If an arm of the branch requires a cleanup,
8257 only that cleanup is performed. */
8260 tree binary_op
= 0, unary_op
= 0;
8262 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8263 convert it to our mode, if necessary. */
8264 if (integer_onep (TREE_OPERAND (exp
, 1))
8265 && integer_zerop (TREE_OPERAND (exp
, 2))
8266 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8270 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8275 if (modifier
== EXPAND_STACK_PARM
)
8277 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8278 if (GET_MODE (op0
) == mode
)
8282 target
= gen_reg_rtx (mode
);
8283 convert_move (target
, op0
, unsignedp
);
8287 /* Check for X ? A + B : A. If we have this, we can copy A to the
8288 output and conditionally add B. Similarly for unary operations.
8289 Don't do this if X has side-effects because those side effects
8290 might affect A or B and the "?" operation is a sequence point in
8291 ANSI. (operand_equal_p tests for side effects.) */
8293 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8294 && operand_equal_p (TREE_OPERAND (exp
, 2),
8295 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8296 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8297 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8298 && operand_equal_p (TREE_OPERAND (exp
, 1),
8299 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8300 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8301 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8302 && operand_equal_p (TREE_OPERAND (exp
, 2),
8303 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8304 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8305 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8306 && operand_equal_p (TREE_OPERAND (exp
, 1),
8307 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8308 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8310 /* If we are not to produce a result, we have no target. Otherwise,
8311 if a target was specified use it; it will not be used as an
8312 intermediate target unless it is safe. If no target, use a
8317 else if (modifier
== EXPAND_STACK_PARM
)
8318 temp
= assign_temp (type
, 0, 0, 1);
8319 else if (original_target
8320 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8321 || (singleton
&& GET_CODE (original_target
) == REG
8322 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8323 && original_target
== var_rtx (singleton
)))
8324 && GET_MODE (original_target
) == mode
8325 #ifdef HAVE_conditional_move
8326 && (! can_conditionally_move_p (mode
)
8327 || GET_CODE (original_target
) == REG
8328 || TREE_ADDRESSABLE (type
))
8330 && (GET_CODE (original_target
) != MEM
8331 || TREE_ADDRESSABLE (type
)))
8332 temp
= original_target
;
8333 else if (TREE_ADDRESSABLE (type
))
8336 temp
= assign_temp (type
, 0, 0, 1);
8338 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8339 do the test of X as a store-flag operation, do this as
8340 A + ((X != 0) << log C). Similarly for other simple binary
8341 operators. Only do for C == 1 if BRANCH_COST is low. */
8342 if (temp
&& singleton
&& binary_op
8343 && (TREE_CODE (binary_op
) == PLUS_EXPR
8344 || TREE_CODE (binary_op
) == MINUS_EXPR
8345 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8346 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8347 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8348 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8349 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8353 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8354 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8355 ? addv_optab
: add_optab
)
8356 : TREE_CODE (binary_op
) == MINUS_EXPR
8357 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8358 ? subv_optab
: sub_optab
)
8359 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8362 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8363 if (singleton
== TREE_OPERAND (exp
, 1))
8364 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8366 cond
= TREE_OPERAND (exp
, 0);
8368 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8370 mode
, BRANCH_COST
<= 1);
8372 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8373 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8374 build_int_2 (tree_log2
8378 (safe_from_p (temp
, singleton
, 1)
8379 ? temp
: NULL_RTX
), 0);
8383 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8384 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8385 unsignedp
, OPTAB_LIB_WIDEN
);
8389 do_pending_stack_adjust ();
8391 op0
= gen_label_rtx ();
8393 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8397 /* If the target conflicts with the other operand of the
8398 binary op, we can't use it. Also, we can't use the target
8399 if it is a hard register, because evaluating the condition
8400 might clobber it. */
8402 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8403 || (GET_CODE (temp
) == REG
8404 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8405 temp
= gen_reg_rtx (mode
);
8406 store_expr (singleton
, temp
,
8407 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8410 expand_expr (singleton
,
8411 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8412 if (singleton
== TREE_OPERAND (exp
, 1))
8413 jumpif (TREE_OPERAND (exp
, 0), op0
);
8415 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8417 start_cleanup_deferral ();
8418 if (binary_op
&& temp
== 0)
8419 /* Just touch the other operand. */
8420 expand_expr (TREE_OPERAND (binary_op
, 1),
8421 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8423 store_expr (build (TREE_CODE (binary_op
), type
,
8424 make_tree (type
, temp
),
8425 TREE_OPERAND (binary_op
, 1)),
8426 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8428 store_expr (build1 (TREE_CODE (unary_op
), type
,
8429 make_tree (type
, temp
)),
8430 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8433 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8434 comparison operator. If we have one of these cases, set the
8435 output to A, branch on A (cse will merge these two references),
8436 then set the output to FOO. */
8438 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8439 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8440 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8441 TREE_OPERAND (exp
, 1), 0)
8442 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8443 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8444 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8446 if (GET_CODE (temp
) == REG
8447 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8448 temp
= gen_reg_rtx (mode
);
8449 store_expr (TREE_OPERAND (exp
, 1), temp
,
8450 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8451 jumpif (TREE_OPERAND (exp
, 0), op0
);
8453 start_cleanup_deferral ();
8454 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8455 store_expr (TREE_OPERAND (exp
, 2), temp
,
8456 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8458 expand_expr (TREE_OPERAND (exp
, 2),
8459 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8463 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8464 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8465 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8466 TREE_OPERAND (exp
, 2), 0)
8467 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8468 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8469 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8471 if (GET_CODE (temp
) == REG
8472 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8473 temp
= gen_reg_rtx (mode
);
8474 store_expr (TREE_OPERAND (exp
, 2), temp
,
8475 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8476 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8478 start_cleanup_deferral ();
8479 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8480 store_expr (TREE_OPERAND (exp
, 1), temp
,
8481 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8483 expand_expr (TREE_OPERAND (exp
, 1),
8484 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8489 op1
= gen_label_rtx ();
8490 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8492 start_cleanup_deferral ();
8494 /* One branch of the cond can be void, if it never returns. For
8495 example A ? throw : E */
8497 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8498 store_expr (TREE_OPERAND (exp
, 1), temp
,
8499 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8501 expand_expr (TREE_OPERAND (exp
, 1),
8502 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8503 end_cleanup_deferral ();
8505 emit_jump_insn (gen_jump (op1
));
8508 start_cleanup_deferral ();
8510 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8511 store_expr (TREE_OPERAND (exp
, 2), temp
,
8512 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8514 expand_expr (TREE_OPERAND (exp
, 2),
8515 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8518 end_cleanup_deferral ();
8529 /* Something needs to be initialized, but we didn't know
8530 where that thing was when building the tree. For example,
8531 it could be the return value of a function, or a parameter
8532 to a function which lays down in the stack, or a temporary
8533 variable which must be passed by reference.
8535 We guarantee that the expression will either be constructed
8536 or copied into our original target. */
8538 tree slot
= TREE_OPERAND (exp
, 0);
8539 tree cleanups
= NULL_TREE
;
8542 if (TREE_CODE (slot
) != VAR_DECL
)
8546 target
= original_target
;
8548 /* Set this here so that if we get a target that refers to a
8549 register variable that's already been used, put_reg_into_stack
8550 knows that it should fix up those uses. */
8551 TREE_USED (slot
) = 1;
8555 if (DECL_RTL_SET_P (slot
))
8557 target
= DECL_RTL (slot
);
8558 /* If we have already expanded the slot, so don't do
8560 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8565 target
= assign_temp (type
, 2, 0, 1);
8566 /* All temp slots at this level must not conflict. */
8567 preserve_temp_slots (target
);
8568 SET_DECL_RTL (slot
, target
);
8569 if (TREE_ADDRESSABLE (slot
))
8570 put_var_into_stack (slot
, /*rescan=*/false);
8572 /* Since SLOT is not known to the called function
8573 to belong to its stack frame, we must build an explicit
8574 cleanup. This case occurs when we must build up a reference
8575 to pass the reference as an argument. In this case,
8576 it is very likely that such a reference need not be
8579 if (TREE_OPERAND (exp
, 2) == 0)
8580 TREE_OPERAND (exp
, 2)
8581 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8582 cleanups
= TREE_OPERAND (exp
, 2);
8587 /* This case does occur, when expanding a parameter which
8588 needs to be constructed on the stack. The target
8589 is the actual stack address that we want to initialize.
8590 The function we call will perform the cleanup in this case. */
8592 /* If we have already assigned it space, use that space,
8593 not target that we were passed in, as our target
8594 parameter is only a hint. */
8595 if (DECL_RTL_SET_P (slot
))
8597 target
= DECL_RTL (slot
);
8598 /* If we have already expanded the slot, so don't do
8600 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8605 SET_DECL_RTL (slot
, target
);
8606 /* If we must have an addressable slot, then make sure that
8607 the RTL that we just stored in slot is OK. */
8608 if (TREE_ADDRESSABLE (slot
))
8609 put_var_into_stack (slot
, /*rescan=*/true);
8613 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8614 /* Mark it as expanded. */
8615 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8617 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8619 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8626 tree lhs
= TREE_OPERAND (exp
, 0);
8627 tree rhs
= TREE_OPERAND (exp
, 1);
8629 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8635 /* If lhs is complex, expand calls in rhs before computing it.
8636 That's so we don't compute a pointer and save it over a
8637 call. If lhs is simple, compute it first so we can give it
8638 as a target if the rhs is just a call. This avoids an
8639 extra temp and copy and that prevents a partial-subsumption
8640 which makes bad code. Actually we could treat
8641 component_ref's of vars like vars. */
8643 tree lhs
= TREE_OPERAND (exp
, 0);
8644 tree rhs
= TREE_OPERAND (exp
, 1);
8648 /* Check for |= or &= of a bitfield of size one into another bitfield
8649 of size 1. In this case, (unless we need the result of the
8650 assignment) we can do this more efficiently with a
8651 test followed by an assignment, if necessary.
8653 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8654 things change so we do, this code should be enhanced to
8657 && TREE_CODE (lhs
) == COMPONENT_REF
8658 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8659 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8660 && TREE_OPERAND (rhs
, 0) == lhs
8661 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8662 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8663 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8665 rtx label
= gen_label_rtx ();
8667 do_jump (TREE_OPERAND (rhs
, 1),
8668 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8669 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8670 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8671 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8673 : integer_zero_node
)),
8675 do_pending_stack_adjust ();
8680 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8686 if (!TREE_OPERAND (exp
, 0))
8687 expand_null_return ();
8689 expand_return (TREE_OPERAND (exp
, 0));
8692 case PREINCREMENT_EXPR
:
8693 case PREDECREMENT_EXPR
:
8694 return expand_increment (exp
, 0, ignore
);
8696 case POSTINCREMENT_EXPR
:
8697 case POSTDECREMENT_EXPR
:
8698 /* Faster to treat as pre-increment if result is not used. */
8699 return expand_increment (exp
, ! ignore
, ignore
);
8702 if (modifier
== EXPAND_STACK_PARM
)
8704 /* Are we taking the address of a nested function? */
8705 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8706 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8707 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8708 && ! TREE_STATIC (exp
))
8710 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8711 op0
= force_operand (op0
, target
);
8713 /* If we are taking the address of something erroneous, just
8715 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8717 /* If we are taking the address of a constant and are at the
8718 top level, we have to use output_constant_def since we can't
8719 call force_const_mem at top level. */
8721 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8722 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8724 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8727 /* We make sure to pass const0_rtx down if we came in with
8728 ignore set, to avoid doing the cleanups twice for something. */
8729 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8730 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8731 (modifier
== EXPAND_INITIALIZER
8732 ? modifier
: EXPAND_CONST_ADDRESS
));
8734 /* If we are going to ignore the result, OP0 will have been set
8735 to const0_rtx, so just return it. Don't get confused and
8736 think we are taking the address of the constant. */
8740 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8741 clever and returns a REG when given a MEM. */
8742 op0
= protect_from_queue (op0
, 1);
8744 /* We would like the object in memory. If it is a constant, we can
8745 have it be statically allocated into memory. For a non-constant,
8746 we need to allocate some memory and store the value into it. */
8748 if (CONSTANT_P (op0
))
8749 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8751 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8752 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8753 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
8755 /* If the operand is a SAVE_EXPR, we can deal with this by
8756 forcing the SAVE_EXPR into memory. */
8757 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8759 put_var_into_stack (TREE_OPERAND (exp
, 0),
8761 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8765 /* If this object is in a register, it can't be BLKmode. */
8766 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8767 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8769 if (GET_CODE (op0
) == PARALLEL
)
8770 /* Handle calls that pass values in multiple
8771 non-contiguous locations. The Irix 6 ABI has examples
8773 emit_group_store (memloc
, op0
, inner_type
,
8774 int_size_in_bytes (inner_type
));
8776 emit_move_insn (memloc
, op0
);
8782 if (GET_CODE (op0
) != MEM
)
8785 mark_temp_addr_taken (op0
);
8786 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8788 op0
= XEXP (op0
, 0);
8789 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8790 op0
= convert_memory_address (ptr_mode
, op0
);
8794 /* If OP0 is not aligned as least as much as the type requires, we
8795 need to make a temporary, copy OP0 to it, and take the address of
8796 the temporary. We want to use the alignment of the type, not of
8797 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8798 the test for BLKmode means that can't happen. The test for
8799 BLKmode is because we never make mis-aligned MEMs with
8802 We don't need to do this at all if the machine doesn't have
8803 strict alignment. */
8804 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8805 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8807 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8809 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8812 if (TYPE_ALIGN_OK (inner_type
))
8815 if (TREE_ADDRESSABLE (inner_type
))
8817 /* We can't make a bitwise copy of this object, so fail. */
8818 error ("cannot take the address of an unaligned member");
8822 new = assign_stack_temp_for_type
8823 (TYPE_MODE (inner_type
),
8824 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8825 : int_size_in_bytes (inner_type
),
8826 1, build_qualified_type (inner_type
,
8827 (TYPE_QUALS (inner_type
)
8828 | TYPE_QUAL_CONST
)));
8830 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
8831 (modifier
== EXPAND_STACK_PARM
8832 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
8837 op0
= force_operand (XEXP (op0
, 0), target
);
8841 && GET_CODE (op0
) != REG
8842 && modifier
!= EXPAND_CONST_ADDRESS
8843 && modifier
!= EXPAND_INITIALIZER
8844 && modifier
!= EXPAND_SUM
)
8845 op0
= force_reg (Pmode
, op0
);
8847 if (GET_CODE (op0
) == REG
8848 && ! REG_USERVAR_P (op0
))
8849 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8851 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8852 op0
= convert_memory_address (ptr_mode
, op0
);
8856 case ENTRY_VALUE_EXPR
:
8859 /* COMPLEX type for Extended Pascal & Fortran */
8862 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8865 /* Get the rtx code of the operands. */
8866 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8867 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8870 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8874 /* Move the real (op0) and imaginary (op1) parts to their location. */
8875 emit_move_insn (gen_realpart (mode
, target
), op0
);
8876 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8878 insns
= get_insns ();
8881 /* Complex construction should appear as a single unit. */
8882 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8883 each with a separate pseudo as destination.
8884 It's not correct for flow to treat them as a unit. */
8885 if (GET_CODE (target
) != CONCAT
)
8886 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8894 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8895 return gen_realpart (mode
, op0
);
8898 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8899 return gen_imagpart (mode
, op0
);
8903 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8907 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8910 target
= gen_reg_rtx (mode
);
8914 /* Store the realpart and the negated imagpart to target. */
8915 emit_move_insn (gen_realpart (partmode
, target
),
8916 gen_realpart (partmode
, op0
));
8918 imag_t
= gen_imagpart (partmode
, target
);
8919 temp
= expand_unop (partmode
,
8920 ! unsignedp
&& flag_trapv
8921 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8922 ? negv_optab
: neg_optab
,
8923 gen_imagpart (partmode
, op0
), imag_t
, 0);
8925 emit_move_insn (imag_t
, temp
);
8927 insns
= get_insns ();
8930 /* Conjugate should appear as a single unit
8931 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8932 each with a separate pseudo as destination.
8933 It's not correct for flow to treat them as a unit. */
8934 if (GET_CODE (target
) != CONCAT
)
8935 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8942 case TRY_CATCH_EXPR
:
8944 tree handler
= TREE_OPERAND (exp
, 1);
8946 expand_eh_region_start ();
8948 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8950 expand_eh_region_end_cleanup (handler
);
8955 case TRY_FINALLY_EXPR
:
8957 tree try_block
= TREE_OPERAND (exp
, 0);
8958 tree finally_block
= TREE_OPERAND (exp
, 1);
8960 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
8962 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8963 is not sufficient, so we cannot expand the block twice.
8964 So we play games with GOTO_SUBROUTINE_EXPR to let us
8965 expand the thing only once. */
8966 /* When not optimizing, we go ahead with this form since
8967 (1) user breakpoints operate more predictably without
8968 code duplication, and
8969 (2) we're not running any of the global optimizers
8970 that would explode in time/space with the highly
8971 connected CFG created by the indirect branching. */
8973 rtx finally_label
= gen_label_rtx ();
8974 rtx done_label
= gen_label_rtx ();
8975 rtx return_link
= gen_reg_rtx (Pmode
);
8976 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8977 (tree
) finally_label
, (tree
) return_link
);
8978 TREE_SIDE_EFFECTS (cleanup
) = 1;
8980 /* Start a new binding layer that will keep track of all cleanup
8981 actions to be performed. */
8982 expand_start_bindings (2);
8983 target_temp_slot_level
= temp_slot_level
;
8985 expand_decl_cleanup (NULL_TREE
, cleanup
);
8986 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8988 preserve_temp_slots (op0
);
8989 expand_end_bindings (NULL_TREE
, 0, 0);
8990 emit_jump (done_label
);
8991 emit_label (finally_label
);
8992 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8993 emit_indirect_jump (return_link
);
8994 emit_label (done_label
);
8998 expand_start_bindings (2);
8999 target_temp_slot_level
= temp_slot_level
;
9001 expand_decl_cleanup (NULL_TREE
, finally_block
);
9002 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9004 preserve_temp_slots (op0
);
9005 expand_end_bindings (NULL_TREE
, 0, 0);
9011 case GOTO_SUBROUTINE_EXPR
:
9013 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9014 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9015 rtx return_address
= gen_label_rtx ();
9016 emit_move_insn (return_link
,
9017 gen_rtx_LABEL_REF (Pmode
, return_address
));
9019 emit_label (return_address
);
9024 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9027 return get_exception_pointer (cfun
);
9030 /* Function descriptors are not valid except for as
9031 initialization constants, and should not be expanded. */
9035 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
,
9039 /* Here to do an ordinary binary operator, generating an instruction
9040 from the optab already placed in `this_optab'. */
9042 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
9043 subtarget
, &op0
, &op1
, 0);
9045 if (modifier
== EXPAND_STACK_PARM
)
9047 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9048 unsignedp
, OPTAB_LIB_WIDEN
);
/* NOTE(review): this region is a lossy extraction -- the embedded
   original line numbers skip values (e.g. 9073 -> 9076, 9084 -> 9087),
   so the early `return 0;` statements, braces and the function's return
   type line are missing from this view, and single statements are split
   across physical lines.  Only comments are added below; every original
   token is preserved byte-for-byte.

   Purpose (from the header comment): predicate that recognizes the
   idiom `offset = -addr & (align - 1)` used to over-align EXP's address.

   NOTE(review): the visible condition `!exact_log2 (...) < 0` looks
   defective -- `!x` evaluates to 0 or 1 and can never be < 0, so that
   disjunct is always false; upstream expr.c reads
   `exact_log2 (...) < 0` without the `!`.  Cannot be fixed here because
   the surrounding statements are missing -- TODO confirm against the
   canonical GCC expr.c.  */
9054 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9055 when applied to the address of EXP produces an address known to be
9056 aligned more than BIGGEST_ALIGNMENT. */
9059 is_aligning_offset (tree offset
, tree exp
)
9061 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9062 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9063 || TREE_CODE (offset
) == NOP_EXPR
9064 || TREE_CODE (offset
) == CONVERT_EXPR
9065 || TREE_CODE (offset
) == WITH_RECORD_EXPR
)
9066 offset
= TREE_OPERAND (offset
, 0);
9068 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9069 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9070 if (TREE_CODE (offset
) != BIT_AND_EXPR
9071 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9072 || compare_tree_int (TREE_OPERAND (offset
, 1), BIGGEST_ALIGNMENT
) <= 0
9073 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9076 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9077 It must be NEGATE_EXPR. Then strip any more conversions. */
9078 offset
= TREE_OPERAND (offset
, 0);
9079 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9080 || TREE_CODE (offset
) == NOP_EXPR
9081 || TREE_CODE (offset
) == CONVERT_EXPR
)
9082 offset
= TREE_OPERAND (offset
, 0);
9084 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9087 offset
= TREE_OPERAND (offset
, 0);
9088 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9089 || TREE_CODE (offset
) == NOP_EXPR
9090 || TREE_CODE (offset
) == CONVERT_EXPR
)
9091 offset
= TREE_OPERAND (offset
, 0);
9093 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9094 whose type is the same as EXP. */
9095 return (TREE_CODE (offset
) == ADDR_EXPR
9096 && (TREE_OPERAND (offset
, 0) == exp
9097 || (TREE_CODE (TREE_OPERAND (offset
, 0)) == PLACEHOLDER_EXPR
9098 && (TREE_TYPE (TREE_OPERAND (offset
, 0))
9099 == TREE_TYPE (exp
)))));
/* NOTE(review): lossy extraction -- embedded original line numbers skip
   (e.g. 9116 -> 9118, 9121 -> 9126, 9136 -> 9143), so braces, the
   fall-through `return 0;` and the STRIP_NOPS-style preamble are
   missing from this view, and statements are split across physical
   lines.  Comments only are added; all original tokens preserved.

   Recognizes ARG as `&"str"` or `&"str" + off` (either operand order of
   the PLUS_EXPR) and, on success, returns the STRING_CST and stores the
   byte offset (as `sizetype`) through PTR_OFFSET -- the caller owns
   nothing; tree nodes are GC-managed.  */
9102 /* Return the tree node if an ARG corresponds to a string constant or zero
9103 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9104 in bytes within the string that ARG is accessing. The type of the
9105 offset will be `sizetype'. */
9108 string_constant (tree arg
, tree
*ptr_offset
)
9112 if (TREE_CODE (arg
) == ADDR_EXPR
9113 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9115 *ptr_offset
= size_zero_node
;
9116 return TREE_OPERAND (arg
, 0);
9118 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9120 tree arg0
= TREE_OPERAND (arg
, 0);
9121 tree arg1
= TREE_OPERAND (arg
, 1);
9126 if (TREE_CODE (arg0
) == ADDR_EXPR
9127 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9129 *ptr_offset
= convert (sizetype
, arg1
);
9130 return TREE_OPERAND (arg0
, 0);
9132 else if (TREE_CODE (arg1
) == ADDR_EXPR
9133 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9135 *ptr_offset
= convert (sizetype
, arg0
);
9136 return TREE_OPERAND (arg1
, 0);
/* NOTE(review): lossy extraction -- embedded original line numbers skip
   throughout (e.g. 9160 -> 9163, 9208 -> 9213, 9242 -> 9246,
   9319 -> 9324, 9341 -> 9343), so variable declarations (op0, op1,
   temp, value, icode, bad_subreg, result), braces, `#endif`s and some
   returns are missing from this view, and statements are split across
   physical lines.  Comments only are added; all original tokens
   preserved.

   Contract (from the header and visible code): expands EXP, a
   {PRE,POST}{INC,DEC}REMENT_EXPR, into RTL via the add/sub optabs
   (trapping variants when TYPE_TRAP_SIGNED), queueing the increment
   with enqueue_insn when a single insn can do it; POST selects
   post- vs pre- semantics, IGNORE means the caller discards the value.
   Returns the result rtx (old value for post-increment).  */
9143 /* Expand code for a post- or pre- increment or decrement
9144 and return the RTX for the result.
9145 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9148 expand_increment (tree exp
, int post
, int ignore
)
9152 tree incremented
= TREE_OPERAND (exp
, 0);
9153 optab this_optab
= add_optab
;
9155 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9156 int op0_is_copy
= 0;
9157 int single_insn
= 0;
9158 /* 1 means we can't store into OP0 directly,
9159 because it is a subreg narrower than a word,
9160 and we don't dare clobber the rest of the word. */
9163 /* Stabilize any component ref that might need to be
9164 evaluated more than once below. */
9166 || TREE_CODE (incremented
) == BIT_FIELD_REF
9167 || (TREE_CODE (incremented
) == COMPONENT_REF
9168 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9169 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9170 incremented
= stabilize_reference (incremented
);
9171 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9172 ones into save exprs so that they don't accidentally get evaluated
9173 more than once by the code below. */
9174 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9175 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9176 incremented
= save_expr (incremented
);
9178 /* Compute the operands as RTX.
9179 Note whether OP0 is the actual lvalue or a copy of it:
9180 I believe it is a copy iff it is a register or subreg
9181 and insns were generated in computing it. */
9183 temp
= get_last_insn ();
9184 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9186 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9187 in place but instead must do sign- or zero-extension during assignment,
9188 so we copy it into a new register and let the code below use it as
9191 Note that we can safely modify this SUBREG since it is know not to be
9192 shared (it was made by the expand_expr call above). */
9194 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9197 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9201 else if (GET_CODE (op0
) == SUBREG
9202 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9204 /* We cannot increment this SUBREG in place. If we are
9205 post-incrementing, get a copy of the old value. Otherwise,
9206 just mark that we cannot increment in place. */
9208 op0
= copy_to_reg (op0
);
9213 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9214 && temp
!= get_last_insn ());
9215 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9217 /* Decide whether incrementing or decrementing. */
9218 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9219 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9220 this_optab
= sub_optab
;
9222 /* Convert decrement by a constant into a negative increment. */
9223 if (this_optab
== sub_optab
9224 && GET_CODE (op1
) == CONST_INT
)
9226 op1
= GEN_INT (-INTVAL (op1
));
9227 this_optab
= add_optab
;
9230 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9231 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9233 /* For a preincrement, see if we can do this with a single instruction. */
9236 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9237 if (icode
!= (int) CODE_FOR_nothing
9238 /* Make sure that OP0 is valid for operands 0 and 1
9239 of the insn we want to queue. */
9240 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9241 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9242 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9246 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9247 then we cannot just increment OP0. We must therefore contrive to
9248 increment the original value. Then, for postincrement, we can return
9249 OP0 since it is a copy of the old value. For preincrement, expand here
9250 unless we can do it with a single insn.
9252 Likewise if storing directly into OP0 would clobber high bits
9253 we need to preserve (bad_subreg). */
9254 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9256 /* This is the easiest way to increment the value wherever it is.
9257 Problems with multiple evaluation of INCREMENTED are prevented
9258 because either (1) it is a component_ref or preincrement,
9259 in which case it was stabilized above, or (2) it is an array_ref
9260 with constant index in an array in a register, which is
9261 safe to reevaluate. */
9262 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9263 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9264 ? MINUS_EXPR
: PLUS_EXPR
),
9267 TREE_OPERAND (exp
, 1));
9269 while (TREE_CODE (incremented
) == NOP_EXPR
9270 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9272 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9273 incremented
= TREE_OPERAND (incremented
, 0);
9276 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9277 return post
? op0
: temp
;
9282 /* We have a true reference to the value in OP0.
9283 If there is an insn to add or subtract in this mode, queue it.
9284 Queuing the increment insn avoids the register shuffling
9285 that often results if we must increment now and first save
9286 the old value for subsequent use. */
9288 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9289 op0
= stabilize (op0
);
9292 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9293 if (icode
!= (int) CODE_FOR_nothing
9294 /* Make sure that OP0 is valid for operands 0 and 1
9295 of the insn we want to queue. */
9296 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9297 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9299 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9300 op1
= force_reg (mode
, op1
);
9302 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9304 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9306 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9307 ? force_reg (Pmode
, XEXP (op0
, 0))
9308 : copy_to_reg (XEXP (op0
, 0)));
9311 op0
= replace_equiv_address (op0
, addr
);
9312 temp
= force_reg (GET_MODE (op0
), op0
);
9313 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9314 op1
= force_reg (mode
, op1
);
9316 /* The increment queue is LIFO, thus we have to `queue'
9317 the instructions in reverse order. */
9318 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9319 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9324 /* Preincrement, or we can't increment with one simple insn. */
9326 /* Save a copy of the value before inc or dec, to return it later. */
9327 temp
= value
= copy_to_reg (op0
);
9329 /* Arrange to return the incremented value. */
9330 /* Copy the rtx because expand_binop will protect from the queue,
9331 and the results of that would be invalid for us to return
9332 if our caller does emit_queue before using our result. */
9333 temp
= copy_rtx (value
= op0
);
9335 /* Increment however we can. */
9336 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9337 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9339 /* Make sure the value is stored into OP0. */
9341 emit_move_insn (op0
, op1
);
/* NOTE(review): lossy extraction -- embedded original line numbers skip
   throughout (e.g. 9372 -> 9376, 9413 -> 9420, 9460 -> 9486,
   9531 -> 9537, 9589 -> 9596), so declarations (op0, op1, invert,
   unsignedp, code, result, label, tem), many `return 0;`/`break;`
   statements, case labels and the whole UNORDERED..LTGT case bodies are
   missing from this view, and statements are split across physical
   lines.  Comments only are added; all original tokens preserved.

   Contract (from the header): EXP is a comparison (or TRUTH_NOT_EXPR of
   one); emit a store-flag (scc) insn computing its 0/1 value in MODE,
   preferring TARGET; return the result rtx, or 0 if no suitable scc
   strategy exists.  ONLY_CHEAP restricts to the cheap cases.  Falls
   back to a set/compare/jump/set sequence if emit_store_flag fails.  */
9346 /* Generate code to calculate EXP using a store-flag instruction
9347 and return an rtx for the result. EXP is either a comparison
9348 or a TRUTH_NOT_EXPR whose operand is a comparison.
9350 If TARGET is nonzero, store the result there if convenient.
9352 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9355 Return zero if there is no suitable set-flag instruction
9356 available on this machine.
9358 Once expand_expr has been called on the arguments of the comparison,
9359 we are committed to doing the store flag, since it is not safe to
9360 re-evaluate the expression. We emit the store-flag insn by calling
9361 emit_store_flag, but only expand the arguments if we have a reason
9362 to believe that emit_store_flag will be successful. If we think that
9363 it will, but it isn't, we have to simulate the store-flag with a
9364 set/jump/set sequence. */
9367 do_store_flag (tree exp
, rtx target
, enum machine_mode mode
, int only_cheap
)
9370 tree arg0
, arg1
, type
;
9372 enum machine_mode operand_mode
;
9376 enum insn_code icode
;
9377 rtx subtarget
= target
;
9380 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9381 result at the end. We can't simply invert the test since it would
9382 have already been inverted if it were valid. This case occurs for
9383 some floating-point comparisons. */
9385 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9386 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9388 arg0
= TREE_OPERAND (exp
, 0);
9389 arg1
= TREE_OPERAND (exp
, 1);
9391 /* Don't crash if the comparison was erroneous. */
9392 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9395 type
= TREE_TYPE (arg0
);
9396 operand_mode
= TYPE_MODE (type
);
9397 unsignedp
= TREE_UNSIGNED (type
);
9399 /* We won't bother with BLKmode store-flag operations because it would mean
9400 passing a lot of information to emit_store_flag. */
9401 if (operand_mode
== BLKmode
)
9404 /* We won't bother with store-flag operations involving function pointers
9405 when function pointers must be canonicalized before comparisons. */
9406 #ifdef HAVE_canonicalize_funcptr_for_compare
9407 if (HAVE_canonicalize_funcptr_for_compare
9408 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9409 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9411 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9412 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9413 == FUNCTION_TYPE
))))
9420 /* Get the rtx comparison code to use. We know that EXP is a comparison
9421 operation of some type. Some comparisons against 1 and -1 can be
9422 converted to comparisons with zero. Do so here so that the tests
9423 below will be aware that we have a comparison with zero. These
9424 tests will not catch constants in the first operand, but constants
9425 are rarely passed as the first operand. */
9427 switch (TREE_CODE (exp
))
9436 if (integer_onep (arg1
))
9437 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9439 code
= unsignedp
? LTU
: LT
;
9442 if (! unsignedp
&& integer_all_onesp (arg1
))
9443 arg1
= integer_zero_node
, code
= LT
;
9445 code
= unsignedp
? LEU
: LE
;
9448 if (! unsignedp
&& integer_all_onesp (arg1
))
9449 arg1
= integer_zero_node
, code
= GE
;
9451 code
= unsignedp
? GTU
: GT
;
9454 if (integer_onep (arg1
))
9455 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9457 code
= unsignedp
? GEU
: GE
;
9460 case UNORDERED_EXPR
:
9486 /* Put a constant second. */
9487 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9489 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9490 code
= swap_condition (code
);
9493 /* If this is an equality or inequality test of a single bit, we can
9494 do this by shifting the bit being tested to the low-order bit and
9495 masking the result with the constant 1. If the condition was EQ,
9496 we xor it with 1. This does not require an scc insn and is faster
9497 than an scc insn even if we have it.
9499 The code to make this transformation was moved into fold_single_bit_test,
9500 so we just call into the folder and expand its result. */
9502 if ((code
== NE
|| code
== EQ
)
9503 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9504 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9506 tree type
= (*lang_hooks
.types
.type_for_mode
) (mode
, unsignedp
);
9507 return expand_expr (fold_single_bit_test (code
== NE
? NE_EXPR
: EQ_EXPR
,
9509 target
, VOIDmode
, EXPAND_NORMAL
);
9512 /* Now see if we are likely to be able to do this. Return if not. */
9513 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
9516 icode
= setcc_gen_code
[(int) code
];
9517 if (icode
== CODE_FOR_nothing
9518 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
9520 /* We can only do this if it is one of the special cases that
9521 can be handled without an scc insn. */
9522 if ((code
== LT
&& integer_zerop (arg1
))
9523 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
9525 else if (BRANCH_COST
>= 0
9526 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
9527 && TREE_CODE (type
) != REAL_TYPE
9528 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
9529 != CODE_FOR_nothing
)
9530 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
9531 != CODE_FOR_nothing
)))
9537 if (! get_subtarget (target
)
9538 || GET_MODE (subtarget
) != operand_mode
)
9541 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, 0);
9544 target
= gen_reg_rtx (mode
);
9546 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9547 because, if the emit_store_flag does anything it will succeed and
9548 OP0 and OP1 will not be used subsequently. */
9550 result
= emit_store_flag (target
, code
,
9551 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
9552 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
9553 operand_mode
, unsignedp
, 1);
9558 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
9559 result
, 0, OPTAB_LIB_WIDEN
);
9563 /* If this failed, we have to do this with set/compare/jump/set code. */
9564 if (GET_CODE (target
) != REG
9565 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
9566 target
= gen_reg_rtx (GET_MODE (target
));
9568 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
9569 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
9570 operand_mode
, NULL_RTX
);
9571 if (GET_CODE (result
) == CONST_INT
)
9572 return (((result
== const0_rtx
&& ! invert
)
9573 || (result
!= const0_rtx
&& invert
))
9574 ? const0_rtx
: const1_rtx
);
9576 /* The code of RESULT may not match CODE if compare_from_rtx
9577 decided to swap its operands and reverse the original code.
9579 We know that compare_from_rtx returns either a CONST_INT or
9580 a new comparison code, so it is safe to just extract the
9581 code from RESULT. */
9582 code
= GET_CODE (result
);
9584 label
= gen_label_rtx ();
9585 if (bcc_gen_fctn
[(int) code
] == 0)
9588 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
9589 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
9596 /* Stubs in case we haven't got a casesi insn. */
9598 # define HAVE_casesi 0
9599 # define gen_casesi(a, b, c, d, e) (0)
9600 # define CODE_FOR_casesi CODE_FOR_nothing
9603 /* If the machine does not have a case insn that compares the bounds,
9604 this means extra overhead for dispatch tables, which raises the
9605 threshold for using them. */
9606 #ifndef CASE_VALUES_THRESHOLD
9607 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9608 #endif /* CASE_VALUES_THRESHOLD */
/* Minimum number of case values before a dispatch table beats a compare
   chain; simply reports the target macro CASE_VALUES_THRESHOLD defined
   just above.  NOTE(review): the return-type line, braces and closing
   `}` are missing from this lossy extraction view; only the body lines
   below are visible and are preserved byte-for-byte.  */
9611 case_values_threshold (void)
9613 return CASE_VALUES_THRESHOLD
;
/* NOTE(review): lossy extraction -- embedded original line numbers skip
   (e.g. 9625 -> 9630, 9644 -> 9648, 9662 -> 9664, 9685 -> 9689), so
   the `if (! HAVE_casesi) return 0;` guard, braces, predicate-call
   argument lines and the final `return 1;` are missing from this view,
   and statements are split across physical lines.  Comments only are
   added; all original tokens preserved.

   Emits a `casesi` dispatch: narrows INDEX_EXPR to SImode (bounds are
   checked in the original mode first when the index is wider), then
   feeds index / minval / range through each casesi operand's
   mode+predicate before emitting the jump.  */
9616 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9617 0 otherwise (i.e. if there is no casesi instruction). */
9619 try_casesi (tree index_type
, tree index_expr
, tree minval
, tree range
,
9620 rtx table_label ATTRIBUTE_UNUSED
, rtx default_label
)
9622 enum machine_mode index_mode
= SImode
;
9623 int index_bits
= GET_MODE_BITSIZE (index_mode
);
9624 rtx op1
, op2
, index
;
9625 enum machine_mode op_mode
;
9630 /* Convert the index to SImode. */
9631 if (GET_MODE_BITSIZE (TYPE_MODE (index_type
)) > GET_MODE_BITSIZE (index_mode
))
9633 enum machine_mode omode
= TYPE_MODE (index_type
);
9634 rtx rangertx
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9636 /* We must handle the endpoints in the original mode. */
9637 index_expr
= build (MINUS_EXPR
, index_type
,
9638 index_expr
, minval
);
9639 minval
= integer_zero_node
;
9640 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9641 emit_cmp_and_jump_insns (rangertx
, index
, LTU
, NULL_RTX
,
9642 omode
, 1, default_label
);
9643 /* Now we can safely truncate. */
9644 index
= convert_to_mode (index_mode
, index
, 0);
9648 if (TYPE_MODE (index_type
) != index_mode
)
9650 index_expr
= convert ((*lang_hooks
.types
.type_for_size
)
9651 (index_bits
, 0), index_expr
);
9652 index_type
= TREE_TYPE (index_expr
);
9655 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9658 index
= protect_from_queue (index
, 0);
9659 do_pending_stack_adjust ();
9661 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[0].mode
;
9662 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[0].predicate
)
9664 index
= copy_to_mode_reg (op_mode
, index
);
9666 op1
= expand_expr (minval
, NULL_RTX
, VOIDmode
, 0);
9668 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[1].mode
;
9669 op1
= convert_modes (op_mode
, TYPE_MODE (TREE_TYPE (minval
)),
9670 op1
, TREE_UNSIGNED (TREE_TYPE (minval
)));
9671 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[1].predicate
)
9673 op1
= copy_to_mode_reg (op_mode
, op1
);
9675 op2
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9677 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[2].mode
;
9678 op2
= convert_modes (op_mode
, TYPE_MODE (TREE_TYPE (range
)),
9679 op2
, TREE_UNSIGNED (TREE_TYPE (range
)));
9680 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[2].predicate
)
9682 op2
= copy_to_mode_reg (op_mode
, op2
);
9684 emit_jump_insn (gen_casesi (index
, op1
, op2
,
9685 table_label
, default_label
));
9689 /* Attempt to generate a tablejump instruction; same concept. */
9690 #ifndef HAVE_tablejump
9691 #define HAVE_tablejump 0
9692 #define gen_tablejump(x, y) (0)
/* NOTE(review): lossy extraction -- embedded original line numbers skip
   (e.g. 9707 -> 9712, 9736 -> 9739, 9751 -> 9753, 9763 -> 9765), so
   the DEFAULT_LABEL parameter, declarations of temp/vector, `#else`/
   `#endif` lines, braces and the trailing emit_barrier call are missing
   from this view, and statements are split across physical lines.
   Comments only are added; all original tokens preserved.

   Range-checks INDEX against RANGE (one unsigned GTU compare covers
   both bounds, since the lower bound was already subtracted), scales it
   by the jump-table entry size, loads the table entry and emits an
   indirect jump.  */
9695 /* Subroutine of the next function.
9697 INDEX is the value being switched on, with the lowest value
9698 in the table already subtracted.
9699 MODE is its expected mode (needed if INDEX is constant).
9700 RANGE is the length of the jump table.
9701 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9703 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9704 index value is out of range. */
9707 do_tablejump (rtx index
, enum machine_mode mode
, rtx range
, rtx table_label
,
9712 if (INTVAL (range
) > cfun
->max_jumptable_ents
)
9713 cfun
->max_jumptable_ents
= INTVAL (range
);
9715 /* Do an unsigned comparison (in the proper mode) between the index
9716 expression and the value which represents the length of the range.
9717 Since we just finished subtracting the lower bound of the range
9718 from the index expression, this comparison allows us to simultaneously
9719 check that the original index expression value is both greater than
9720 or equal to the minimum value of the range and less than or equal to
9721 the maximum value of the range. */
9723 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
9726 /* If index is in range, it must fit in Pmode.
9727 Convert to Pmode so we can index with it. */
9729 index
= convert_to_mode (Pmode
, index
, 1);
9731 /* Don't let a MEM slip through, because then INDEX that comes
9732 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9733 and break_out_memory_refs will go to work on it and mess it up. */
9734 #ifdef PIC_CASE_VECTOR_ADDRESS
9735 if (flag_pic
&& GET_CODE (index
) != REG
)
9736 index
= copy_to_mode_reg (Pmode
, index
);
9739 /* If flag_force_addr were to affect this address
9740 it could interfere with the tricky assumptions made
9741 about addresses that contain label-refs,
9742 which may be valid only very near the tablejump itself. */
9743 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9744 GET_MODE_SIZE, because this indicates how large insns are. The other
9745 uses should all be Pmode, because they are addresses. This code
9746 could fail if addresses and insns are not the same size. */
9747 index
= gen_rtx_PLUS (Pmode
,
9748 gen_rtx_MULT (Pmode
, index
,
9749 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
9750 gen_rtx_LABEL_REF (Pmode
, table_label
));
9751 #ifdef PIC_CASE_VECTOR_ADDRESS
9753 index
= PIC_CASE_VECTOR_ADDRESS (index
);
9756 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
9757 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
9758 vector
= gen_rtx_MEM (CASE_VECTOR_MODE
, index
);
9759 RTX_UNCHANGING_P (vector
) = 1;
9760 MEM_NOTRAP_P (vector
) = 1;
9761 convert_move (temp
, vector
, 0);
9763 emit_jump_insn (gen_tablejump (temp
, table_label
));
9765 /* If we are generating PIC code or if the table is PC-relative, the
9766 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9767 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
/* Companion to try_casesi: subtracts MINVAL from INDEX_EXPR and hands
   the result to do_tablejump.  Presumably returns nonzero on success
   like try_casesi -- TODO confirm; the return statements, the return
   type line, declarations and braces are missing from this lossy
   extraction view (embedded line numbers skip, e.g. 9777 -> 9780,
   9794 -> 9798).  Comments only are added; all original tokens
   preserved.  */
9772 try_tablejump (tree index_type
, tree index_expr
, tree minval
, tree range
,
9773 rtx table_label
, rtx default_label
)
9777 if (! HAVE_tablejump
)
9780 index_expr
= fold (build (MINUS_EXPR
, index_type
,
9781 convert (index_type
, index_expr
),
9782 convert (index_type
, minval
)));
9783 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9785 index
= protect_from_queue (index
, 0);
9786 do_pending_stack_adjust ();
9788 do_tablejump (index
, TYPE_MODE (index_type
),
9789 convert_modes (TYPE_MODE (index_type
),
9790 TYPE_MODE (TREE_TYPE (range
)),
9791 expand_expr (range
, NULL_RTX
,
9793 TREE_UNSIGNED (TREE_TYPE (range
))),
9794 table_label
, default_label
);
/* NOTE(review): lossy extraction -- embedded line numbers skip
   (9800 -> 9803, 9810 -> 9813, 9814 -> 9817), so the return type line,
   `return 0;` / `return 1;` bodies of the two guards and braces are
   missing from this view.  Comments only are added; all original tokens
   preserved.  Note `class` as an identifier marks this as C, not C++.

   Accepts only MODE_VECTOR_INT / MODE_VECTOR_FLOAT modes; true when the
   target supports the vector mode directly, or when its inner element
   mode is movable so the vector can be emulated piecewise.  */
9798 /* Nonzero if the mode is a valid vector mode for this architecture.
9799 This returns nonzero even if there is no hardware support for the
9800 vector mode, but we can emulate with narrower modes. */
9803 vector_mode_valid_p (enum machine_mode mode
)
9805 enum mode_class
class = GET_MODE_CLASS (mode
);
9806 enum machine_mode innermode
;
9808 /* Doh! What's going on? */
9809 if (class != MODE_VECTOR_INT
9810 && class != MODE_VECTOR_FLOAT
)
9813 /* Hardware support. Woo hoo! */
9814 if (VECTOR_MODE_SUPPORTED_P (mode
))
9817 innermode
= GET_MODE_INNER (mode
);
9819 /* We should probably return 1 if requesting V4DI and we have no DI,
9820 but we have V2DI, but this is probably very unlikely. */
9822 /* If we have support for the inner mode, we can safely emulate it.
9823 We may not have V2DI, but me can emulate with a pair of DIs. */
9824 return mov_optab
->handlers
[innermode
].insn_code
!= CODE_FOR_nothing
;
/* NOTE(review): lossy extraction -- embedded line numbers skip
   (9829 -> 9834, 9847 -> 9849, 9856 -> 9860), so the declarations of
   v / units / link / elt / i, the REAL_CST mode argument, the integer
   `inner` mode argument and braces are missing from this view.
   Comments only are added; all original tokens preserved.

   Builds a CONST_VECTOR rtx from a VECTOR_CST tree: all-zero vectors
   short-circuit to CONST0_RTX; otherwise each element chain entry is
   converted (CONST_DOUBLE for REAL_CST, immed_double_const for
   integers) and any unspecified trailing elements are zero-filled.  */
9827 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9829 const_vector_from_tree (tree exp
)
9834 enum machine_mode inner
, mode
;
9836 mode
= TYPE_MODE (TREE_TYPE (exp
));
9838 if (is_zeros_p (exp
))
9839 return CONST0_RTX (mode
);
9841 units
= GET_MODE_NUNITS (mode
);
9842 inner
= GET_MODE_INNER (mode
);
9844 v
= rtvec_alloc (units
);
9846 link
= TREE_VECTOR_CST_ELTS (exp
);
9847 for (i
= 0; link
; link
= TREE_CHAIN (link
), ++i
)
9849 elt
= TREE_VALUE (link
);
9851 if (TREE_CODE (elt
) == REAL_CST
)
9852 RTVEC_ELT (v
, i
) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt
),
9855 RTVEC_ELT (v
, i
) = immed_double_const (TREE_INT_CST_LOW (elt
),
9856 TREE_INT_CST_HIGH (elt
),
9860 /* Initialize remaining elements to 0. */
9861 for (; i
< units
; ++i
)
9862 RTVEC_ELT (v
, i
) = CONST0_RTX (inner
);
9864 return gen_rtx_raw_CONST_VECTOR (mode
, v
);
9867 #include "gt-expr.h"