/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;
static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx,
					 rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
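
/* For example (illustrative numbers only): copying 16 word-aligned bytes
   on a 32-bit target costs four SImode moves, and 4 < MOVE_RATIO (15 when
   not optimizing for size), so such a copy is done by pieces rather than
   through a movstr pattern or a library call.  */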
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
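
/* Usage sketch (illustrative, not code from this file): when expanding
   V++ where the old value is still needed, one would emit

	rtx old = enqueue_insn (v, gen_add2_insn (v, const1_rtx));

   and later pass OLD through protect_from_queue (below) wherever the
   pre-increment value of V is required.  */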
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
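
/* Usage sketch (illustrative): operands that may contain a QUEUED are
   laundered just before being placed in an insn, as convert_move below
   does with

	to = protect_from_queue (to, 1);	(TO will be modified)
	from = protect_from_queue (from, 0);	(FROM is only read)

   and the results must be consumed before the next call to emit_queue.  */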
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
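
  /* For example (illustrative): widening an SImode value to DImode on a
     32-bit target copies the low word with emit_move_insn and then stores
     FILL_VALUE into the remaining word: zero when unsigned, otherwise a
     word of sign-bit copies produced by the arithmetic right shift
     above.  */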
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
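
  /* For example (illustrative): extending QImode to SImode with no usable
     extend insn or intermediate mode shifts the value left by 24 bits and
     then right by 24 bits; expand_shift emits a logical right shift when
     UNSIGNEDP is nonzero (zero extension) and an arithmetic one otherwise
     (sign extension).  */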
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}
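
      /* Worked example (illustrative): X = (const_int 0x80), OLDMODE =
	 QImode, MODE = HImode, UNSIGNEDP = 0: VAL is masked to 0x80, its
	 sign bit (1 << 7) is found set, so the bits above WIDTH are
	 filled in and GEN_INT yields -128 correctly extended to HImode.  */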
      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
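
/* Example (illustrative, assuming MOVE_MAX is 4 and QI/HI/SImode moves
   are available): L = 7 at 32-bit alignment costs one SImode move (7 / 4),
   one HImode move (3 / 2) and one QImode move, so this function
   returns 3.  */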
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
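
/* Usage sketch (illustrative, not from this file): a 16-byte BLKmode
   structure assignment typically reaches this routine as

	emit_block_move (dst_mem, src_mem, GEN_INT (16));

   where both operands are BLKmode MEMs and the constant size lets the
   MOVE_BY_PIECES_P test above pick the inline strategy.  */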
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
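
/* Illustrative example: for a 16-byte structure returned in two DImode
   registers, DST might be

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   where each const_int is the byte offset of that piece within SRC.  */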
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! MOVE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = MOVE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
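/* For illustration only, a minimal CONSTFUN callback of the shape this
   function (and store_by_pieces below) expects might read successive
   MODE-sized pieces out of a constant string:

       static rtx
       example_constfun (data, offset, mode)
	    PTR data;
	    HOST_WIDE_INT offset;
	    enum machine_mode mode;
       {
	 return c_readstr ((const char *) data + offset, mode);
       }

   The name example_constfun is hypothetical; builtins.c uses callbacks of
   this shape (built on c_readstr) when expanding memcpy/strcpy of a
   constant string.  */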
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */

void
store_by_pieces (to, len, constfun, constfundata, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
  struct store_by_pieces data;

  if (! MOVE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
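/* A typical call sequence, sketched for illustration: the caller first asks
   can_store_by_pieces (len, constfun, data, align) and only on a nonzero
   answer calls store_by_pieces (to, len, constfun, data, align); calling
   store_by_pieces when MOVE_BY_PIECES_P is false aborts, as above.  */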
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct store_by_pieces data;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (data, align)
     struct store_by_pieces *data;
     unsigned int align;
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_addr_to_reg (to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
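/* A worked example of the sizing loop above, for illustration (hypothetical
   32-bit target, MOVE_MAX_PIECES == 4, LEN == 7, sufficient alignment):
   max_size starts at 5, so the widest qualifying mode is SImode and bytes
   0-3 are stored, leaving len == 3; max_size then drops to 4, an HImode
   pass stores bytes 4-5, and a final QImode pass stores byte 6, leaving
   len == 0 so the abort check passes.  */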
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct store_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
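/* For illustration: when data->reverse is set (PRE_DEC/POST_DEC style
   addressing), the offset is decremented *before* each store, so the block
   is filled from its high end downward; with explicit_inc_to < 0 the
   gen_add2_insn of -size above performs that pre-decrement by hand on the
   copied address register when the mode has no auto-decrement move.  */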
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (object, size)
     rtx object;
     rtx size;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
			: GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];
	      insn_operand_predicate_fn pred;

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		      || (*pred) (object, BLKmode))
		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
		      || (*pred) (opalign, VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  pred = insn_data[(int) code].operand[1].predicate;
		  if (pred != 0 && ! (*pred) (op1, mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

	  /* OBJECT or SIZE may have been passed through protect_from_queue.

	     It is unsafe to save the value generated by protect_from_queue
	     and reuse it later.  Consider what happens if emit_queue is
	     called before the return value from protect_from_queue is used.

	     Expansion of the CALL_EXPR below will call emit_queue before
	     we are finished emitting RTL for argument setup.  So if we are
	     not careful we could get the wrong value for an argument.

	     To avoid this problem we go ahead and emit code to copy OBJECT
	     and SIZE into new pseudos.  We can then place those new pseudos
	     into an RTL_EXPR and use them later, even after a call to
	     emit_queue.

	     Note this is not strictly needed for library calls since they
	     do not call emit_queue before loading their arguments.  However,
	     we may need to have library calls call emit_queue in the future
	     since failing to do so could cause problems for targets which
	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
				  TREE_UNSIGNED (integer_type_node));
	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
	  /* It is incorrect to use the libcall calling conventions to call
	     memset in this context.

	     This could be a user call to memset and the user may wish to
	     examine the return value from memset.

	     For targets where libcalls and normal calls have different
	     conventions for returning pointers, we could end up generating
	     incorrect code.

	     So instead of using a libcall sequence we build up a suitable
	     CALL_EXPR and expand the call in the normal fashion.  */
	  if (fn == NULL_TREE)
	    {
	      tree fntype;

	      /* This was copied from except.c, I don't know if all this is
		 necessary in this context or not.  */
	      fn = get_identifier ("memset");
	      fntype = build_pointer_type (void_type_node);
	      fntype = build_function_type (fntype, NULL_TREE);
	      fn = build_decl (FUNCTION_DECL, fn, fntype);
	      ggc_add_tree_root (&fn, 1);
	      DECL_EXTERNAL (fn) = 1;
	      TREE_PUBLIC (fn) = 1;
	      DECL_ARTIFICIAL (fn) = 1;
	      TREE_NOTHROW (fn) = 1;
	      make_decl_rtl (fn, NULL);
	      assemble_external (fn);
	    }

	  /* We need to make an argument list for the function call.

	     memset has three arguments, the first is a void * address, the
	     second an integer with the initialization value, the last is a
	     size_t byte count for the copy.  */
	  arg_list
	    = build_tree_list (NULL_TREE,
			       make_tree (build_pointer_type (void_type_node),
					  object));
	  TREE_CHAIN (arg_list)
	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
	  TREE_CHAIN (TREE_CHAIN (arg_list))
	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

	  /* Now we have to build up the CALL_EXPR itself.  */
	  call_expr = build1 (ADDR_EXPR,
			      build_pointer_type (TREE_TYPE (fn)), fn);
	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			     call_expr, arg_list, NULL_TREE);
	  TREE_SIDE_EFFECTS (call_expr) = 1;

	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
	  emit_library_call (bzero_libfunc, LCT_NORMAL,
			     VOIDmode, 2, object, Pmode, size,
			     TYPE_MODE (integer_type_node));
#endif

	  /* If we are initializing a readonly value, show the above call
	     clobbered it.  Otherwise, a load from it may erroneously be
	     hoisted from a loop.  */
	  if (RTX_UNCHANGING_P (object))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
	}
    }

  return retval;
}
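/* The strategy above, summarized for illustration: a non-BLKmode object
   whose mode size matches SIZE is cleared with one move of CONST0_RTX; a
   small constant SIZE goes through clear_by_pieces; otherwise the clrstr
   insn patterns are tried from narrowest to widest mode, and only if none
   of them matches do we fall back to calling memset (or bzero) at run
   time.  */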
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    {
      y_cst = y;
      y = force_const_mem (mode, y);
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && GET_CODE (x) == REG)
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
      if (stack
	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
	      != GET_MODE_SIZE (submode)))
	{
	  rtx temp;
	  int offset1, offset2;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
	  offset1 = 0;
	  offset2 = GET_MODE_SIZE (submode);
#else
	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
		     + GET_MODE_SIZE (submode));
#endif

	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset1))),
			  gen_realpart (submode, y));
	  emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset2))),
			  gen_imagpart (submode, y));
	}
      else
#endif
      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, XEXP (x, 0)),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p
		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p
		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode
		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);
		      rtx cmem = adjust_address (mem, mode, 0);

		      cfun->cannot_inline
			= N_("function using short complex types cannot be inline");

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (realpart_x, realpart_y));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (imagpart_x, imagpart_y));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  rtx temp;
	  enum rtx_code code;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

	  code = GET_CODE (XEXP (x, 0));

	  /* Just hope that small offsets off SP are OK.  */
	  if (code == POST_INC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (-(HOST_WIDE_INT)
					   GET_MODE_SIZE (GET_MODE (x))));
	  else if (code == POST_DEC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  else
	    temp = stack_pointer_rtx;

	  x = change_address (x, VOIDmode, temp);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
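/* A worked example of the multi-word fallback, for illustration: moving a
   DImode value on a hypothetical 32-bit target with no DImode move pattern
   iterates twice, emitting one word move each for subwords 0 and 1 via
   operand_subword; the CLOBBER is emitted first whenever a destination
   subword is a SUBREG of a pseudo, so flow analysis sees the whole register
   die at this point.  */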
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
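/* A worked example, for illustration (hypothetical target with
   STACK_GROWS_DOWNWARD defined): push_block (GEN_INT (16), 0, 0) emits
   anti_adjust_stack (GEN_INT (16)) and then returns an address based on
   virtual_outgoing_args_rtx itself, since the freshly allocated block
   begins right at the bottom of the outgoing-argument area.  */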
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
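/* For illustration: pushing an HImode value when PUSH_ROUNDING rounds 2
   bytes up to 4 takes the second branch above; on a downward-growing stack
   the address becomes (pre_modify sp (plus sp -4)), so the stack pointer
   moves by the rounded amount while the store itself is still only 2 bytes
   wide.  */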
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space,
		alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (used)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);

	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
				   Pmode, XEXP (xinner, 0), Pmode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
				   Pmode, GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				   target, Pmode,
				   XEXP (xinner, 0), Pmode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				   target, Pmode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (target, 0);
	    }
	  else
	    set_mem_align (target, align);

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
	    {
	      move_by_pieces (target, xinner, INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	      enum machine_mode mode;

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];
		  insn_operand_predicate_fn pred;

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (!(pred = insn_data[(int) code].operand[0].predicate)
			  || ((*pred) (target, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[1].predicate)
			  || ((*pred) (xinner, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[3].predicate)
			  || ((*pred) (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      pred = insn_data[(int) code].operand[2].predicate;
		      if (pred != 0 && ! (*pred) (op2, mode))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	  dest = gen_rtx_MEM (mode, addr);
	  if (type != 0)
	    {
	      set_mem_attributes (dest, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (dest, 0);
	    }

	  emit_move_insn (dest, x);
	}

      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, XEXP (x, 0), Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
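/* A worked example of the PARTIAL handling, for illustration (hypothetical
   32-bit target, PARM_BOUNDARY == 32): a 12-byte BLKmode argument with
   partial == 1 gives used == 4, so xinner is advanced past the first word,
   only the remaining 8 bytes are pushed or copied onto the stack, and the
   first word is loaded into REG at the end via move_block_to_reg.  */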
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx
	= expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      rtx temp
		= adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);

	      if (GET_CODE (XEXP (temp, 0)) == REG)
		to_rtx = temp;
	      else
		to_rtx = (replace_equiv_address
			  (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
					      XEXP (temp, 0))));
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor (offset));
	}

      if (GET_CODE (to_rtx) == MEM)
	{
	  tree old_expr = MEM_EXPR (to_rtx);

	  /* If the field is at offset zero, we could have been given the
	     DECL_RTX of the parent struct.  Don't munge it.  */
	  to_rtx = shallow_copy_rtx (to_rtx);

	  set_mem_attributes (to_rtx, to, 0);

	  /* If we changed MEM_EXPR, that means we're now referencing
	     the COMPONENT_REF, which means that MEM_OFFSET must be
	     relative to that field.  But we've not yet reflected BITPOS
	     in TO_RTX.  This will be done in store_field.  Adjust for
	     that by biasing MEM_OFFSET by -bitpos.  */
	  if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
	      && (bitpos / BITS_PER_UNIT) != 0)
	    set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
					     - (bitpos / BITS_PER_UNIT)));
	}

      /* Deal with volatile and readonly fields.  The former is only done
	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_VOLATILE_P (to_rtx) = 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      if (! can_address_p (to))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     mode1, volatilep);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), bitpos / BITS_PER_UNIT);
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  in_check_memory_usage = 1;
	  emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			     VOIDmode, 3, to_addr, Pmode,
			     GEN_INT (size), TYPE_MODE (sizetype),
			     GEN_INT (MEMORY_USE_WO),
			     TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast for HPUX compiler.  */
			     ? ((enum machine_mode)
				TYPE_MODE (TREE_TYPE (to)))
			     : VOIDmode),
			    unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from));
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (POINTER_TYPE_P (TREE_TYPE (to))
	      && GET_MODE (to_rtx) != GET_MODE (value))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
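/* For illustration: an assignment such as `s.f = x' where f is a bit-field
   reaches the COMPONENT_REF arm above; get_inner_reference decomposes the
   reference into TEM (the containing object), BITSIZE and BITPOS, and
   store_field then emits the store_bit_field sequence against the adjusted
   TO_RTX.  */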
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					TREE_TYPE (exp)),
	       exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp)
	    = SUBREG_PROMOTED_UNSIGNED_P (target);
	}

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      in_check_memory_usage = 1;
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   XEXP (temp, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
      in_check_memory_usage = 0;
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp
))
4267 emit_block_move (target
, temp
, size
);
4270 /* Compute the size of the data to copy from the string. */
4272 = size_binop (MIN_EXPR
,
4273 make_tree (sizetype
, size
),
4274 size_int (TREE_STRING_LENGTH (exp
)));
4275 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4279 /* Copy that much. */
4280 emit_block_move (target
, temp
, copy_size_rtx
);
4282 /* Figure out how much is left in TARGET that we have to clear.
4283 Do all calculations in ptr_mode. */
4285 addr
= XEXP (target
, 0);
4286 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
4288 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4290 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
4291 size
= plus_constant (size
, -TREE_STRING_LENGTH (exp
));
4295 addr
= force_reg (ptr_mode
, addr
);
4296 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4297 copy_size_rtx
, NULL_RTX
, 0,
4300 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4301 copy_size_rtx
, NULL_RTX
, 0,
4304 label
= gen_label_rtx ();
4305 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4306 GET_MODE (size
), 0, label
);
4309 if (size
!= const0_rtx
)
4311 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4313 MEM_COPY_ATTRIBUTES (dest
, target
);
4315 /* The residual likely does not have the same alignment
4316 as the original target. While we could compute the
4317 alignment of the residual, it hardely seems worth
4319 set_mem_align (dest
, BITS_PER_UNIT
);
4321 /* Be sure we can write on ADDR. */
4322 in_check_memory_usage
= 1;
4323 if (current_function_check_memory_usage
)
4324 emit_library_call (chkr_check_addr_libfunc
,
4325 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
4327 size
, TYPE_MODE (sizetype
),
4328 GEN_INT (MEMORY_USE_WO
),
4329 TYPE_MODE (integer_type_node
));
4330 in_check_memory_usage
= 0;
4331 clear_storage (dest
, size
);
4338 /* Handle calls that return values in multiple non-contiguous locations.
4339 The Irix 6 ABI has examples of this. */
4340 else if (GET_CODE (target
) == PARALLEL
)
4341 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)));
4342 else if (GET_MODE (temp
) == BLKmode
)
4343 emit_block_move (target
, temp
, expr_size (exp
));
4345 emit_move_insn (target
, temp
);
4348 /* If we don't want a value, return NULL_RTX. */
4352 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4353 ??? The latter test doesn't seem to make sense. */
4354 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4357 /* Return TARGET itself if it is a hard register. */
4358 else if (want_value
&& GET_MODE (target
) != BLKmode
4359 && ! (GET_CODE (target
) == REG
4360 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4361 return copy_to_reg (target
);
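
/* Illustrative note (added example, not original GCC commentary): a typical
   source-level case for the STRING_CST handling above is

	char buf[8] = "hi";

   where expr_size (exp) is 8 and TREE_STRING_LENGTH (exp) is 3 ("hi" plus
   the terminating NUL).  The code block-copies the 3 constant bytes and
   then uses clear_storage to zero the remaining 5 bytes.  */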
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
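
/* Illustrative sketch (hypothetical standalone helper, not used by GCC;
   kept under #if 0 so it does not enter the build): the density test
   above, isolated.  An aggregate counts as "mostly zeros" when at least
   three quarters of its elements are zero; comparing 4 * zeros against
   3 * elts avoids any rounding from integer division.  */
#if 0
static int
example_mostly_zeros (int zeros, int elts)
{
  /* E.g. zeros = 3, elts = 4 gives 12 >= 12, so it qualifies.  */
  return 4 * zeros >= 3 * elts;
}
#endif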
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
			 alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (GET_CODE (target) == MEM)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared
	  && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (! cleared && size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  int unsignedp;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif

	      to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif
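
	  /* Worked example of the widening above (illustrative, assuming a
	     big-endian target with BITS_PER_WORD == 32): an 8-bit
	     INTEGER_CST at the start of a word is first converted to a
	     32-bit type, then shifted left by 32 - 8 = 24 so it occupies
	     the most significant byte, and BITSIZE becomes a full word.  */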
	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
	      to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p = (TYPE_MIN_VALUE (domain)
			    && TYPE_MAX_VALUE (domain)
			    && host_integerp (TYPE_MIN_VALUE (domain), 0)
			    && host_integerp (TYPE_MAX_VALUE (domain), 0));
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  unsignedp = TREE_UNSIGNED (elttype);
	  mode = TYPE_MODE (elttype);
	  if (mode == BLKmode)
	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

		      if (GET_CODE (target) == MEM
			  && !MEM_KEEP_ALIAS_SET_P (target)
			  && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  xtarget = offset_address (target, pos_rtx,
					    highest_pow2_factor (position));
		  xtarget = adjust_address (xtarget, mode, 0);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      xtarget = offset_address (target,
					expand_expr (position, 0, VOIDmode, 0),
					highest_pow2_factor (position));
	      xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
		  && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}

	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared,
				       get_alias_set (elttype));
	    }
	}
    }
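
  /* Illustrative note (added example): a RANGE_EXPR index arises from a
     GNU C designated-range initializer such as

	 int a[100] = { [0 ... 9] = 1 };

     A small constant range like this is unrolled by the code above; a
     large or non-constant range is expanded as a real loop over the
     index variable.  */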
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset) and then set
	 the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
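
      /* Worked example of the packing above (illustrative): with
	 set_word_size == 32 and bit 5 of the set true, a little-endian
	 target ORs in 1 << 5, while a big-endian target ORs in
	 1 << (32 - 1 - 5), i.e. 1 << 26, so that bit 0 of the set is the
	 most significant bit of the word.  */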
      else if (!cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
		   || ! host_integerp (TREE_PURPOSE (elt), 0)
		   || (tree_low_cst (TREE_VALUE (elt), 0)
		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
		       != (HOST_WIDE_INT) nbits))))
	  clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
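
/* Illustrative note (added example, not original commentary): the memset
   optimization above fires only for byte-aligned ranges.  With
   BITS_PER_UNIT == 8, the range [8, 23] has startb % 8 == 0 and
   endb == 24 with endb % 8 == 0, so it covers exactly bytes 1 and 2 and
   can be set by storing two 0xff bytes via memset.  */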
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
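
/* Illustrative note (added example): for an assignment to a C bit-field
   such as

	struct s { unsigned f : 5; } x;
	x.f = v;

   store_field is reached with BITSIZE == 5, MODE == VOIDmode (a
   bit-field), and BITPOS giving the field's position within X, so the
   store goes through store_bit_field below.  */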
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object
	= assign_temp
	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
	   0, 1, 1);
      rtx blk_object = copy_rtx (object);

      PUT_MODE (blk_object, BLKmode);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
	      || bitpos % GET_MODE_ALIGNMENT (mode)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT));

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp,
				   GEN_INT
				   (trunc_int_for_mode
				    (width_mask,
				     GET_MODE (temp) == VOIDmode
				     ? value_mode
				     : GET_MODE (temp))), NULL_RTX);

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}

      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
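
/* Illustrative note (added example, hypothetical types): for a reference
   like s.a[i].b, get_inner_reference returns the innermost object `s',
   accumulates the constant bit offsets of `a' and `b' into *PBITPOS,
   and, when `i' is not constant, returns the variable part
   i * sizeof (element) in *POFFSET.  */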
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = DECL_FIELD_OFFSET (field);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;
	  else if (! TREE_CONSTANT (this_offset)
		   && contains_placeholder_p (this_offset))
	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree array = TREE_OPERAND (exp, 0);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is, pass our
	     component to one.  */
	  if (! TREE_CONSTANT (index)
	      && contains_placeholder_p (index))
	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
	  if (! TREE_CONSTANT (unit_size)
	      && contains_placeholder_p (unit_size))
	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
	{
	  tree new = find_placeholder (exp, &placeholder_ptr);

	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
	     We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;
	  else
	    exp = new;

	  continue;
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}

/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;

    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;

    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;

    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;

    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
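
/* Illustrative note (added example): given VALUE
   (plus:SI (reg:SI 100) (const_int 4)), force_operand emits an add insn
   and returns a pseudo register holding the sum; a lone REG, MEM or
   constant is returned unchanged.  */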
rtx
force_operand (value, target)
     rtx value, target;
{
  optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 1);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL and that address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, GET_MODE (x), x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.   We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static HOST_WIDE_INT
highest_pow2_factor (exp)
     tree exp;
{
  HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* If the integer is expressible in a HOST_WIDE_INT, we can find the
	 lowest bit that's a one.  If the result is zero, pessimize by
	 returning 1.  This is overly-conservative, but such things should not
	 happen in the offset expressions that we are called with.  */
      if (host_integerp (exp, 0))
	{
	  c0 = tree_low_cst (exp, 0);
	  c0 = c0 < 0 ? - c0 : c0;
	  return c0 != 0 ? c0 & -c0 : 1;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MAX (1, c0 / c1);

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case COMPOUND_EXPR:  case SAVE_EXPR:  case WITH_RECORD_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
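
/* Illustrative sketch (hypothetical helper, not part of GCC; kept under
   #if 0 so it does not enter the build): for the INTEGER_CST case above,
   the largest power of two dividing a nonzero N is its lowest set bit,
   N & -N.  For example, N = 24 (binary 11000) gives 8.  Zero would claim
   unlimited alignment, so it is clamped to 1.  */
#if 0
static HOST_WIDE_INT
example_pow2_factor (HOST_WIDE_INT c)
{
  c = c < 0 ? -c : c;		/* Use the magnitude.  */
  return c != 0 ? c & -c : 1;	/* Lowest set bit, or 1 for zero.  */
}
#endif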
/* Return an object on the placeholder list that matches EXP, a
   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
   is a location which initially points to a starting location in the
   placeholder list (zero means start of the list) and where a pointer into
   the placeholder list at which the object is found is placed.  */

tree
find_placeholder (exp, plist)
     tree exp;
     tree *plist;
{
  tree type = TREE_TYPE (exp);
  tree placeholder_expr;

  for (placeholder_expr
       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
       placeholder_expr != 0;
       placeholder_expr = TREE_CHAIN (placeholder_expr))
    {
      tree need_type = TYPE_MAIN_VARIANT (type);
      tree elt;

      /* Find the outermost reference that is of the type we want.  If none,
	 see if any object has a type that is a pointer to the type we
	 want.  */
      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return elt;
	  }

      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
	   elt
	   = ((TREE_CODE (elt) == COMPOUND_EXPR
	       || TREE_CODE (elt) == COND_EXPR)
	      ? TREE_OPERAND (elt, 1)
	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
	      ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  {
	    if (plist)
	      *plist = placeholder_expr;
	    return build1 (INDIRECT_REF, need_type, elt);
	  }
    }

  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
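
/* Illustrative note (added example): under EXPAND_SUM, expanding a + 4*b
   may return (plus (reg A) (mult (reg B) (const_int 4))) directly,
   letting the caller fold the whole sum into an addressing mode, instead
   of emitting the multiply and add and returning a pseudo.  */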
rtx
expand_expr (exp, target, tmode, modifier)
     tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }
  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
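  /* Example of the cases handled below (a sketch, not compiler code): in a
     C statement such as

	(void) (x + f ());

     only the call f () needs to be expanded, for its side effect; the
     addition itself is dead and is short-circuited here.  */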
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		       EXPAND_MEMORY_USE_BAD);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
		       EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		       EXPAND_MEMORY_USE_BAD);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
		       EXPAND_MEMORY_USE_BAD);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
		       EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target != 0
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  rtx value = DECL_RTL_IF_SET (exp);

	  layout_decl (exp, 0);

	  /* If the RTL was already set, update its mode and memory
	     attributes.  */
	  if (value != 0)
	    {
	      PUT_MODE (value, DECL_MODE (exp));
	      SET_DECL_RTL (exp, 0);
	      set_mem_attributes (value, exp, 1);
	      SET_DECL_RTL (exp, value);
	    }
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
	  && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  in_check_memory_usage = 1;
	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), Pmode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return replace_equiv_address (TREE_CST_RTL (exp),
				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp),
				 target || ! ignore ? target : const0_rtx,
				 tmode, ro_modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}

      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression,
	 it must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
	tree old_list = placeholder_list;
	tree placeholder_expr = 0;

	exp = find_placeholder (exp, &placeholder_expr);
	if (exp != 0)
	  {
	    placeholder_list = TREE_CHAIN (placeholder_expr);
	    temp = expand_expr (exp, original_target, tmode, ro_modifier);
	    placeholder_list = old_list;
	    return temp;
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}
      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     TREE_ADDRESSABLE (exp), 1, 1);

	  store_constructor (exp, target, 0,
			     int_size_in_bytes (TREE_TYPE (exp)));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return
	    GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (cfun && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
				   Pmode, GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return
	    GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */
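	/* Illustrative example (hypothetical, not compiler code): given

		static const int tbl[3] = { 10, 20, 30 };

	   a reference to tbl[1] is found in DECL_INITIAL (tbl)'s
	   CONSTRUCTOR list by the lookups below and expands directly to
	   the constant 20, with no load from memory.  */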
	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target,
				  tmode, ro_modifier);
	  }
	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return
			GEN_INT (TREE_STRING_POINTER
				 (init)[TREE_INT_CST_LOW (index)]);
		  }
	      }
	  }
      }
      /* Fall through.  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
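      /* Illustrative example (hypothetical): for a front-end tree like
	 ((struct { int a, b; }) { 1, 2 }).b, the loop below locates the
	 CONSTRUCTOR element whose TREE_PURPOSE is the FIELD_DECL for `b'
	 and expands its TREE_VALUE, 2, without materializing the whole
	 aggregate in memory.  */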
      if (code == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS)
			 ? modifier : EXPAND_NORMAL);
	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0));
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
#endif

	    /* A constant address in OP0 can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = (replace_equiv_address
			 (op0,
			  force_reg (GET_MODE (XEXP (temp, 0)),
				     XEXP (temp, 0))));
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (cfun != 0 && current_function_check_memory_usage
	    && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		in_check_memory_usage = 1;
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc,
				     LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
				     Pmode, GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
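	/* A sketch of that situation (hypothetical types and layout):
	   a union containing a 3-byte array, but aligned and given SImode
	   by the target.  A reference to the 3-byte field is BLKmode, so
	   the code below extracts the bits into an integer register of a
	   wide-enough mode, spills them to a stack temporary, and then
	   relabels that MEM as BLKmode.  */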
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
		&& ((TYPE_ALIGN (TREE_TYPE (tem))
		     < GET_MODE_ALIGNMENT (mode))
		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_temp (build_qualified_type
				       (type_for_mode (ext_mode, 0),
					TYPE_QUAL_CONST), 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;

	/* Evaluate the interior expression.  */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);

	/* Get or create an instruction off which to hang a note.  */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }

	/* Collect the data for the note.  */
	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
	vtbl_ref = plus_constant (vtbl_ref,
				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
	/* Discard the initial CONST that was added.  */
	vtbl_ref = XEXP (vtbl_ref, 0);

	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				   GET_MODE (index_val), iunsignedp, op1);

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_expand_expr) (exp, original_target,
					tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, ro_modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are within a word, we
	 can use gen_lowpart.  If neither is true, make sure the operand is
	 in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size = MAX (int_size_in_bytes (inner_type),
					     GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = copy_rtx (new);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      PUT_MODE (new_with_op0_mode, GET_MODE (op0));
	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  PUT_MODE (op0, TYPE_MODE (type));
	}

      return op0;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
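      /* Hypothetical illustration: given the tree (PLUS (PLUS x 4) fp),
	 the swap below rewrites it as (PLUS (PLUS fp 4) x), so the
	 constant is combined with the frame pointer first and folds away
	 cleanly when fp is eliminated.  */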
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
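	      /* Concretely (an assumed 64-bit-host, 32-bit-target example):
		 the tree constant 0xffffffff must become (const_int -1),
		 since canonical RTL keeps CONST_INT values sign-extended
		 in a HOST_WIDE_INT; using the raw low word would produce
		 the non-canonical (const_int 0xffffffff).  */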
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */
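	  /* Worked example (illustrative only): with op0 = (reg 100) and
	     op1 = (plus (symbol_ref X) (const_int 8)), the code above forms
	     op0 = (plus (symbol_ref X) (reg 100)); eliminating constants
	     from op0 yields none, and op1 then simplifies to (const_int 8),
	     so the final sum keeps the constant outermost.  */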
	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      goto binop;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}
      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return
	      gen_rtx_PLUS
		(mode,
		 gen_rtx_MULT
		   (mode, XEXP (op0, 0),
		    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
		 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
			  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
	case FIX_ROUND_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_CEIL_EXPR:
	  abort ();			/* Not used for C.  */

	case FIX_TRUNC_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	  if (target == 0)
	    target = gen_reg_rtx (mode);
	  expand_fix (target, op0, unsignedp);
	  return target;

	case FLOAT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	  if (target == 0)
	    target = gen_reg_rtx (mode);
	  /* expand_float can't figure out what to do if FROM has VOIDmode.
	     So give it the correct mode.  With -O, cse will optimize this.  */
	  if (GET_MODE (op0) == VOIDmode)
	    op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				    op0);
	  expand_float (target, op0,
			TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  return target;
	case NEGATE_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode,
			      ! unsignedp && flag_trapv
			      && (GET_MODE_CLASS (mode) == MODE_INT)
			      ? negv_optab : neg_optab, op0, target, 0);
	  if (temp == 0)
	    abort ();
	  return temp;

	case ABS_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

	  /* Handle complex values specially.  */
	  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	      || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	    return expand_complex_abs (mode, op0, target, unsignedp);

	  /* Unsigned abs is simply the operand.  Testing here means we don't
	     risk generating incorrect code below.  */
	  if (TREE_UNSIGNED (type))
	    return op0;

	  return expand_abs (mode, op0, target, unsignedp,
			     safe_from_p (target, TREE_OPERAND (exp, 0), 1));
	case MAX_EXPR:
	case MIN_EXPR:
	  target = original_target;
	  if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	      || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	      || GET_MODE (target) != mode
	      || (GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER))
	    target = gen_reg_rtx (mode);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

	  /* First try to do it with a special MIN or MAX instruction.
	     If that does not win, use a conditional jump to select the proper
	     value.  */
	  this_optab = (TREE_UNSIGNED (type)
			? (code == MIN_EXPR ? umin_optab : umax_optab)
			: (code == MIN_EXPR ? smin_optab : smax_optab));

	  temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			       OPTAB_WIDEN);
	  if (temp != 0)
	    return temp;

	  /* At this point, a MEM target is no longer useful; we will get better
	     code without it.  */

	  if (GET_CODE (target) == MEM)
	    target = gen_reg_rtx (mode);

	  if (target != op0)
	    emit_move_insn (target, op0);

	  op0 = gen_label_rtx ();

	  /* If this mode is an integer too wide to compare properly,
	     compare word by word.  Rely on cse to optimize constant cases.  */
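	  /* E.g. a DImode MIN or MAX on a 32-bit target is selected with
	     two word-sized comparisons, high-order word first, rather than
	     one full-width comparison.  */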
	  if (GET_MODE_CLASS (mode) == MODE_INT
	      && ! can_compare_p (GE, mode, ccp_jump))
	    {
	      if (code == MAX_EXPR)
		do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					      target, op1, NULL_RTX, op0);
	      else
		do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					      op1, target, NULL_RTX, op0);
	    }
	  else
	    {
	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	      do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				       unsignedp, mode, NULL_RTX, NULL_RTX,
				       op0);
	    }
	  emit_move_insn (target, op1);
	  emit_label (op0);
	  return target;
	case BIT_NOT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
	  if (temp == 0)
	    abort ();
	  return temp;

	case FFS_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode, ffs_optab, op0, target, 1);
	  if (temp == 0)
	    abort ();
	  return temp;
	  /* ??? Can optimize bitwise operations with one arg constant.
	     Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	     and (a bitwise1 b) bitwise2 b (etc)
	     but that is probably not worth while.  */

	  /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	     boolean values when we want in all cases to compute both of them.  In
	     general it is fastest to do TRUTH_AND_EXPR by computing both operands
	     as actual zero-or-1 values and then bitwise anding.  In cases where
	     there cannot be any side effects, better code would be made by
	     treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	     how to recognize those cases.  */
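	  /* E.g. for "a && b" with both operands already reduced to
	     zero-or-one values, a single AND instruction suffices and no
	     conditional jumps are emitted.  */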
	case TRUTH_AND_EXPR:
	case BIT_AND_EXPR:
	  this_optab = and_optab;
	  goto binop;

	case TRUTH_OR_EXPR:
	case BIT_IOR_EXPR:
	  this_optab = ior_optab;
	  goto binop;

	case TRUTH_XOR_EXPR:
	case BIT_XOR_EXPR:
	  this_optab = xor_optab;
	  goto binop;
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	    subtarget = 0;
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			       unsignedp);
	  /* Could determine the answer when only additive constants differ.  Also,
	     the addition of one can be handled by changing the condition.  */
	case LT_EXPR:
	case LE_EXPR:
	case GT_EXPR:
	case GE_EXPR:
	case EQ_EXPR:
	case NE_EXPR:
	case UNORDERED_EXPR:
	case ORDERED_EXPR:
	case UNLT_EXPR:
	case UNLE_EXPR:
	case UNGT_EXPR:
	case UNGE_EXPR:
	case UNEQ_EXPR:
	  temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
	  if (temp != 0)
	    return temp;

	  /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
	  if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	      && original_target
	      && GET_CODE (original_target) == REG
	      && (GET_MODE (original_target)
		  == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    {
	      temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
				  VOIDmode, 0);

	      if (temp != original_target)
		temp = copy_to_reg (temp);

	      op1 = gen_label_rtx ();
	      emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				       GET_MODE (temp), unsignedp, op1);
	      emit_move_insn (temp, const1_rtx);
	      emit_label (op1);
	      return temp;
	    }
	  /* If no set-flag instruction, must generate a conditional
	     store into a temporary variable.  Drop through
	     and handle this like && and ||.  */

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  if (! ignore
	      && (target == 0 || ! safe_from_p (target, exp, 1)
		  /* Make sure we don't have a hard reg (such as function's return
		     value) live across basic blocks, if not optimizing.  */
		  || (!optimize && GET_CODE (target) == REG
		      && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	    target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	  if (target)
	    emit_clr_insn (target);

	  op1 = gen_label_rtx ();
	  jumpifnot (exp, op1);

	  if (target)
	    emit_0_to_1_insn (target);

	  emit_label (op1);
	  return ignore ? const0_rtx : target;
	case TRUTH_NOT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
	  /* The parser is careful to generate TRUTH_NOT_EXPR
	     only with operands that are always zero or one.  */
	  temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			       target, 1, OPTAB_LIB_WIDEN);
	  if (temp == 0)
	    abort ();
	  return temp;

	case COMPOUND_EXPR:
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  return expand_expr (TREE_OPERAND (exp, 1),
			      (ignore ? const0_rtx : target),
			      VOIDmode, ro_modifier);
	case COND_EXPR:
	  /* If we would have a "singleton" (see below) were it not for a
	     conversion in each arm, bring that conversion back out.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	      && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	    {
	      tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	      tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	      if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
		   && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		      && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		      && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		      && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
		return expand_expr (build1 (NOP_EXPR, type,
					    build (COND_EXPR, TREE_TYPE (iftrue),
						   TREE_OPERAND (exp, 0),
						   iftrue, iffalse)),
				    target, tmode, modifier);
	    }
	  /* Note that COND_EXPRs whose type is a structure or union
	     are required to be constructed to contain assignments of
	     a temporary variable, so that we can evaluate them here
	     for side effect only.  If type is void, we must do likewise.  */

	  /* If an arm of the branch requires a cleanup,
	     only that cleanup is performed.  */

	  {
	    tree singleton = 0;
	    tree binary_op = 0, unary_op = 0;

	    /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	       convert it to our mode, if necessary.  */
	    if (integer_onep (TREE_OPERAND (exp, 1))
		&& integer_zerop (TREE_OPERAND (exp, 2))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		if (ignore)
		  {
		    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
				 ro_modifier);
		    return const0_rtx;
		  }

		op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
		if (GET_MODE (op0) == mode)
		  return op0;

		if (target == 0)
		  target = gen_reg_rtx (mode);
		convert_move (target, op0, unsignedp);
		return target;
	      }
	    /* Check for X ? A + B : A.  If we have this, we can copy A to the
	       output and conditionally add B.  Similarly for unary operations.
	       Don't do this if X has side-effects because those side effects
	       might affect A or B and the "?" operation is a sequence point in
	       ANSI.  (operand_equal_p tests for side effects.)  */

	    if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
		&& operand_equal_p (TREE_OPERAND (exp, 2),
				    TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 2),
					 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	    /* If we are not to produce a result, we have no target.  Otherwise,
	       if a target was specified use it; it will not be used as an
	       intermediate target unless it is safe.  If no target, use a
	       temporary.  */

	    if (ignore)
	      temp = 0;
	    else if (original_target
		     && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
			 || (singleton && GET_CODE (original_target) == REG
			     && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			     && original_target == var_rtx (singleton)))
		     && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		     && (! can_conditionally_move_p (mode)
			 || GET_CODE (original_target) == REG
			 || TREE_ADDRESSABLE (type))
#endif
		     && (GET_CODE (original_target) != MEM
			 || TREE_ADDRESSABLE (type)))
	      temp = original_target;
	    else if (TREE_ADDRESSABLE (type))
	      abort ();
	    else
	      temp = assign_temp (type, 0, 0, 1);
	    /* If we had X ? A + C : A, with C a constant power of 2, and we can
	       do the test of X as a store-flag operation, do this as
	       A + ((X != 0) << log C).  Similarly for other simple binary
	       operators.  Only do for C == 1 if BRANCH_COST is low.  */
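	    /* E.g. "x ? a + 4 : a" can become "a + ((x != 0) << 2)", trading
	       the conditional jump for a store-flag and a shift.  */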
	    if (temp && singleton && binary_op
		&& (TREE_CODE (binary_op) == PLUS_EXPR
		    || TREE_CODE (binary_op) == MINUS_EXPR
		    || TREE_CODE (binary_op) == BIT_IOR_EXPR
		    || TREE_CODE (binary_op) == BIT_XOR_EXPR)
		&& (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		    : integer_onep (TREE_OPERAND (binary_op, 1)))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		rtx result;
		optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
				? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
				   ? addv_optab : add_optab)
				: TREE_CODE (binary_op) == MINUS_EXPR
				? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
				   ? subv_optab : sub_optab)
				: TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
				: xor_optab);

		/* If we had X ? A : A + 1, do this as A + (X == 0).

		   We have to invert the truth value here and then put it
		   back later if do_store_flag fails.  We cannot simply copy
		   TREE_OPERAND (exp, 0) to another variable and modify that
		   because invert_truthvalue can modify the tree pointed to
		   by its argument.  */
		if (singleton == TREE_OPERAND (exp, 1))
		  TREE_OPERAND (exp, 0)
		    = invert_truthvalue (TREE_OPERAND (exp, 0));

		result = do_store_flag (TREE_OPERAND (exp, 0),
					(safe_from_p (temp, singleton, 1)
					 ? temp : NULL_RTX),
					mode, BRANCH_COST <= 1);

		if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
		  result = expand_shift (LSHIFT_EXPR, mode, result,
					 build_int_2 (tree_log2
						      (TREE_OPERAND
						       (binary_op, 1)),
						      0),
					 (safe_from_p (temp, singleton, 1)
					  ? temp : NULL_RTX), 0);

		if (result)
		  {
		    op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		    return expand_binop (mode, boptab, op1, result, temp,
					 unsignedp, OPTAB_LIB_WIDEN);
		  }
		else if (singleton == TREE_OPERAND (exp, 1))
		  TREE_OPERAND (exp, 0)
		    = invert_truthvalue (TREE_OPERAND (exp, 0));
	      }
	    do_pending_stack_adjust ();
	    NO_DEFER_POP;
	    op0 = gen_label_rtx ();

	    if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	      {
		if (temp != 0)
		  {
		    /* If the target conflicts with the other operand of the
		       binary op, we can't use it.  Also, we can't use the target
		       if it is a hard register, because evaluating the condition
		       might clobber it.  */
		    if ((binary_op
			 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
			|| (GET_CODE (temp) == REG
			    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
		      temp = gen_reg_rtx (mode);
		    store_expr (singleton, temp, 0);
		  }
		else
		  expand_expr (singleton,
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		if (singleton == TREE_OPERAND (exp, 1))
		  jumpif (TREE_OPERAND (exp, 0), op0);
		else
		  jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		if (binary_op && temp == 0)
		  /* Just touch the other operand.  */
		  expand_expr (TREE_OPERAND (binary_op, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		else if (binary_op)
		  store_expr (build (TREE_CODE (binary_op), type,
				     make_tree (type, temp),
				     TREE_OPERAND (binary_op, 1)),
			      temp, 0);
		else
		  store_expr (build1 (TREE_CODE (unary_op), type,
				      make_tree (type, temp)),
			      temp, 0);
		op1 = op0;
	      }
	    /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	       comparison operator.  If we have one of these cases, set the
	       output to A, branch on A (cse will merge these two references),
	       then set the output to FOO.  */
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 1), 0)
		     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
			 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		     && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	      {
		if (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		jumpif (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		op1 = op0;
	      }
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 2), 0)
		     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
			 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		     && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	      {
		if (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		op1 = op0;
	      }
	    else
	      {
		op1 = gen_label_rtx ();
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();

		/* One branch of the cond can be void, if it never returns.  For
		   example A ? throw : E  */
		if (temp != 0
		    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 1), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		end_cleanup_deferral ();
		emit_queue ();
		emit_jump_insn (gen_jump (op1));
		emit_barrier ();
		emit_label (op0);
		start_cleanup_deferral ();
		if (temp != 0
		    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 2), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 2),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		end_cleanup_deferral ();
	      }

	    emit_queue ();
	    emit_label (op1);
	    OK_DEFER_POP;

	    return temp;
	  }
	case TARGET_EXPR:
	  {
	    /* Something needs to be initialized, but we didn't know
	       where that thing was when building the tree.  For example,
	       it could be the return value of a function, or a parameter
	       to a function which lays down in the stack, or a temporary
	       variable which must be passed by reference.

	       We guarantee that the expression will either be constructed
	       or copied into our original target.  */

	    tree slot = TREE_OPERAND (exp, 0);
	    tree cleanups = NULL_TREE;
	    tree exp1;

	    if (TREE_CODE (slot) != VAR_DECL)
	      abort ();

	    if (! ignore)
	      target = original_target;

	    /* Set this here so that if we get a target that refers to a
	       register variable that's already been used, put_reg_into_stack
	       knows that it should fix up those uses.  */
	    TREE_USED (slot) = 1;

	    if (target == 0)
	      {
		if (DECL_RTL_SET_P (slot))
		  {
		    target = DECL_RTL (slot);
		    /* If we have already expanded the slot, don't do
		       it again.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    target = assign_temp (type, 2, 0, 1);
		    /* All temp slots at this level must not conflict.  */
		    preserve_temp_slots (target);
		    SET_DECL_RTL (slot, target);
		    if (TREE_ADDRESSABLE (slot))
		      put_var_into_stack (slot);

		    /* Since SLOT is not known to the called function
		       to belong to its stack frame, we must build an explicit
		       cleanup.  This case occurs when we must build up a reference
		       to pass the reference as an argument.  In this case,
		       it is very likely that such a reference need not be
		       built here.  */

		    if (TREE_OPERAND (exp, 2) == 0)
		      TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		    cleanups = TREE_OPERAND (exp, 2);
		  }
	      }
	    else
	      {
		/* This case does occur, when expanding a parameter which
		   needs to be constructed on the stack.  The target
		   is the actual stack address that we want to initialize.
		   The function we call will perform the cleanup in this case.  */

		/* If we have already assigned it space, use that space,
		   not target that we were passed in, as our target
		   parameter is only a hint.  */
		if (DECL_RTL_SET_P (slot))
		  {
		    target = DECL_RTL (slot);
		    /* If we have already expanded the slot, don't do
		       it again.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    SET_DECL_RTL (slot, target);
		    /* If we must have an addressable slot, then make sure that
		       the RTL that we just stored in slot is OK.  */
		    if (TREE_ADDRESSABLE (slot))
		      put_var_into_stack (slot);
		  }
	      }

	    exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	    /* Mark it as expanded.  */
	    TREE_OPERAND (exp, 1) = NULL_TREE;

	    store_expr (exp1, target, 0);

	    expand_decl_cleanup (NULL_TREE, cleanups);

	    return target;
	  }
	case INIT_EXPR:
	  {
	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);

	    temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	    return temp;
	  }
	case MODIFY_EXPR:
	  {
	    /* If lhs is complex, expand calls in rhs before computing it.
	       That's so we don't compute a pointer and save it over a
	       call.  If lhs is simple, compute it first so we can give it
	       as a target if the rhs is just a call.  This avoids an
	       extra temp and copy and that prevents a partial-subsumption
	       which makes bad code.  Actually we could treat
	       component_ref's of vars like vars.  */

	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);

	    /* Check for |= or &= of a bitfield of size one into another bitfield
	       of size 1.  In this case, (unless we need the result of the
	       assignment) we can do this more efficiently with a
	       test followed by an assignment, if necessary.

	       ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	       things change so we do, this code should be enhanced to
	       support it.  */
	    if (ignore
		&& TREE_CODE (lhs) == COMPONENT_REF
		&& (TREE_CODE (rhs) == BIT_IOR_EXPR
		    || TREE_CODE (rhs) == BIT_AND_EXPR)
		&& TREE_OPERAND (rhs, 0) == lhs
		&& TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
		&& integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
		&& integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	      {
		rtx label = gen_label_rtx ();

		do_jump (TREE_OPERAND (rhs, 1),
			 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
			 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
		expand_assignment (lhs, convert (TREE_TYPE (rhs),
						 (TREE_CODE (rhs) == BIT_IOR_EXPR
						  ? integer_one_node
						  : integer_zero_node)),
				   0, 0);
		do_pending_stack_adjust ();
		emit_label (label);
		return const0_rtx;
	      }

	    temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	    return temp;
	  }
	case RETURN_EXPR:
	  if (!TREE_OPERAND (exp, 0))
	    expand_null_return ();
	  else
	    expand_return (TREE_OPERAND (exp, 0));
	  return const0_rtx;

	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  return expand_increment (exp, 0, ignore);

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* Faster to treat as pre-increment if result is not used.  */
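	  /* E.g. in the statement "i++;" the old value is never used, so
	     the pre-increment expansion below saves the copy that would
	     otherwise preserve it.  */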
	  return expand_increment (exp, ! ignore, ignore);
	case ADDR_EXPR:
	  /* Are we taking the address of a nested function?  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	      && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	      && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	      && ! TREE_STATIC (exp))
	    {
	      op0 = trampoline_address (TREE_OPERAND (exp, 0));
	      op0 = force_operand (op0, target);
	    }
	  /* If we are taking the address of something erroneous, just
	     return a zero.  */
	  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	    return const0_rtx;
	  /* If we are taking the address of a constant and are at the
	     top level, we have to use output_constant_def since we can't
	     call force_const_mem at top level.  */
	  else if (cfun == 0
		   && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		       || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
			   == 'c')))
	    op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
	  else
	    {
	      /* We make sure to pass const0_rtx down if we came in with
		 ignore set, to avoid doing the cleanups twice for something.  */
	      op0 = expand_expr (TREE_OPERAND (exp, 0),
				 ignore ? const0_rtx : NULL_RTX, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? modifier : EXPAND_CONST_ADDRESS));

	      /* If we are going to ignore the result, OP0 will have been set
		 to const0_rtx, so just return it.  Don't get confused and
		 think we are taking the address of the constant.  */
	      if (ignore)
		return op0;

	      /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
		 clever and returns a REG when given a MEM.  */
	      op0 = protect_from_queue (op0, 1);

	      /* We would like the object in memory.  If it is a constant, we can
		 have it be statically allocated into memory.  For a non-constant,
		 we need to allocate some memory and store the value into it.  */

	      if (CONSTANT_P (op0))
		op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				       op0);
	      else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		       || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		       || GET_CODE (op0) == PARALLEL)
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  tree nt = build_qualified_type (inner_type,
						  (TYPE_QUALS (inner_type)
						   | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (nt, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple non-contiguous
		       locations.  The Irix 6 ABI has examples of this.  */
		    emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}

	      if (GET_CODE (op0) != MEM)
		abort ();

	      mark_temp_addr_taken (op0);
	      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
		{
		  op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		      && mode == ptr_mode)
		    op0 = convert_memory_address (ptr_mode, op0);
#endif
		  return op0;
		}

	      /* If OP0 is not aligned at least as much as the type requires, we
		 need to make a temporary, copy OP0 to it, and take the address of
		 the temporary.  We want to use the alignment of the type, not of
		 the operand.  Note that this is incorrect for FUNCTION_TYPE, but
		 the test for BLKmode means that can't happen.  The test for
		 BLKmode is because we never make mis-aligned MEMs with
		 non-BLKmode.

		 We don't need to do this at all if the machine doesn't have
		 strict alignment.  */
	      if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
		  && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		      > MEM_ALIGN (op0))
		  && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx new
		    = assign_stack_temp_for_type
		      (TYPE_MODE (inner_type),
		       MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		       : int_size_in_bytes (inner_type),
		       1, build_qualified_type (inner_type,
						(TYPE_QUALS (inner_type)
						 | TYPE_QUAL_CONST)));

		  if (TYPE_ALIGN_OK (inner_type))
		    abort ();

		  emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
		  op0 = new;
		}

	      op0 = force_operand (XEXP (op0, 0), target);
	    }

	  if (flag_force_addr && GET_CODE (op0) != REG)
	    op0 = force_reg (Pmode, op0);

	  if (GET_CODE (op0) == REG
	      && ! REG_USERVAR_P (op0))
	    mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	      && mode == ptr_mode)
	    op0 = convert_memory_address (ptr_mode, op0);
#endif

	  return op0;
	case ENTRY_VALUE_EXPR:
	  abort ();

	/* COMPLEX type for Extended Pascal & Fortran  */
	case COMPLEX_EXPR:
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx insns;

	    /* Get the rtx code of the operands.  */
	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	    op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	    start_sequence ();

	    /* Move the real (op0) and imaginary (op1) parts to their location.  */
	    emit_move_insn (gen_realpart (mode, target), op0);
	    emit_move_insn (gen_imagpart (mode, target), op1);

	    insns = get_insns ();
	    end_sequence ();

	    /* Complex construction should appear as a single unit.  */
	    /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	    else
	      emit_insns (insns);

	    return target;
	  }

	case REALPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_realpart (mode, op0);

	case IMAGPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_imagpart (mode, op0);

	case CONJ_EXPR:
	  {
	    enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx imag_t;
	    rtx insns;

	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (mode);

	    start_sequence ();

	    /* Store the realpart and the negated imagpart to target.  */
	    emit_move_insn (gen_realpart (partmode, target),
			    gen_realpart (partmode, op0));

	    imag_t = gen_imagpart (partmode, target);
	    temp = expand_unop (partmode,
				! unsignedp && flag_trapv
				&& (GET_MODE_CLASS (partmode) == MODE_INT)
				? negv_optab : neg_optab,
				gen_imagpart (partmode, op0), imag_t, 0);
	    if (temp != imag_t)
	      emit_move_insn (imag_t, temp);

	    insns = get_insns ();
	    end_sequence ();

	    /* Conjugate should appear as a single unit
	       If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	    else
	      emit_insns (insns);

	    return target;
	  }
:
8922 tree handler
= TREE_OPERAND (exp
, 1);
8924 expand_eh_region_start ();
8926 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8928 expand_eh_region_end_cleanup (handler
);
8933 case TRY_FINALLY_EXPR
:
8935 tree try_block
= TREE_OPERAND (exp
, 0);
8936 tree finally_block
= TREE_OPERAND (exp
, 1);
8937 rtx finally_label
= gen_label_rtx ();
8938 rtx done_label
= gen_label_rtx ();
8939 rtx return_link
= gen_reg_rtx (Pmode
);
8940 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8941 (tree
) finally_label
, (tree
) return_link
);
8942 TREE_SIDE_EFFECTS (cleanup
) = 1;
8944 /* Start a new binding layer that will keep track of all cleanup
8945 actions to be performed. */
8946 expand_start_bindings (2);
8948 target_temp_slot_level
= temp_slot_level
;
8950 expand_decl_cleanup (NULL_TREE
, cleanup
);
8951 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8953 preserve_temp_slots (op0
);
8954 expand_end_bindings (NULL_TREE
, 0, 0);
8955 emit_jump (done_label
);
8956 emit_label (finally_label
);
8957 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8958 emit_indirect_jump (return_link
);
8959 emit_label (done_label
);
8963 case GOTO_SUBROUTINE_EXPR
:
8965 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8966 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8967 rtx return_address
= gen_label_rtx ();
8968 emit_move_insn (return_link
,
8969 gen_rtx_LABEL_REF (Pmode
, return_address
));
8971 emit_label (return_address
);
8976 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8979 return get_exception_pointer (cfun
);
	case FDESC_EXPR:
	  /* Function descriptors are not valid except for as
	     initialization constants, and should not be expanded.  */
	  abort ();

	default:
	  return (*lang_expand_expr) (exp, original_target, tmode, modifier);
	}

      /* Here to do an ordinary binary operator, generating an instruction
	 from the optab already placed in `this_optab'.  */
    binop:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
    binop2:
      temp = expand_binop (mode, this_optab, op0, op1, target,
			   unsignedp, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
    }
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
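/* E.g. for an ARG of the form "hello" + 2, i.e. a PLUS_EXPR whose first
   operand is the address of a STRING_CST, the STRING_CST for "hello" is
   returned and *PTR_OFFSET is set to a sizetype 2.  */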
tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
static rtx
expand_increment (exp, post, ignore)
     tree exp;
     int post, ignore;
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is know not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
		     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
		      current_function_check_memory_usage ? NULL_RTX : op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
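/* For instance, for "a && b" the first operand is tested and jumps
   directly to IF_FALSE_LABEL when it is zero; only then is the second
   operand tested.  No zero-or-one value for the whole expression is
   ever materialized.  */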
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
	 operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0),
				  TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
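      /* E.g. "(x & 0x80) != 0" with X a full-width int can be tested as a
	 QImode comparison, so the sign-bit test may use a byte insn.  */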
      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &volatilep);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;
    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (EQ_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (REALPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1)))),
		      fold (build (NE_EXPR, TREE_TYPE (exp),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp0)),
				   fold (build1 (IMAGPART_EXPR,
						 TREE_TYPE (inner_type),
						 exp1))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
	rcode1 = UNLT;
	tcode2 = LT_EXPR;
	goto unordered_bcc;
      case UNLE_EXPR:
	rcode1 = UNLE;
	tcode2 = LE_EXPR;
	goto unordered_bcc;
      case UNGT_EXPR:
	rcode1 = UNGT;
	tcode2 = GT_EXPR;
	goto unordered_bcc;
      case UNGE_EXPR:
	rcode1 = UNGE;
	tcode2 = GE_EXPR;
	goto unordered_bcc;
      case UNEQ_EXPR:
	rcode1 = UNEQ;
	tcode2 = EQ_EXPR;
	goto unordered_bcc;

      unordered_bcc:
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
      }
      break;
    case CALL_EXPR:
      /* __builtin_expect (<test>, 0) and
	 __builtin_expect (<test>, 1)

	 We need to do this here, so that <test> is not converted to a SCC
	 operation on machines that use condition code registers and COMPARE
	 like the PowerPC, and then the jump is done based on whether the SCC
	 operation produced a 1 or 0.  */

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree arglist = TREE_OPERAND (exp, 1);

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && arglist != NULL_TREE
	      && TREE_CHAIN (arglist) != NULL_TREE)
	    {
	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						    if_true_label);

	      if (seq != NULL_RTX)
		{
		  emit_insn (seq);
		  return;
		}
	    }
	}
      /* fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared), and set (CC0)
   according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
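
/* Illustrative sketch (editorial addition): the disabled transformation
   above must mask a signed constant to its unsigned bit pattern before
   switching an EQ/NE compare to unsigned.  In plain C, testing a signed
   byte against -1 is the same as testing its low eight bits against 0xff;
   the helper name is hypothetical.  */
#if 0
#include <stdint.h>

static int
byte_eq_minus_one (int8_t x)
{
  /* Signed form: x == -1.  Unsigned form of the same test: the constant
     -1 masked by the mode mask 0xff yields 0xff.  */
  return (uint8_t) x == 0xff;
}
#endif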
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
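
/* Illustrative sketch (editorial addition): the label swap above refuses
   to reverse floating-point comparisons because, once NaNs are involved,
   the logical negation of "a < b" is "a >= b or unordered", which a plain
   reversed condition cannot express.  In C, with a hypothetical helper:  */
#if 0
static int
reversal_is_unsafe (double a, double b)
{
  /* When either operand is NaN, both (a < b) and (a >= b) are false,
     so !(a < b) is NOT equivalent to (a >= b).  This returns nonzero
     exactly when a NaN is present.  */
  return !(a < b) != (a >= b);
}
#endif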
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
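
/* Illustrative sketch (editorial addition): the single-bit case in
   do_store_flag above computes ((x & (1 << n)) != 0) with just a shift
   and a mask, and folds an EQ test by XORing the result with 1 -- no
   store-flag (scc) instruction needed.  Hypothetical helpers:  */
#if 0
#include <stdint.h>

static unsigned
bit_test_ne (uint32_t x, int n)      /* (x & (1u << n)) != 0 */
{
  return (x >> n) & 1u;              /* shift bit to position 0, mask */
}

static unsigned
bit_test_eq (uint32_t x, int n)      /* (x & (1u << n)) == 0 */
{
  return ((x >> n) & 1u) ^ 1u;       /* same, then invert with XOR */
}
#endif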
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
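
/* Illustrative sketch (editorial addition): when the index is wider than
   SImode, try_casesi subtracts the case minimum and range-checks in the
   original wide mode *before* truncating, since truncating first could
   map a huge out-of-range index onto a valid table slot.  The helper
   below is hypothetical.  */
#if 0
#include <stdint.h>

static int
narrow_index (int64_t index, int64_t minval, uint32_t range, uint32_t *out)
{
  uint64_t biased = (uint64_t) index - (uint64_t) minval;

  if (biased > range)           /* unsigned compare in the wide mode */
    return 0;                   /* out of range: take the default label */
  *out = (uint32_t) biased;     /* now truncation is safe */
  return 1;
}
#endif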
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
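
/* Illustrative sketch (editorial addition): the single GTU comparison in
   do_tablejump checks both table bounds at once.  After the low bound is
   subtracted, any index below it wraps around to a huge unsigned value,
   so one unsigned "greater than range" test rejects both underflow and
   overflow.  In C, with a hypothetical helper:  */
#if 0
static int
in_range (int i, int lo, int hi)
{
  /* Equivalent to (i >= lo && i <= hi), with a single comparison.  */
  return (unsigned) (i - lo) <= (unsigned) (hi - lo);
}
#endif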
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}